repo_name
stringlengths 5
122
| path
stringlengths 3
232
| text
stringlengths 6
1.05M
|
|---|---|---|
mtolley/seeker-github-actions
|
src/seeker-compliance-report.ts
|
// seeker-compliance-report
// ////////////////////////
//
// Downloads the Seeker Compliance report for the specified project and uploads
// it to the workflow results as a build artefact.
import * as core from '@actions/core'
import {generateSeekerComplianceReportPDF, getInputOrEnvironmentVariable, uploadSeekerComplianceReport} from './utils'
// Entry point: downloads the Seeker Compliance report for the configured
// project and uploads it to the workflow run as a build artefact.
async function run(): Promise<void> {
  try {
    core.info('⬇️ Downloading Seeker compliance report from the Seeker Server')
    // Required connection settings come from action inputs or env vars.
    const seekerServerURL = getInputOrEnvironmentVariable(
      'seekerServerUrl',
      'SEEKER_SERVER_URL',
      true // required
    )
    const seekerProjectKey = getInputOrEnvironmentVariable(
      'seekerProjectKey',
      'SEEKER_PROJECT_KEY',
      true // required
    )
    const seekerAPIToken = getInputOrEnvironmentVariable(
      'seekerAPIToken',
      'SEEKER_API_TOKEN',
      true // required
    )
    // Generate the PDF on the Seeker server, then attach it to the run.
    await generateSeekerComplianceReportPDF({
      seekerServerURL,
      seekerProjectKey,
      seekerAPIToken
    })
    await uploadSeekerComplianceReport()
  } catch (error) {
    // 'error' is unknown under strict TS; narrow before reading .message.
    core.setFailed(error instanceof Error ? error.message : String(error))
  }
}
run()
|
mtolley/seeker-github-actions
|
src/list-seeker-vulnerabilities.ts
|
<reponame>mtolley/seeker-github-actions<filename>src/list-seeker-vulnerabilities.ts
// list-seeker-vulnerabilities
// ///////////////////////////
//
// Lists vulnerabilities from the Seeker server apply these filters:
//
// * Project key
// * (Optional) Project version
// * (Optional) Only Seeker-Verified vulnerabilities
// * (Optional) Statuses (defaults to DETECTED)
// * (Optional) Minimum Severity
//
// The default use case for list-seeker-vulnerabilities is to output a list of
// currently detected vulnerabilities for the specified project.
import * as core from '@actions/core'
import {getInputOrEnvironmentVariable, getSeekerVulnerabilities} from './utils'
// Entry point: fetches vulnerabilities for the configured project, applying
// the optional filters, and emits one workflow warning per vulnerability.
async function run(): Promise<void> {
  try {
    core.info('⬇️ Downloading Vulnerabilities from the Seeker server...')
    // Required connection settings (inputs or environment variables).
    const seekerServerURL = getInputOrEnvironmentVariable(
      'seekerServerUrl',
      'SEEKER_SERVER_URL',
      true // required
    )
    const seekerProjectKey = getInputOrEnvironmentVariable(
      'seekerProjectKey',
      'SEEKER_PROJECT_KEY',
      true // required
    )
    const seekerAPIToken = getInputOrEnvironmentVariable(
      'seekerAPIToken',
      'SEEKER_API_TOKEN',
      true // required
    )
    const seekerProjectVersion = getInputOrEnvironmentVariable(
      'seekerProjectVersion',
      'SEEKER_PROJECT_VERSION',
      false // optional, as versioning might not be available for this project
    )
    const onlySeekerVerified = core.getBooleanInput('onlySeekerVerified')
    const statuses = core.getInput('statuses') || 'DETECTED'
    const minSeverity = core.getInput('minSeverity') || ''
    const validSeverities = ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFORMATIVE']
    if (minSeverity && !validSeverities.includes(minSeverity)) {
      // BUG FIX: the original logged this with core.error but then queried
      // the server with the invalid value anyway. Fail fast instead.
      core.setFailed(`Invalid value for minSeverity provided: ${minSeverity}. Permitted values are CRITICAL, HIGH, MEDIUM, LOW, and INFORMATIVE`)
      return
    }
    const vulns = await getSeekerVulnerabilities({
      seekerServerURL,
      seekerProjectKey,
      seekerAPIToken,
      statuses,
      onlySeekerVerified,
      minSeverity,
      seekerProjectVersion
    })
    // Surface each vulnerability as a workflow warning annotation.
    for (const v of vulns) {
      core.warning(`🚩 Seeker Vulnerability ${v.ItemKey} ${v.VulnerabilityName} URL: ${v.URL} ${v.SeekerServerLink}`)
    }
  } catch (error) {
    // 'error' is unknown under strict TS; narrow before reading .message.
    core.setFailed(error instanceof Error ? error.message : String(error))
  }
}
run()
|
mtolley/seeker-github-actions
|
src/testme.ts
|
// fix-undetected-vulnerabilities
// //////////////////////////////
//
// Search for any vulnerabilities in the project that currently have a status of
// DETECTED but which were not detected during testing of the specified version of
// the project. If any such vulnerabilties are found, set the Status to FIXED.
//
// Version detection must be enable for the project or project template in Seeker
// and the current version must be specified as an input to this action (or via
// the environment variable SEEKER_PROJECT_VERSION).
import * as core from '@actions/core'
import * as github from '@actions/github'
import {getInputOrEnvironmentVariable} from './utils'
// Scratch/test entry point: posts a hard-coded comment on issue #9 of the
// current repository to exercise the Octokit client end to end.
async function run(): Promise<void> {
  try {
    core.info('ℹ️ ?Checking for vulnerabilties that may have been fixed in this commit.')
    // Token may come from the action input or the environment; it is only
    // required when closing issues, so it is optional here.
    const gitHubToken = getInputOrEnvironmentVariable(
      'gitHubToken',
      'GITHUB_TOKEN',
      false // only required if closeFixedIssues is set to true
    )
    const octokit = github.getOctokit(gitHubToken)
    // GITHUB_REPOSITORY has the form "owner/repo".
    const [owner, repo] = (process.env.GITHUB_REPOSITORY as string).split('/')
    // 'https://github.com/mtolley/hippotech-front-seeker-actions/issues/9'
    core.info('one')
    const response = await octokit.rest.issues.createComment({
      owner,
      repo,
      issue_number: parseInt("9"),
      body: 'Hello universe!'
    })
    core.info(response.toString())
    core.info('two')
  } catch (e) {
    core.info(e.toString())
  }
}
run()
|
mtolley/seeker-github-actions
|
src/fix-undetected-vulnerabilities.ts
|
<reponame>mtolley/seeker-github-actions
// fix-undetected-vulnerabilities
// //////////////////////////////
//
// Search for any vulnerabilities in the project that currently have a status of
// DETECTED but which were not detected during testing of the specified version of
// the project. If any such vulnerabilties are found, set the Status to FIXED.
//
// Version detection must be enable for the project or project template in Seeker
// and the current version must be specified as an input to this action (or via
// the environment variable SEEKER_PROJECT_VERSION).
import * as core from '@actions/core'
import * as github from '@actions/github'
import axios from 'axios'
import {getInputOrEnvironmentVariable, getSeekerVulnerabilities} from './utils'
import * as querystring from 'querystring'
// Entry point: finds vulnerabilities still marked DETECTED on the Seeker
// server that were NOT observed in the latest test run of the configured
// project version, bulk-transitions them to FIXED, and (optionally) closes
// the GitHub issues linked to them.
async function run(): Promise<void> {
  try {
    core.info('ℹ️ Checking for vulnerabilties that may have been fixed in this commit.')
    // Get the action inputs (or environment variables)
    const seekerServerURL = getInputOrEnvironmentVariable(
      'seekerServerUrl',
      'SEEKER_SERVER_URL',
      true // required
    )
    const seekerProjectKey = getInputOrEnvironmentVariable(
      'seekerProjectKey',
      'SEEKER_PROJECT_KEY',
      true // required
    )
    const seekerAPIToken = getInputOrEnvironmentVariable(
      'seekerAPIToken',
      'SEEKER_API_TOKEN',
      true // required
    )
    const seekerProjectVersion = getInputOrEnvironmentVariable(
      'seekerProjectVersion',
      'SEEKER_PROJECT_VERSION',
      true // required
    )
    const closeFixedIssues = core.getBooleanInput('closeFixedIssues')
    const gitHubToken = getInputOrEnvironmentVariable(
      'gitHubToken',
      'GITHUB_TOKEN',
      false // only required if closeFixedIssues is set to true
    )
    // Download all vulnerabilities for the project that are currently still
    // in the DETECTED state on the Seeker server.
    let vulns = await getSeekerVulnerabilities({
      seekerServerURL,
      seekerProjectKey,
      seekerAPIToken,
      statuses: 'DETECTED'
    })
    // Keep only vulnerabilities NOT detected during the most recent test run.
    vulns = vulns.filter(v => v.LatestVersion !== seekerProjectVersion)
    if (vulns.length === 0) {
      core.info('ℹ️ No DETECTED vulnerabilities were identified as FIXED (non detected) for this version.')
      return
    }
    core.info('👏 Vulnerabilities identified that have not been detected in the current version. The status for these vulnerabilities will be set to FIXED automatically.')
    for (const v of vulns) {
      core.info(v.ItemKey)
    }
    const bulkUpdate = {
      vulnerabilityKeys: vulns.map(v => v.ItemKey).join(','),
      status: 'FIXED',
      comment: `Automatically setting status to FIXED as this defect was NOT detected during testing for version ${seekerProjectVersion}`
    }
    const url = `${seekerServerURL}/rest/api/latest/vulnerabilities/triage/bulk`
    try {
      // BUG FIX: this request was previously not awaited, so a rejected
      // promise escaped the surrounding try/catch (unhandled rejection) and
      // the error handling below never ran.
      await axios({
        method: 'post',
        url,
        data: querystring.stringify(bulkUpdate),
        headers: {
          Authorization: seekerAPIToken,
          'content-type': 'application/x-www-form-urlencoded;charset=utf-8'
        }
      })
    } catch (error: any) {
      if (error.response) {
        core.error(`Seeker Server responded with error code: ${error.response.status}`)
        core.error(`Error message: ${error.response.data.message}`)
      } else {
        core.error("No response from Seeker Server")
        core.error(error)
      }
      return
    }
    if (closeFixedIssues) {
      const octokit = github.getOctokit(gitHubToken)
      const context = github.context
      const commit = process.env['GITHUB_SHA'] as string
      for (const v of vulns) {
        // Only vulnerabilities with a linked ticket can be closed in GitHub.
        if (v.ticketUrls) {
          // The issue number is the last path segment of the ticket URL.
          const issue_number = parseInt(v.ticketUrls.slice(v.ticketUrls.lastIndexOf('/') + 1), 10)
          // Close the issue in GitHub
          await octokit.rest.issues.update({
            ...context.repo,
            issue_number,
            state: 'closed'
          })
          core.info(`✔️ Closed issue #${issue_number.toString()} in GitHub`)
          await octokit.rest.issues.createComment({
            ...context.repo,
            issue_number,
            body: `Issue automatically closed by \`fix-undetected-vulnerabilities\` in workflow **${context.workflow}** (run number **${context.runNumber}**) for commit: ${commit}, because this vulnerabilty was not detected during the latest test run.`
          })
        }
      }
    }
  } catch (error) {
    // 'error' is unknown under strict TS; narrow before reading .message.
    core.setFailed(error instanceof Error ? error.message : String(error))
  }
}
run()
|
mtolley/seeker-github-actions
|
src/utils.ts
|
import * as core from '@actions/core'
import axios, { AxiosError, AxiosResponse } from 'axios'
import { writeFileSync } from 'fs'
import * as artifact from '@actions/artifact'
// Shape of a single vulnerability record as returned by the Seeker
// /vulnerabilities REST API (format=JSON). Field names mirror the API
// response verbatim, including the quoted compliance-standard keys.
export interface Vulnerability {
Owner: string,
ProjectKey: string,
ItemKey: string,
CheckerKey: string,
VulnerabilityName: string,
Severity: string,
// URL(s) of linked issue-tracker tickets; fix-undetected-vulnerabilities
// parses the issue number from the last path segment of this value.
ticketUrls: string,
URL: string,
SourceName: string,
SourceType: string,
CodeLocation: string,
StackTrace: string,
VerificationTag: string,
DetectionCount: string,
FirstDetectionTime: string,
LastDetectionTime: string,
Status: string,
OWASP2013: string,
"PCI-DSS": string,
"CWE-SANS": string,
OWASP2017: string,
GDPR: string,
CAPEC: string,
LastDetectionURL: string,
SeekerServerLink: string,
CustomTags: string,
// project version in which this vulnerability was last seen
LatestVersion: string
}
// Resolve a configuration value from the action input, falling back to the
// named environment variable. When 'required' is true and neither source
// provides a value, the build is marked failed (an empty string is still
// returned so the caller can unwind).
export function getInputOrEnvironmentVariable(
inputName: string,
envVar: string,
required = true
): string {
const fromInput = core.getInput(inputName)
const value = fromInput !== '' ? fromInput : (process.env[envVar] ?? '')
if (!value && required) {
core.setFailed(`You must provide either the input parameter ${inputName} or environment variable ${envVar}`)
}
return value
}
// True only when the input (or fallback environment variable) equals
// 'true' case-insensitively; anything else — including unset — is false.
export function getInputOrEnvironmentVariableBoolean(
inputName: string,
envVar: string
): boolean {
const raw = core.getInput(inputName) || (process.env[envVar] ?? '')
return raw.toUpperCase() === 'TRUE'
}
// Log a failed Seeker API call: when the server answered, report its HTTP
// status and message; otherwise report that no response arrived at all.
export function handleAxiosError(error: AxiosError): void {
const response = error.response
if (!response) {
core.error("No response from Seeker Server")
core.error(error)
return
}
core.error(`Seeker Server responded with error code: ${response.status}`)
core.error(`Error message: ${response.data.message}`)
}
// Minimal slice of the /projects/{key}/status response that we consume.
interface Status {
projectStatus: {
compliant: boolean
}
}
// Arguments for checkComplianceStatus.
export interface getComplianceStatusParameters {
seekerServerURL: string,
seekerProjectKey: string,
seekerAPIToken: string,
// when true, a non-compliant project fails the build
failBuildIfNotInCompliance: boolean
}
export async function checkComplianceStatus({
seekerServerURL,
seekerProjectKey,
seekerAPIToken,
failBuildIfNotInCompliance
}: getComplianceStatusParameters): Promise<boolean> {
const url = `${seekerServerURL}/rest/api/latest/projects/${seekerProjectKey}/status`
let res: AxiosResponse<Status>
try {
res = await axios.get(url, {
headers: {
Authorization: seekerAPIToken
}
})
} catch(error) {
if (error.response) {
core.error(`Seeker Server responded with error code: ${error.response.status}`)
core.error(`Error message: ${error.response.data.message}`)
} else {
core.error("No response from Seeker Server")
core.error(error)
}
return false
}
if (failBuildIfNotInCompliance && res.data.projectStatus.compliant === false) {
const message = `❌ Seeker Project ${seekerProjectKey} is not in compliance. Please see Compliance Report for more details.`
if (failBuildIfNotInCompliance) {
core.setFailed(message)
} else {
core.warning(message)
}
} else {
core.info(`✔️ Seeker Project ${seekerProjectKey} is in compliance.`)
}
return res.data.projectStatus.compliant
}
// Arguments for generateSeekerComplianceReportPDF.
export interface generateSeekerComplianceReportPDFParameters {
seekerServerURL: string,
seekerProjectKey: string,
seekerAPIToken: string
}
export async function generateSeekerComplianceReportPDF({
seekerServerURL,
seekerProjectKey,
seekerAPIToken,
}: generateSeekerComplianceReportPDFParameters): Promise<void> {
let res: AxiosResponse
const url = `${seekerServerURL}/rest/api/latest/reports/compliances/export?projectKeys=${seekerProjectKey}`
try {
res = await axios.get(url, {
responseType: 'arraybuffer',
headers: {
Authorization: seekerAPIToken,
Accept: 'application/pdf'
}
})
} catch(error) {
if (error.response) {
core.error(`Seeker Server responded with error code: ${error.response.status}`)
core.error(`Error message: ${error.response.data.message}`)
} else {
core.error("No response from Seeker Server")
core.error(error)
}
return
}
writeFileSync('seeker-compliance-report.pdf', res.data)
}
// Arguments for getSeekerVulnerabilities; the last four are optional
// filters that are only appended to the query when provided.
export interface getSeekerVulnerabilitiesParameters {
seekerServerURL: string,
seekerProjectKey: string,
seekerAPIToken: string,
seekerProjectVersion?: string
onlySeekerVerified?: boolean,
minSeverity?: string,
statuses?: string
}
/**
 * Fetch vulnerabilities for a project from the Seeker REST API, optionally
 * filtered by Seeker verification, minimum severity, statuses, and project
 * version. Returns an empty array (after logging) when the request fails.
 */
export async function getSeekerVulnerabilities({
seekerServerURL,
seekerProjectKey,
seekerProjectVersion,
seekerAPIToken,
onlySeekerVerified,
minSeverity,
statuses
}: getSeekerVulnerabilitiesParameters): Promise<Vulnerability[]> {
// Every request to the Vulnerabilities API needs the Seeker Server URL, the
// Project key, and the API token. ROBUSTNESS FIX: interpolated values are
// URL-encoded so keys/filters containing reserved characters cannot corrupt
// the query string.
let url = `${seekerServerURL}/rest/api/latest/vulnerabilities?format=JSON&language=en&projectKeys=${encodeURIComponent(seekerProjectKey)}&includeHttpHeaders=false&includeHttpParams=false&includeDescription=false&includeRemediation=false&includeSummary=false&includeVerificationProof=false&includeTriageEvents=false&includeComments=false`
// Only add these filters to the URL if they are actually specified
if (onlySeekerVerified === true) {
url += '&onlySeekerVerified=true'
}
if (minSeverity) {
url += `&minSeverity=${encodeURIComponent(minSeverity)}`
}
if (statuses) {
url += `&statuses=${encodeURIComponent(statuses)}`
}
if (seekerProjectVersion) {
url += `&projectVersions=${encodeURIComponent(seekerProjectVersion)}`
}
let res: AxiosResponse<Vulnerability[]>
try {
res = await axios.get(url, {
headers: {
Authorization: seekerAPIToken
}
})
} catch(error) {
handleAxiosError(error as AxiosError)
return []
}
return res.data
}
// Attach the previously generated compliance PDF (expected in the current
// working directory) to the workflow run as the 'seeker-compliance-report'
// artifact; individual file-upload errors do not abort the upload.
export async function uploadSeekerComplianceReport(): Promise<void> {
core.info('⬆️ Uploading the Seeker Compliance Report PDF as a build artefact')
const client = artifact.create()
await client.uploadArtifact(
'seeker-compliance-report',
['seeker-compliance-report.pdf'],
process.cwd(),
{ continueOnError: true }
)
}
|
allboatsrise/gigya-react-native-plugin-for-sap-customer-data-cloud
|
src/resolvers.d.ts
|
import { GigyaError } from ".";
import { GigyaDictionary, GigyaSocialProviders } from "./Models";
// Factory mapping a GigyaError from an interrupted login flow to the
// resolver capable of continuing it.
export class ResolverFactory {
getResolver(error: GigyaError) : IResolver
}
// Base resolver: carries the registration token that identifies the
// interrupted flow on the Gigya side.
export class IResolver {
regToken: string;
constructor(regToken: string)
}
// Conflicting-accounts interruption: inspect the conflicting account, then
// link via site credentials or a social provider.
export interface LinkAccountResolver extends IResolver {
getConflictingAccount(): Promise<GigyaDictionary>
linkToSite(loginId: string, password: string): Promise<GigyaDictionary>
linkToSocial(provider: GigyaSocialProviders): Promise<GigyaDictionary>
}
// Pending-registration interruption: supply the missing account fields.
export interface PendingRegistrationResolver extends IResolver {
setAccount(params: Record<string, any>): Promise<GigyaDictionary>
}
// Pending-verification interruption: no additional operations are exposed.
export interface PendingVerificationResolver extends IResolver { }
|
allboatsrise/gigya-react-native-plugin-for-sap-customer-data-cloud
|
src/models.ts
|
// Generic payload shape returned by the native Gigya bridge.
export type GigyaDictionary = Record<string, any>;
// Social login providers accepted by the SDK.
export type GigyaSocialProviders = "facebook" | "google" | "yahoo" | "twitter" | "line" | "wechat" | "amazon" | "apple" |
"instagram" | "linkedin"
// Gigya error codes that signal an interrupted (resolvable) login flow;
// values are the numeric error codes reported by the service.
export enum GigyaInterruption {
pendingRegistration = 206001,
pendingVerification = 206002,
conflictingAccounts = 403043,
}
|
Nike-Inc/actions-cerberus-secrets
|
src/utils.ts
|
<gh_stars>1-10
/*
* Copyright 2020-present, Nike, Inc.
* All rights reserved.
*
* This source code is licensed under the Apache-2.0 license found in
* the LICENSE file in the root directory of this source tree.
*
*/
/* eslint-disable i18n-text/no-en */
import * as core from '@actions/core'
import * as exec from '@actions/exec'
/**
 * Determine the AWS region the GitHub runner is executing in by querying the
 * EC2 instance metadata endpoint for its availability zone.
 *
 * @param defaultRegion region to fall back to when metadata is unreachable
 * @returns the detected region, or `defaultRegion` on failure
 */
export async function getRunnerRegion(defaultRegion = ''): Promise<string> {
  const METADATA_GET_REGION_COMMAND =
    'curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone'
  let commandOutput = ''
  let commandError = ''
  const options = {
    silent: true,
    listeners: {
      stdout: (data: Buffer) => {
        commandOutput += data.toString()
      },
      stderr: (data: Buffer) => {
        commandError += data.toString()
      }
    }
  }
  core.info(`Fetching region from Runner`)
  core.debug(`Running command to fetch region: ${METADATA_GET_REGION_COMMAND}`)
  try {
    await exec.exec(METADATA_GET_REGION_COMMAND, undefined, options)
    core.debug(`Command Output: ${commandOutput}`)
  } catch (ex) {
    // BUG FIX: 'ex' is unknown under strict TS; narrow before .message.
    const message = ex instanceof Error ? ex.message : String(ex)
    core.debug(`Exception occurred. ${message}`)
    core.info(
      `Failed to fetch region information from Runner. Defaulting to given region: ${defaultRegion}`
    )
    if (commandError) {
      core.debug(`Error: ${commandError}`)
    }
  }
  // The metadata endpoint returns an availability zone (e.g. "us-east-1a");
  // dropping the final character yields the region. NOTE(review): confirm
  // this also holds for local/wavelength zones, whose suffixes are longer.
  const region = commandOutput ? commandOutput.slice(0, -1) : defaultRegion
  core.info(`Runner Region: ${region}`)
  return region
}
|
Nike-Inc/actions-cerberus-secrets
|
src/main.ts
|
/*
* Copyright 2020-present, Nike, Inc.
* All rights reserved.
*
* This source code is licensed under the Apache-2.0 license found in
* the LICENSE file in the root directory of this source tree.
*
*/
/* eslint-disable i18n-text/no-en */
import * as core from '@actions/core'
import Cerberus from './cerberus'
import {getRunnerRegion} from './utils'
// Entry point: resolve configuration, determine the Cerberus region (input
// override, else EC2 metadata, defaulting to us-east-1), then read the SDB
// secrets into environment variables.
async function run(): Promise<void> {
  try {
    core.debug('Gathering all inputs')
    const cerberusUrl: string = core.getInput('cerberusUrl')
    const cerberusUserInputRegion: string = core.getInput('cerberusRegion')
    const sdbPath: string = core.getInput('sdbPath')
    const sdbEnvVariableMapping: string = core.getInput('sdbEnvVariableMapping')
    // An explicit input wins; otherwise ask the runner's instance metadata.
    const cerberusRegion = cerberusUserInputRegion
      ? cerberusUserInputRegion
      : await getRunnerRegion('us-east-1')
    core.info(`Using Cerberus : ${cerberusUrl} in Region ${cerberusRegion}`)
    core.info(`Reading SDB : ${sdbPath}`)
    core.info(`Looking for ${sdbEnvVariableMapping}`)
    core.debug('Converting given sdbEnvVariableMapping to JSON object')
    // Throws SyntaxError (caught below) when the mapping is not valid JSON.
    const mapKeyVariable = JSON.parse(sdbEnvVariableMapping)
    const cerberus = new Cerberus(cerberusUrl, cerberusRegion, true)
    // Awaiting ensures the action does not finish before the secrets are
    // exported once readToEnv returns a promise (harmless if it is void).
    await cerberus.readToEnv(sdbPath, mapKeyVariable)
  } catch (error) {
    // 'error' is unknown under strict TS; narrow before reading .message.
    core.setFailed(error instanceof Error ? error.message : String(error))
  }
}
run()
|
Nike-Inc/actions-cerberus-secrets
|
src/cerberus.ts
|
<reponame>Nike-Inc/actions-cerberus-secrets
/*
* Copyright 2020-present, Nike, Inc.
* All rights reserved.
*
* This source code is licensed under the Apache-2.0 license found in
* the LICENSE file in the root directory of this source tree.
*
*/
/* eslint-disable github/array-foreach */
/* eslint-disable github/no-then */
/* eslint-disable @typescript-eslint/explicit-function-return-type */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable i18n-text/no-en */
import * as core from '@actions/core'
import CerberusClient from 'cerberus-node-client'
export default class Cerberus {
  client: CerberusClient
  /**
   * @param hostUrl base URL of the Cerberus instance
   * @param region AWS region used for authentication
   * @param debug enable client debug logging
   */
  constructor(hostUrl: string, region: string, debug: boolean) {
    core.debug('Creating Cerberus object and connecting to cerberus')
    this.client = new CerberusClient({
      hostUrl,
      region,
      debug
    })
  }
  /**
   * Read the secrets stored at `sdbPath` and export each requested key as
   * the mapped environment variable (values are also registered as secrets
   * so they are masked in logs). Keys missing from the SDB produce a
   * warning; a client error fails the workflow step.
   *
   * IMPROVEMENT: the original used a fire-and-forget .then() chain, so
   * callers could not await completion; this async version returns the
   * promise while preserving the same per-key behavior.
   */
  async readToEnv(sdbPath: string, mapKeyVariable: any): Promise<void> {
    core.debug(`Reading sdb ${sdbPath}`)
    try {
      const secrets = await this.client.getSecureData(sdbPath)
      core.debug(`Received secrets in sdb ${sdbPath}`)
      for (const requestedKeyname of Object.keys(mapKeyVariable)) {
        core.debug(`Looking for ${requestedKeyname}`)
        if (secrets[requestedKeyname]) {
          core.debug(`Found ${requestedKeyname}`)
          core.setSecret(secrets[requestedKeyname])
          core.exportVariable(
            mapKeyVariable[requestedKeyname],
            secrets[requestedKeyname]
          )
        } else {
          core.warning(
            `Key ${requestedKeyname} NOT FOUND. Not setting into environment`
          )
        }
      }
    } catch (ex) {
      core.setFailed(ex instanceof Error ? ex.message : String(ex))
    }
  }
}
|
sheva007/imdb-crawler
|
src/controller/CrawlLinksAction.ts
|
<filename>src/controller/CrawlLinksAction.ts
import { Request, Response } from "express";
import axois from 'axios';
import * as cheerio from 'cheerio';
import { movieSave } from "../module/movies";
import * as fs from 'fs';
/**
* crawl links
*/
/**
 * POST / — crawl the submitted IMDb links (one per line), save or update
 * each movie, and respond with an HTML results page. Responds 406 when the
 * request contains no links.
 */
export async function crawlLinksAction(request: Request, response: Response) {
  if (request.body.links) {
    // split the incoming string into links
    const links = request.body.links.split('\n') as [];
    // numbers to be used in stats
    let crawled = 0;
    let newAdded = 0;
    let updated = 0;
    let failed = 0;
    // HTML templates used to build the final results page
    const itemHTML = fs.readFileSync(__dirname + '/../../assets/result_item.html').toString();
    let pageHTML = fs.readFileSync(__dirname + '/../../assets/results_page.html').toString();
    let collection = "";
    // loop around the links
    for (let i = 0; i < links.length; i++) {
      const url = links[i] as string;
      if (url.indexOf('imdb.com/title/tt') !== -1) {//only process imdb URLs
        try {
          //use RegEx to extract the movie id, thanks to https://stackoverflow.com/questions/31623704/js-regular-expression-to-match-imdb-url
          // ROBUSTNESS FIX: guard the match instead of indexing into null.
          const idMatch = url.match(/(?:.*\.|.*)imdb.com\/(?:t|T)itle(?:\?|\/)(..\d+)/i);
          if (!idMatch) {
            console.error(`Could not extract an IMDb id from: ${url}`);
            continue; // counted as rejected (not crawled), as before
          }
          const imdb_id = idMatch[1];
          // HTTP request for the url
          const page = await axois.get(url.trim());
          // prepare cheerio
          let $ = cheerio.load(page.data);
          // declearing the variables that will be used
          let title: string, year: string, release: string, rating: string;
          // slecting the title string
          title = $('h1').text();
          // removing any excess spaces
          title = title.trim();
          // getting the year from the title string, e.g. "Name (1999)"
          const start = title.indexOf("(");
          year = title.substr(start + 1, 4);
          // clearing the title string of the year
          title = title.substr(0, start - 1);
          // getting the realse date
          release = $('div.subtext').children().last().text();
          release = release.trim();
          // getting the rating
          rating = $('span[itemprop|="ratingValue"]').text();
          // save and register the returned code into stats
          crawled += 1;
          const code = await movieSave(imdb_id, +rating, release, title, year);
          if (code === 0) {
            newAdded += 1;
          } else if (code === 1) {
            updated += 1;
          } else {
            failed += 1;
          }
          if (code === 0 || code === 1) {
            // add the needed html for this successfully stored movie
            let singleItem = itemHTML;
            singleItem = singleItem.replace("[[title]]", title);
            singleItem = singleItem.replace("[[year]]", year);
            singleItem = singleItem.replace("[[release]]", release);
            singleItem = singleItem.replace("[[rating]]", rating);
            singleItem = singleItem.replace("[[url]]", "https://www.imdb.com/title/" + imdb_id + "/");
            collection += singleItem;
          }
        } catch (err) {
          console.error(err);
        }
      }
    }
    // fill the stats placeholders in the page template
    pageHTML = pageHTML.replace('[[total]]', '' + links.length);
    pageHTML = pageHTML.replace('[[rejected]]', '' + (links.length - crawled));
    pageHTML = pageHTML.replace('[[crawled]]', '' + crawled);
    pageHTML = pageHTML.replace('[[newAdded]]', '' + newAdded);
    pageHTML = pageHTML.replace('[[updated]]', '' + updated);
    pageHTML = pageHTML.replace('[[failed]]', '' + failed);
    pageHTML = pageHTML.replace('[[collection]]', collection);
    response.send(pageHTML);
    // BUG FIX: without this return, execution fell through to the 406 path
    // below after a successful response had already been sent.
    return;
  }
  // INPUT_Not_Acceptable
  response.status(406);
  response.end();
  return;
}
|
sheva007/imdb-crawler
|
src/controller/MovieGetAllAction.ts
|
import { Request, Response } from "express";
import { movieGetAll } from "../module/movies";
/**
* return all movies
*/
/**
 * GET /movies — respond with every stored movie.
 */
export async function movieGetAllAction(request: Request, response: Response) {
  const movies = await movieGetAll();
  response.send(movies);
}
|
sheva007/imdb-crawler
|
src/index.ts
|
<reponame>sheva007/imdb-crawler
import "reflect-metadata";
import { createConnection } from "typeorm";
import { Request, Response } from "express";
import * as express from "express";
import * as cors from "cors";
import * as bodyParser from "body-parser";
import AppRoutes from "./routes";
// helmet is loaded via require (no bundled type declarations in this setup).
var helmet = require('helmet');
// HTTP port: environment override or 3000 by default.
const port = process.env.PORT || 3000;
// create connection with database
// note that it's not active database connection
// TypeORM creates connection pools and uses them for your requests
createConnection().then(async connection => {
// create express app
const app = express();
app.use(cors())
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.use(helmet());
// register all application routes
AppRoutes.forEach(route => {
app[route.method](route.path, (request: Request, response: Response, next: Function) => {
route.action(request, response)
// NOTE(review): '() => next' returns the next function without calling
// it, so next() never runs on success — only on error below. Confirm
// this is intentional before "fixing" it; actions send the response
// themselves, so invoking next() here could cause double handling.
.then(() => next)
.catch(err => next(err));
});
});
// run app
app.listen(port, () => {
console.log(`server started. on ${port}`);
});
}).catch(error => console.log("TypeORM connection error: ", error));
|
sheva007/imdb-crawler
|
src/controller/HomeAction.ts
|
import { Request, Response } from "express";
import * as path from 'path';
/**
* home page
*/
/**
 * GET / — serve the static link-submission form.
 */
export async function homeAction(request: Request, response: Response) {
  // IDIOM FIX: the original concatenated the whole path inside path.join
  // ("path.join(__dirname + '/...')"), which defeats join's normalization;
  // pass the segments separately instead. The resolved path is identical.
  response.sendFile(path.join(__dirname, '..', '..', 'assets', 'links_form.html'));
}
|
sheva007/imdb-crawler
|
src/routes.ts
|
<filename>src/routes.ts
import { homeAction } from "./controller/HomeAction";
import { crawlLinksAction } from "./controller/CrawlLinksAction";
import { movieGetAllAction } from "./controller/MovieGetAllAction";
/**
* All routes.
*/
/**
 * All routes: path + HTTP method + controller action.
 */
const Routes = [
  {
    path: "/",
    method: "get",
    action: homeAction
  },
  {
    path: "/",
    method: "post",
    action: crawlLinksAction
  },
  {
    path: "/movies",
    method: "get",
    action: movieGetAllAction
  },
];
/**
 * All application routes.
 *
 * IDIOM FIX: the original `Array.prototype.concat(Routes)` produced a
 * shallow copy of Routes via concat's array-flattening on an empty `this`;
 * a spread copy expresses the same result directly.
 */
export default [...Routes];
|
sheva007/imdb-crawler
|
src/module/movies.ts
|
<reponame>sheva007/imdb-crawler<filename>src/module/movies.ts
import { getManager } from "typeorm";
import { Movie } from "../entities/Movie";
/**
* get all Movies.
*/
/**
 * Fetch every Movie row from the database.
 */
export async function movieGetAll() {
  return getManager().getRepository(Movie).find();
}
/**
* save a Movie
* returns 0 if the movie saved as new
* return 1 if the movie was only updated
* returns -1 if there was a db exeption
* return -2 if the input is not valid
*/
export async function movieSave(imdb_id: string, rating: number, release: string, title: string, year: string) {
// NOTE(review): this falsy check also rejects rating === 0 and rating NaN
// (e.g. from a failed numeric parse upstream) — confirm a 0 rating is
// genuinely invalid before relying on this.
if (!imdb_id || !rating || !release || !title || !year) {
return -2;
}
try {
const repository = getManager().getRepository(Movie);
// look up an existing row by its IMDb id
const oldMovie = await repository.findOne({
imdb_id: imdb_id
});
if (oldMovie instanceof Movie) {//the movie already exists so only update the rating
oldMovie.rating = rating;
await repository.update(oldMovie.id, oldMovie);
return 1;
} else {//this is a new movie
const newMovie = new Movie();
newMovie.imdb_id = imdb_id;
newMovie.rating = rating;
newMovie.release = release;
newMovie.title = title;
newMovie.year = year;
await repository.save(newMovie);
return 0;
}
} catch (err) {
// database failure: log and signal -1 to the caller
console.log(err);
return -1;
}
}
|
sheva007/imdb-crawler
|
src/entities/Movie.ts
|
<reponame>sheva007/imdb-crawler<gh_stars>0
import { Entity, PrimaryGeneratedColumn, CreateDateColumn, Column, UpdateDateColumn } from "typeorm";
@Entity()
export class Movie {
// surrogate primary key
@PrimaryGeneratedColumn()
id: number;
// IMDb title id parsed from the URL (e.g. "tt0133093")
@Column({
type: "varchar",
length: 11,
nullable: false
})
imdb_id: string;
// movie title (the crawler strips the trailing year)
@Column({
type: "varchar",
length: 300,
nullable: false
})
title: string;
// four-character release year kept as text
@Column({
type: "varchar",
length: 4,
nullable: false
})
year: string;
// human-readable release date text scraped from the page
@Column({
type: "varchar",
length: 100,
nullable: false
})
release: string;
// IMDb rating; movieSave refreshes this on every re-crawl
@Column({
type: "float",
nullable: false
})
rating: number;
// maintained by TypeORM: last time this row was updated (re-crawled)
@UpdateDateColumn()
lastCrawlingDate: Date;
// maintained by TypeORM: when this movie was first crawled
@CreateDateColumn()
firstCrawlingDate: Date;
}
|
gigicupp/code-challenge
|
test/pages/create_account.test.tsx
|
import { fireEvent, render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import fetchMock from 'jest-fetch-mock';
import CreateAccount from 'src/pages/create_account';
// Smoke test for the CreateAccount page: submitting the form should trigger
// the submit handler (observed via its console.log call).
describe('CreateAccount', () => {
// isolate fetch between tests — the page calls fetch on submit
beforeEach(() => {
fetchMock.enableMocks();
});
afterEach(() => {
fetchMock.resetMocks();
});
test('should be able to submit form', () => {
// spy on console.log: handleSubmit logs "submitting form" when it runs
const logger = jest.spyOn(console, "log");
const { getByRole, getByTestId } = render(<CreateAccount />);
const button = getByRole('button');
expect(button).toBeTruthy();
fireEvent.submit(getByTestId("form"));
expect(logger).toBeCalledTimes(1);
})
});
|
gigicupp/code-challenge
|
src/pages/success.tsx
|
import React from 'react';
import style from '../styles/success.module.scss';
import Image from 'next/image';
export default () => {
return (
<div className={style.container}>
<h2>Congratulations!</h2>
<p>Your account has been created successfully.</p>
<Image
src='/images/giphy.gif'
height={200}
width={200}
alt='celebration'
/>
</div>
)
}
|
gigicupp/code-challenge
|
test/pages/api/create_new_account.test.ts
|
<gh_stars>0
import { expect } from '@jest/globals';
import createNewAccount from 'src/pages/api/create_new_account';
import { mockRequest } from 'test/utils';
// API tests for /api/create_new_account: the endpoint answers 200 with a
// boolean 'result' — true only when all fields are present and the two
// password entries match.
// NOTE(review): the '<PASSWORD>' literals below are anonymization
// placeholders left in the checked-in source; originally these were real
// sample passwords (identical in the first test, different in the last).
describe('/api/create_new_account', () => {
test('returns true', async () => {
const { req, res } = mockRequest({
method: 'POST',
body: {
username: 'adminaccount',
password: '<PASSWORD>!',
password1: '<PASSWORD>!'
},
});
await createNewAccount(req, res);
expect(res._getStatusCode()).toBe(200);
expect(res._getJSONData()).toEqual({
result: true,
});
});
// all fields empty → rejected
test('returns false for empty inputs', async () => {
const { req, res } = mockRequest({
method: 'POST',
body: {
username: '',
password: '',
password1: ''
},
});
await createNewAccount(req, res);
expect(res._getStatusCode()).toBe(200);
expect(res._getJSONData()).toEqual({
result: false,
});
});
// username present but both password fields empty → rejected
test('returns false for empty passwords', async () => {
const { req, res } = mockRequest({
method: 'POST',
body: {
username: 'adminaccount',
password: '',
password1: ''
},
});
await createNewAccount(req, res);
expect(res._getStatusCode()).toBe(200);
expect(res._getJSONData()).toEqual({
result: false,
});
});
// the retyped password differs from the first entry → rejected
test('returns false for mismatching passwords', async () => {
const { req, res } = mockRequest({
method: 'POST',
body: {
username: 'adminaccount',
password: '<PASSWORD>!',
password1: '<PASSWORD>'
},
});
await createNewAccount(req, res);
expect(res._getStatusCode()).toBe(200);
expect(res._getJSONData()).toEqual({
result: false,
});
});
});
|
gigicupp/code-challenge
|
src/pages/api/create_new_account.ts
|
<filename>src/pages/api/create_new_account.ts
import type { NextApiRequest, NextApiResponse } from 'next';
import checkErrors from '../../utils/checkErrors';
// Fields posted by the create-account form (password1 is the retyped
// password; errors carries client-side validation state).
interface CreateNewAccountParameters {
username: string;
password: string;
password1: string;
errors: object;
}
// JSON response shape: overall result plus optional per-field errors.
interface BooleanResult {
result: boolean;
errors?: Record<string, string>;
}
export default function createNewAccount(req: NextApiRequest, res: NextApiResponse<BooleanResult>) {
const { username, password, password1, errors }: CreateNewAccountParameters = JSON.parse(req.body);
if(!username.length || !password.length || !<PASSWORD>.length || password !== <PASSWORD>) {
res.status(200).json({ result: false });
} else {
res.status(200).json({ result: true });
}
}
|
gigicupp/code-challenge
|
src/pages/create_account.tsx
|
<filename>src/pages/create_account.tsx
import Head from 'next/head';
import React, { FormEvent } from 'react';
import styles from 'src/styles/create_account.module.scss';
import Image from 'next/image';
import Router from 'next/router';
import checkErrors from '../utils/checkErrors';
import checkRetypePass from '../utils/passwordConfirmation';
import validLength from '../utils/validateLength';
// The page takes no props.
type CreateAccountProps = {}
// Validation messages per field; password collects one message per failed
// rule, hence the array.
type ErrorTypes = {
username: string
password: Array<string>
password1: string
}
// Component state: field values, visibility toggles (show/show1 —
// presumably for the two password inputs; confirm against togglePassword),
// and the current validation errors.
type StateTypes = {
username: string
show: boolean
password: string
show1: boolean
password1: string
errors: ErrorTypes
}
// Password rules, applied in handleChange via lookahead regexes.
let hasSpecialChar = RegExp(/^(?=.*[!@#$&*])/);
let hasLetter = RegExp(/^(?=.*[A-Za-z])/);
let hasDigit = RegExp(/^(?=.*[0-9])/)
class CreateAccount extends React.Component<CreateAccountProps, StateTypes> {
constructor(props) {
super(props);
this.state = {
username: '',
show: false,
password: '',
show1: false,
password1: '',
errors: {
username: '',
password: [],
password1: '',
}
}
this.handleSubmit = this.handleSubmit.bind(this);
this.handleChange = this.handleChange.bind(this);
this.togglePassword = this.togglePassword.bind(this);
}
async handleSubmit(evt: FormEvent) {
let { username, password, password1, errors } = this.state;
evt.preventDefault();
console.log("submitting form");
if(!checkErrors(errors) || !username.length || !password.length || !password1.length) return;
await fetch('/api/password_exposed', {
method: 'POST',
body: JSON.stringify({
username,
password,
})
})
.then(res => res.json())
.then(async data => {
if (data.result) {
alert('The password you entered is exposed, please choose a new one')
}
if (!data.result) {
await fetch('api/create_new_account', {
method: 'POST',
body: JSON.stringify({
username,
password,
<PASSWORD>
})
})
.then(res => res.json())
.then(data => {
if (data.result) {
Router.push('/success')
}
})
}
})
}
handleChange(event) {
event.preventDefault();
let { name, value } = event.target;
let { errors } = this.state;
switch (name) {
case 'username':
errors.username =
value.length < 10 || value.length > 50
? 'Username must be between 10 to 50 characters long'
: '';
break;
case 'password':
errors.password = [];
hasSpecialChar.test(value)
? ''
: errors.password.push(`Must contain at least one symbol (!,@,#,$,%)`)
hasLetter.test(value)
? ''
: errors.password.push(`Must contain at least one letter`)
hasDigit.test(value)
? ''
: errors.password.push(`Must contain at least one number`)
validLength(value)
? ''
: errors.password.push(`Must be between 20 and 50 characters long`)
break;
case '<PASSWORD>':
errors.password1 =
checkRetypePass('password', '<PASSWORD>')
? ''
: `Your password doesn't match with the previously entered password`;
break;
default:
break;
}
this.setState({ errors, [name]: value } as Pick<StateTypes, keyof StateTypes>)
}
togglePassword(event) {
event.preventDefault();
let { id } = event.target
this.setState(prevState => {
return {
[id]: !prevState[id]
} as Pick<StateTypes, keyof StateTypes>
})
}
render() {
let { errors, username, password, password1, show, show1 } = this.state;
return (
<>
<Head>
<title>Create Account</title>
</Head>
<article className={styles.article}>
<div className={styles.formContainer}>
<Image
priority
src='/images/wealtfrontLogo.png'
height={100}
width={100}
alt='Wealthfront Logo'
/>
<h3>Create New Account</h3>
<form data-testid="form" className={styles.form} onSubmit={this.handleSubmit}>
{/* username */}
<div className={styles.username}>
<label className={styles.label}>Username</label>
<input type='text'
placeholder='username'
name='username'
value={username}
className={styles.input}
required={true}
onChange={this.handleChange}>
</input>
{errors.username.length > 0 &&
<span className={styles.error}>{errors.username}</span>}
</div>
{/* password */}
<div className={styles.password}>
<label className={styles.label}>Password
<span onClick={this.togglePassword} id='show'>
{show ? 'Hide Password' : 'Show Password'}
</span>
</label>
<input
type={show ? 'text' : 'password'}
placeholder='password'
name='password'
id='password'
value={password}
className={styles.input}
required={true}
onChange={this.handleChange}>
</input>
{errors.password.length > 0 &&
<span className={styles.error}>
{errors.password.map(error => <div key={error}>{error}</div>)}
</span>}
</div>
{/* confirmation password */}
<div className={styles.password}>
<label className={styles.label}>Confirm Password
<span onClick={this.togglePassword} id='show1'>
{show1 ? 'Hide Password' : 'Show Password'}
</span>
</label>
<input
type={show1 ? 'text' : 'password'}
placeholder='<PASSWORD> password'
name='password1'
id='password1'
value={password1}
className={styles.input}
required={true}
onChange={this.handleChange}>
</input>
{errors.password1.length > 0 &&
<span className={styles.error}>{errors.password1}</span>}
</div>
<button className={styles.button} role='button'>
Create Account
</button>
</form>
</div>
</article>
</>
);
}
}
export default CreateAccount;
|
MaBeuLux88/mongosh
|
packages/service-provider-core/src/service-provider.ts
|
<filename>packages/service-provider-core/src/service-provider.ts<gh_stars>0
import Readable from './readable';
import Writable from './writable';
import Closable from './closable';
import Admin from './admin';
/**
 * Interface for all service providers.
 *
 * Aggregates the read, write, close and admin capabilities that a mongosh
 * service provider must implement; it adds no members of its own.
 */
interface ServiceProvider extends Readable, Writable, Closable, Admin {};

export default ServiceProvider;
|
MaBeuLux88/mongosh
|
packages/browser-repl/src/components/editor.tsx
|
<filename>packages/browser-repl/src/components/editor.tsx
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import AceEditor from 'react-ace';
import { Autocompleter } from '@mongosh/browser-runtime-core';
import { AceAutocompleterAdapter } from './ace-autocompleter-adapter';
import 'brace/ext/language_tools';
import 'brace/mode/javascript';
import './ace-theme';
import ace from 'brace';
const tools = ace.acequire('ace/ext/language_tools');
// Shared default used for every optional callback prop.
const noop = (): void => {
  //
};

interface EditorProps {
  onEnter?(): void | Promise<void>;               // Return key pressed
  onArrowUpOnFirstLine?(): void | Promise<void>;  // Up pressed on first line with empty selection
  onArrowDownOnLastLine?(): void | Promise<void>; // Down pressed on last line with empty selection
  onChange?(value: string): void | Promise<void>; // editor content changed
  autocompleter?: Autocompleter;                  // enables Ace autocompletion when provided
  value?: string;                                 // controlled editor content
}
/**
 * Ace-based code editor used by the browser REPL.
 *
 * Wraps react-ace with mongosh-specific key bindings (Return submits; the
 * Up/Down arrows escape the editor only when the caret is on the first/last
 * line) and optional autocompletion.
 */
export class Editor extends Component<EditorProps> {
  static propTypes = {
    onEnter: PropTypes.func,
    onArrowUpOnFirstLine: PropTypes.func,
    onArrowDownOnLastLine: PropTypes.func,
    onChange: PropTypes.func,
    value: PropTypes.string
  };

  // All callbacks default to no-ops so consumers may omit any of them.
  static defaultProps = {
    onEnter: noop,
    onArrowUpOnFirstLine: noop,
    onArrowDownOnLastLine: noop,
    onChange: noop,
    value: ''
  };

  // Underlying Ace editor instance, captured on load so the key handlers
  // below can query the current selection and document.
  private editor: any;

  // Stores the Ace instance and, when an autocompleter prop is supplied,
  // wires it up: typing a word character or '.' re-triggers completion.
  private onEditorLoad = (editor: any): void => {
    this.editor = editor;
    if (this.props.autocompleter) {
      editor.commands.on('afterExec', function(e) {
        if (e.command.name === 'insertstring' && /^[\w.]$/.test(e.args)) {
          editor.execCommand('startAutocomplete');
        }
      });
      // NOTE(review): setCompleters is global to Ace, so multiple Editor
      // instances would share the last-registered completer — confirm this
      // is acceptable for the intended usage.
      tools.setCompleters([new AceAutocompleterAdapter(this.props.autocompleter)]);
    }
  };

  render(): JSX.Element {
    return (<AceEditor
      showPrintMargin={false}
      showGutter={false}
      highlightActiveLine
      setOptions={{
        enableBasicAutocompletion: !!this.props.autocompleter,
        enableLiveAutocompletion: !!this.props.autocompleter,
        enableSnippets: false,
        showLineNumbers: false,
        tabSize: 2
      }}
      name={`mongosh-ace-${Date.now()}`}
      mode="javascript"
      theme="mongosh"
      onChange={this.props.onChange}
      onLoad={this.onEditorLoad}
      commands={[
        {
          name: 'return',
          bindKey: { win: 'Return', mac: 'Return' },
          exec: (): void => {
            this.props.onEnter();
          }
        },
        {
          // Only invoke the callback when the caret sits on the first line
          // with no selection; otherwise move the cursor as usual.
          name: 'arrowUpOnFirstLine',
          bindKey: { win: 'Up', mac: 'Up' },
          exec: (): void => {
            const selectionRange = this.editor.getSelectionRange();
            if (!selectionRange.isEmpty() || selectionRange.start.row !== 0) {
              return this.editor.selection.moveCursorUp();
            }
            this.props.onArrowUpOnFirstLine();
          }
        },
        {
          // Mirror image of the handler above, for the last line.
          name: 'arrowDownOnLastLine',
          bindKey: { win: 'Down', mac: 'Down' },
          exec: (): void => {
            const selectionRange = this.editor.getSelectionRange();
            const lastRowIndex = this.editor.session.getLength() - 1;
            if (!selectionRange.isEmpty() || selectionRange.start.row !== lastRowIndex) {
              return this.editor.selection.moveCursorDown();
            }
            this.props.onArrowDownOnLastLine();
          }
        }
      ]}
      width="100%"
      maxLines={Infinity}
      editorProps={{
        $blockScrolling: Infinity
      }}
      value={this.props.value}
    />);
  }
}
|
MaBeuLux88/mongosh
|
packages/browser-repl/src/components/utils/inspect.spec.ts
|
<reponame>MaBeuLux88/mongosh<gh_stars>0
import { ObjectId } from 'bson';
import { expect } from 'chai';
import { inspect } from './inspect';
// Unit tests for the REPL's value formatter. Each case asserts the exact
// string `inspect` produces for a representative input.
describe('inspect', () => {
  context('with simple types', () => {
    it('inspects numbers', () => {
      expect(
        inspect(1)
      ).to.equal('1');
    });

    it('inspects strings', () => {
      // Strings are re-quoted, matching Node's util.inspect convention.
      expect(
        inspect('123')
      ).to.equal('\'123\'');
    });

    it('inspects booleans', () => {
      expect(
        inspect(true)
      ).to.equal('true');

      expect(
        inspect(false)
      ).to.equal('false');
    });

    it('inspects null', () => {
      expect(
        inspect(null)
      ).to.equal('null');
    });

    it('inspects undefined', () => {
      expect(
        inspect(undefined)
      ).to.equal('undefined');
    });

    // Skipped: Symbol and BigInt formatting is not implemented yet.
    it.skip('inspects Symbol', () => {
      expect(
        inspect(Symbol('123'))
      ).to.equal('Symbol(123)');
    });

    it.skip('inspects BigInt', () => {
      expect(
        // eslint-disable-next-line no-undef
        inspect(BigInt(1))
      ).to.equal('1n');
    });
  });

  context('with BSON types', () => {
    it('inspects ObjectId', () => {
      expect(
        inspect(new ObjectId('0000007b3db627730e26fd0b'))
      ).to.equal('ObjectID("0000007b3db627730e26fd0b")');
    });
  });

  context('with objects', () => {
    context('when collapsed', () => {
      it('formats objects on one line', () => {
        expect(
          inspect({ x: 1, y: 2 })
        ).to.equal('{ x: 1, y: 2 }');
      });
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/build/src/release.ts
|
<filename>packages/build/src/release.ts<gh_stars>0
import os from 'os';
import Config from './config';
import compileExec from './compile-exec';
import uploadDownloadCenterConfig from './download-center';
import Platform from './platform';
import zip from './zip';
import S3 from 'aws-sdk/clients/s3';
/**
 * Run the release process.
 *
 * Builds and compresses the mongosh executable for the current platform
 * and, on macOS only, uploads the download-center configuration. The
 * remaining release steps are documented inline but not implemented yet.
 *
 * @param {Config} config - the configuration, usually config/build.config.js.
 */
const release = async(config: Config) => {
  const platform = os.platform();

  // 1. Build the executable.
  await compileExec(config.input, config.outputDir, platform);

  // 2. Sign the executable for each OS.
  // 3. Zip the executable.
  // NOTE(review): `artifact` is currently unused; it feeds the
  // commented-out Evergreen upload below, so it is kept deliberately.
  const artifact = await zip(config.input, config.outputDir, platform, config.version);

  // 4. Create & sign the .deb (only on linux)
  // 5. Create & sign the .rpm (only on linux)
  // 6. Create & sign the .msi (only on win)
  //
  // If this is a new release tag.
  //
  // 1. Publish the .deb (only on linux)
  // 2. Publish the .rpm (only on linux)
  // 3. Create PR for Homebrew (only on macos)
  // 4. Upload artifacts to S3 for Evergreen and downloads.
  // await uploadArtifactToEvergreen(
  //   artifact,
  //   config.evgAwsKey,
  //   config.evgAwsSecret,
  //   config.project,
  //   config.revision
  // );
  // await uploadArtifactToDownloads();
  // 5. Create Github release.
  // 6. Create download center config and upload. (only on macos)
  // 7. Publish to NPM. (only on macos)
  if (platform === Platform.MacOs) {
    await uploadDownloadCenterConfig(
      config.version,
      config.downloadCenterAwsKey,
      config.downloadCenterAwsSecret
    );
  }
};
|
MaBeuLux88/mongosh
|
packages/shell-api/src/cursor-iteration-result.ts
|
/**
 * Array subclass holding one batch of documents iterated from a cursor.
 *
 * `toReplString` and `shellApiType` are installed as non-enumerable own
 * properties so that iterating or serialising the result exposes only the
 * documents themselves.
 */
export class CursorIterationResult extends Array {
  toReplString: () => this;
  shellApiType: () => string;

  constructor(...args) {
    super(...args);
    Object.defineProperties(this, {
      // The REPL prints the array contents directly.
      toReplString: {
        value: () => this,
        enumerable: false
      },
      // Type tag used by the output renderer to pick a formatter.
      shellApiType: {
        value: () => 'CursorIterationResult',
        enumerable: false
      }
    });
  }
}
|
MaBeuLux88/mongosh
|
packages/shell-api/src/explainable.spec.ts
|
<reponame>MaBeuLux88/mongosh<gh_stars>0
import sinon from 'sinon';
import Mapper from '../../mapper/lib';
import { Collection, Database, Explainable } from './shell-api';
import * as signatures from './shell-api-signatures';
import { expect } from 'chai';
/**
* Test that an explainable method proxies the respective Mapper method correctly,
* with the right arguments and returning the right result.
*
* It ensures:
* - that the method is defined in the shell api and that is meant to be a function
* - that the mapper method to be proxied to exists
* - that the mapper method is called with an explainable as first argument and with
* the rest of invokation arguments.
* - that the result of mapper invokation is returned.
*
* @param {String} name - the name of the method to invoke
*/
function testWrappedMethod(name: string): void {
  // The shell API must declare the method as a function attribute.
  const attribute = signatures.Explainable.attributes[name];
  expect(attribute).to.exist;
  expect(attribute.type).to.equal('function');

  // Stub the corresponding mapper method (mapper.explainable_<name>).
  const mock = sinon.mock();
  const mapper: Mapper = sinon.createStubInstance(Mapper, {
    [`explainable_${name}`]: mock
  });

  const args = [1, 2, 3];
  const retVal = {};
  const database = new Database('db1');
  const collection = new Collection(mapper, database, 'coll1');
  const explainable = new Explainable(mapper, collection);

  // The mapper method must receive the explainable itself plus the original
  // call arguments, and its return value must be passed through untouched.
  mock.withArgs(explainable, ...args).returns(retVal);
  const result = explainable[name](...args);
  mock.verify();
  expect(result).to.equal(retVal);
}
// Each Explainable method must proxy to its mapper counterpart.
describe('Explainable', () => {
  [
    'getCollection',
    'getVerbosity',
    'setVerbosity'
  ].forEach((methodName) => {
    describe(`#${methodName}`, () => {
      // Bug fix: the description previously claimed mapper.collection_<name>,
      // but testWrappedMethod stubs and verifies mapper.explainable_<name>.
      it(`wraps mapper.explainable_${methodName}`, () => {
        testWrappedMethod(methodName);
      });
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/shell-api/src/shell-bson.ts
|
<filename>packages/shell-api/src/shell-bson.ts
import bson from 'bson';
// Arguments accepted by the Date/ISODate helpers: zero args means "now",
// one arg is forwarded to the Date constructor, two or more are UTC parts.
type DateConstructorArguments = [ any?, any?, ...any[] ];

// Builds a Date the way the legacy mongo shell does, dispatching purely on
// the number of arguments received.
function dateHelper(...args: DateConstructorArguments): Date {
  switch (args.length) {
    case 0:
      return new Date();
    case 1:
      return new Date(args[0]);
    default:
      return new Date(Date.UTC(...args));
  }
}
// Shell-facing BSON constructor helpers. These mirror the legacy mongo
// shell's global functions; most are thin wrappers around the bson package
// so they can be called with or without `new`.
export default {
  RegExp: RegExp,
  DBRef: function(...args) {
    return new bson.DBRef(...args);
  },
  DBPointer: function(...args) {
    return new bson.DBPointer(...args);
  },
  Map: bson.Map,
  MaxKey: function(...args) {
    return new bson.MaxKey(...args);
  },
  MinKey: function(...args) {
    return new bson.MinKey(...args);
  },
  ObjectId: function(...args) {
    return new bson.ObjectID(...args);
  },
  Symbol: function(...args) {
    return new bson.BSONSymbol(...args);
  },
  Timestamp: function(...args) {
    return new bson.Timestamp(...args);
  },
  Code: function(c, s): bson.Code {
    return new bson.Code(c, s);
  },
  // Decimal128 from any stringifiable value; defaults to '0'.
  NumberDecimal: function(s): bson.Decimal128 {
    if (s === undefined) {
      s = '0';
    }
    return bson.Decimal128.fromString(s.toString());
  },
  // NOTE(review): returns a plain JS number, not a BSON Int32 — confirm this
  // matches the old shell's NumberInt semantics.
  NumberInt: function(s): any {
    return parseInt(s, 10);
  },
  NumberLong: function(v): bson.Long {
    if (v === undefined) {
      v = 0;
    }
    return bson.Long.fromNumber(v);
  },
  // Called with `new` it yields a Date; called plainly it yields the
  // date's string form — matching legacy shell behaviour.
  Date: function(...args: DateConstructorArguments): Date|string {
    const date = dateHelper(...args);
    if (new.target) {
      return date;
    }
    return date.toString();
  },
  ISODate: function(...args: DateConstructorArguments): Date {
    return dateHelper(...args);
  },
  // Binary constructors from base64 / hex input with various subtypes.
  BinData: function(subtype, b64string): bson.Binary { // this from 'help misc' in old shell
    const buffer = Buffer.from(b64string, 'base64');
    return new bson.Binary(buffer, subtype);
  },
  HexData: function(subtype, hexstr): bson.Binary {
    const buffer = Buffer.from(hexstr, 'hex');
    return new bson.Binary(buffer, subtype);
  },
  UUID: function(hexstr): bson.Binary {
    const buffer = Buffer.from(hexstr, 'hex');
    return new bson.Binary(buffer, bson.Binary.SUBTYPE_UUID);
  },
  MD5: function(hexstr): bson.Binary {
    const buffer = Buffer.from(hexstr, 'hex');
    return new bson.Binary(buffer, bson.Binary.SUBTYPE_MD5);
  },
  // Serialized BSON size of the given object, in bytes.
  bsonsize: function(object): any {
    return bson.calculateObjectSize(object);
  }
};
|
MaBeuLux88/mongosh
|
packages/browser-repl/src/components/shell-output-line.tsx
|
<reponame>MaBeuLux88/mongosh
import React, { Component } from 'react';
import classnames from 'classnames';
import PropTypes from 'prop-types';
import Icon from '@leafygreen-ui/icon';
import { LineWithIcon } from './utils/line-with-icon';
import { HelpOutput } from './types/help-output';
import { ShowDbsOutput } from './types/show-dbs-output';
import { ShowCollectionsOutput } from './types/show-collections-output';
import { CursorOutput } from './types/cursor-output';
import { CursorIterationResultOutput } from './types/cursor-iteration-result-output';
import { ObjectOutput } from './types/object-output';
import { SimpleTypeOutput } from './types/simple-type-output';
import { ErrorOutput } from './types/error-output';
const styles = require('./shell-output-line.less');
// Value carried by an output entry; its shape depends on shellApiType.
type ShellOutputEntryValue = any;

// Leafygreen icon names this component uses for the line gutter.
type Glyph = 'ChevronRight' | 'XWithCircle' | 'ChevronLeft';

export interface ShellOutputEntry {
  // 'input' echoes what the user typed; 'output'/'error' carry results.
  type: 'input' | 'output' | 'error';
  // Shell API type tag used to pick a specialised renderer.
  shellApiType?: string;
  value: ShellOutputEntryValue;
}

interface ShellOutputLineProps {
  entry: ShellOutputEntry;
}
/**
 * Renders one line of REPL output: an icon indicating input/output/error
 * plus a value renderer chosen from the entry's shellApiType or runtime
 * shape.
 */
export class ShellOutputLine extends Component<ShellOutputLineProps> {
  static propTypes = {
    entry: PropTypes.object.isRequired
  };

  // Picks the most specific renderer for the entry's value.
  private renderValue(): JSX.Element {
    const { shellApiType, value, type } = this.props.entry;

    if (type === 'input' ||
        this.isPreformattedResult(value, shellApiType)) {
      return <pre>{value}</pre>;
    }

    if (this.isPrimitiveOrFunction(value)) {
      return <SimpleTypeOutput value={value} />;
    }

    if (shellApiType === 'Help') {
      return <HelpOutput value={value} />;
    }

    if (shellApiType === 'ShowDatabasesResult') {
      return <ShowDbsOutput value={value} />;
    }

    if (shellApiType === 'ShowCollectionsResult') {
      return <ShowCollectionsOutput value={value} />;
    }

    if (shellApiType === 'Cursor') {
      return <CursorOutput value={value} />;
    }

    if (shellApiType === 'CursorIterationResult') {
      return <CursorIterationResultOutput value={value} />;
    }

    if (this.isError(value)) {
      return <ErrorOutput value={value} />;
    }

    return <ObjectOutput value={value} />;
  }

  private isError(value: any): boolean {
    return typeof value.message === 'string' && typeof value.stack === 'string';
  }

  // Database/Collection results arrive as pre-formatted strings; print them
  // verbatim. Bug fix: the original expression parsed as
  // `(typeof value === 'string' && shellApiType === 'Database') ||
  //  shellApiType === 'Collection'`, which skipped the string check for
  // Collection results.
  private isPreformattedResult(value: any, shellApiType: string): boolean {
    return typeof value === 'string' &&
      (shellApiType === 'Database' || shellApiType === 'Collection');
  }

  private isPrimitiveOrFunction(value: any): boolean {
    // any primitive type including 'null' and 'undefined',
    // function and classes
    return value !== Object(value) ||
      typeof value === 'function';
  }

  // Maps the entry type to its gutter icon.
  private getIconGlyph(): Glyph {
    const { type } = this.props.entry;

    if (type === 'input') {
      return 'ChevronRight';
    }

    if (type === 'error') {
      return 'XWithCircle';
    }

    return 'ChevronLeft';
  }

  render(): JSX.Element {
    const { type } = this.props.entry;

    const className = classnames(
      styles['shell-output-line'],
      styles[`shell-output-line-${type}`]
    );

    const icon = (<Icon
      size={12}
      glyph={this.getIconGlyph()}
      className={styles['shell-output-line-icon']}
    />);

    return <LineWithIcon className={className} icon={icon}>{this.renderValue()}</LineWithIcon>;
  }
}
|
MaBeuLux88/mongosh
|
packages/build/src/config.ts
|
/**
 * Defines the configuration interface for the build system.
 *
 * Deliberately open-ended: any key is permitted so build scripts can pass
 * arbitrary settings (see config/build.config.js for what is actually read).
 */
interface Config {
  [key: string]: any;
}

export default Config;
|
MaBeuLux88/mongosh
|
packages/build/src/zip.ts
|
<filename>packages/build/src/zip.ts
import path from 'path';
import tar from 'tar';
import AdmZip from 'adm-zip';
import Platform from './platform';
/**
 * Get the path to the zip.
 *
 * Windows archives use a .zip extension; every other platform gets a
 * gzipped tarball (.tgz).
 *
 * @param {string} outputDir - The output directory.
 * @param {string} platform - The platform.
 * @param {string} version - The version.
 *
 * @returns {string} The path.
 */
const zipPath = (outputDir: string, platform: string, version: string): string => {
  const extension = platform === Platform.Windows ? 'zip' : 'tgz';
  const archiveName = `mongosh-${version}-${platform}.${extension}`;
  return path.join(outputDir, archiveName);
};
/**
 * Filter out the archive itself when creating the tarball.
 *
 * Returns true for entries that should be included, false for existing
 * .tgz archives (so a previously built tarball is never re-archived).
 *
 * @param {string} entryPath - The path of the tarball entry.
 *
 * @returns {boolean} If the file should be included.
 */
const filterOut = (entryPath: string): boolean => {
  // Renamed parameter: the original `path` shadowed the imported path module.
  return !entryPath.match(/tgz/g);
};
/**
 * Create a zip archive for posix.
 *
 * Produces a gzipped tarball of the entire output directory, excluding any
 * previously created .tgz archives (see filterOut).
 *
 * @param {string} outputDir - The output directory.
 * @param {string} filename - the zip filename.
 */
const zipPosix = async(outputDir: string, filename: string) => {
  // Archive the whole cwd ('.') into `filename`, gzipped.
  const options = { gzip: true, file: filename, cwd: outputDir, filter: filterOut };
  await tar.c(options, [ '.' ]);
};
/**
 * Create a zip archive for windows.
 *
 * @param {string} input - The file to zip.
 * @param {string} filename - the zip filename.
 */
const zipWindows = async(input: string, filename: string) => {
  const admZip = new AdmZip();
  admZip.addLocalFile(input)
  // NOTE(review): AdmZip#writeZip is synchronous in adm-zip, so this await
  // is likely a no-op — confirm before relying on completion semantics.
  await admZip.writeZip(filename);
};
/**
 * Create a gzipped tarball or zip for the provided options.
 *
 * @param {string} input - The file location to zip.
 * @param {string} outputDir - Where to save the zip.
 * @param {string} platform - The platform.
 * @param {string} version - The version.
 *
 * @returns {string} The filename of the zip.
 */
const zip = async(input: string, outputDir: string, platform: string, version: string): Promise<string> => {
  const filename = zipPath(outputDir, platform, version);
  console.log('mongosh: zipping:', filename);
  // Bug fix: zipWindows/zipPosix are async. The original invoked them
  // without awaiting, so `zip` resolved before the archive existed on disk
  // and any archiving error became an unhandled rejection.
  if (platform === Platform.Windows) {
    await zipWindows(input, filename);
  } else {
    await zipPosix(outputDir, filename);
  }
  return filename;
};
export { zipPath, zipPosix, zipWindows };
|
MaBeuLux88/mongosh
|
packages/shell-api/src/index.ts
|
import * as signatures from './shell-api-signatures';
import ShellBson from './shell-bson';
import toIterator from './toIterator';
export { signatures, ShellBson, toIterator };
export { Help } from './help';
export { CursorIterationResult } from './cursor-iteration-result';
export * from './shell-api';
|
MaBeuLux88/mongosh
|
packages/browser-runtime-electron/src/electron-runtime.ts
|
import {
ElectronInterpreterEnvironment
} from './electron-interpreter-environment';
import {
Runtime,
EvaluationResult,
OpenContextRuntime,
Completion
} from '@mongosh/browser-runtime-core';
import { ServiceProvider } from '@mongosh/service-provider-core';
/**
 * Runtime implementation backed by Electron's interpreter environment.
 *
 * Thin wrapper that delegates evaluation and completion requests to an
 * OpenContextRuntime configured with an ElectronInterpreterEnvironment.
 */
export class ElectronRuntime implements Runtime {
  private openContextRuntime: OpenContextRuntime;

  // messageBus, when provided, receives runtime telemetry/events.
  constructor(serviceProvider: ServiceProvider, messageBus?: {
    emit: (eventName: string, ...args: any[]) => void;
  }) {
    this.openContextRuntime = new OpenContextRuntime(
      serviceProvider,
      new ElectronInterpreterEnvironment({}),
      messageBus
    );
  }

  // Evaluates a snippet of shell code in the shared open context.
  async evaluate(code: string): Promise<EvaluationResult> {
    return await this.openContextRuntime.evaluate(code);
  }

  // Returns completion candidates for the partial input `code`.
  async getCompletions(code: string): Promise<Completion[]> {
    return await this.openContextRuntime.getCompletions(code);
  }
}
|
MaBeuLux88/mongosh
|
packages/service-provider-core/src/command-options.ts
|
import WriteConcern from "./write-concern";
// Options shared by database commands that accept a write concern.
export default interface CommandOptions {
  // Optional write concern forwarded to the server with the command.
  writeConcern?: WriteConcern
}
|
MaBeuLux88/mongosh
|
packages/shell-api/src/database.spec.ts
|
<reponame>MaBeuLux88/mongosh
import sinon from 'sinon';
import Mapper from '../../mapper/lib';
import { Database, Collection } from './shell-api';
import * as signatures from './shell-api-signatures';
import { expect } from 'chai';
/**
* Test that a database method proxies the respective Mapper method correctly,
* with the right arguments and returning the right result.
*
* It ensures:
* - that the method is defined in the shell api and that is meant to be a function
* - that the mapper method to be proxied to exists
* - that the mapper method is called with a database as first argument and with
* the rest of invokation arguments.
* - that the result of mapper invokation is returned.
*
* @param {String} name - the name of the method to invoke
*/
function testWrappedMethod(name: string): void {
  // The shell API must declare the method as a function attribute.
  const attribute = signatures.Database.attributes[name];
  expect(attribute).to.exist;
  expect(attribute.type).to.equal('function');

  // Stub the corresponding mapper method (mapper.database_<name>).
  const mock = sinon.mock();
  const mapper: Mapper = sinon.createStubInstance(Mapper, {
    [`database_${name}`]: mock
  });

  const args = [1, 2, 3];
  const retVal = {};
  const database = new Database(
    mapper, 'db1');

  // The mapper method must receive the database itself plus the original
  // call arguments, and its return value must be passed through untouched.
  mock.withArgs(database, ...args).returns(retVal);
  const result = database[name](...args);
  mock.verify();
  expect(result).to.equal(retVal);
}
// Database shell-api tests: method proxying plus the dynamic
// collection-as-property behaviour implemented via a Proxy.
describe('Database', () => {
  [
    'getCollectionInfos',
    'getCollectionNames',
    'runCommand',
    'adminCommand',
    'aggregate',
    'getSiblingDB',
    'getCollection',
    'dropDatabase'
  ].forEach((methodName) => {
    describe(`#${methodName}`, () => {
      it(`wraps mapper.database_${methodName}`, () => {
        testWrappedMethod(methodName);
      });
    });
  });

  it('allows to get a collection as property if is not one of the existing methods', () => {
    const database: any = new Database({}, 'db1');
    expect(database.someCollection).to.have.instanceOf(Collection);
    expect(database.someCollection._name).to.equal('someCollection');
  });

  // Repeated access must return the same Collection instance, not a new one.
  it('reuses collections', () => {
    const database: any = new Database({}, 'db1');
    expect(database.someCollection).to.equal(database.someCollection);
  });

  it('does not return a collection starting with _', () => {
    // this is the behaviour in the old shell
    const database: any = new Database({}, 'db1');
    expect(database._someProperty).to.equal(undefined);
  });

  it('does not return a collection for symbols', () => {
    const database: any = new Database({}, 'db1');
    expect(database[Symbol('someProperty')]).to.equal(undefined);
  });

  it('does not return a collection with invalid name', () => {
    const database: any = new Database({}, 'db1');
    expect(database[' ']).to.equal(undefined);
  });

  // Internal bookkeeping fields stay reachable despite the dynamic lookup.
  it('allows to access _name', () => {
    const database: any = new Database({}, 'db1');
    expect(database._name).to.equal('db1');
  });

  it('allows to access _collections', () => {
    const database: any = new Database({}, 'db1');
    expect(database._collections).to.deep.equal({});
  });
});
|
MaBeuLux88/mongosh
|
packages/cli-repl/src/connect-info.ts
|
import getBuildInfo from 'mongodb-build-info';
interface ConnectInfo {
isAtlas: boolean;
isLocalhost: boolean;
serverVersion: string;
isEnterprise: boolean;
uri: string;
authType?: string;
isDataLake: boolean;
dlVersion?: string;
isGenuine: boolean;
serverName: string;
}
export default function getConnectInfo(uri: string, buildInfo: any, cmdLineOpts: any, topology: any): ConnectInfo {
const { isGenuine, serverName } =
getBuildInfo.getGenuineMongoDB(buildInfo, cmdLineOpts);
const { isDataLake, dlVersion } = getBuildInfo.getDataLake(buildInfo);
// get this information from topology rather than cmdLineOpts, since not all
// connections are able to run getCmdLineOpts command
const authType = topology.s.credentials
? topology.s.credentials.mechanism : null;
return {
isAtlas: getBuildInfo.isAtlas(uri),
isLocalhost: getBuildInfo.isLocalhost(uri),
serverVersion: buildInfo.version,
isEnterprise: getBuildInfo.isEnterprise(buildInfo),
uri,
authType,
isDataLake,
dlVersion,
isGenuine,
serverName
};
}
|
MaBeuLux88/mongosh
|
packages/service-provider-browser/src/stitch-transport.spec.ts
|
<gh_stars>0
import StitchTransport from './stitch-transport';
import { expect } from 'chai';
import sinon from 'sinon';
import StitchClient from './stitch-client';
import StitchMongoClient from './stitch-mongo-client';
/**
* Create a client stub from the provided collection stub.
*
* @note: We basically only care about the method under test
* which is always mocked on a new collection stub each
* test run. We we can use the boilerplate creation of the
* db and client here.
*
* @param {Stub} collectionStub - The collection stub.
*
* @returns {Stub} The client stub to pass to the transport.
*/
const createClientStub = (collectionStub) => {
  // Lookup chain under test: client.db(...) -> dbStub.collection(...) ->
  // the collection stub carrying the mocked method.
  const dbStub = {
    collection: sinon.stub().returns(collectionStub)
  };
  return {
    db: sinon.stub().returns(dbStub)
  };
};

// Minimal StitchClient stand-in: only the `auth` property is ever read.
const createStitchClientStub = (): StitchClient => ({auth: null});

// Minimal StitchMongoClient stand-in; `db` is never invoked where this
// stub is used (the constructor test only stores the reference).
const createMongoClientStub = (): StitchMongoClient => ({db: () => {}});
describe('StitchTransport', () => {
const stitchClient = createStitchClientStub();
describe('#constructor', () => {
const mongoClient = createMongoClientStub();
const stitchTransport = new StitchTransport(stitchClient, mongoClient);
it('sets the mongo client on the instance', () => {
expect(stitchTransport.mongoClient).to.equal(mongoClient);
});
it('sets the stitch client on the instance', () => {
expect(stitchTransport.stitchClient).to.equal(stitchClient);
});
});
describe('#aggregate', () => {
let stitchTransport;
const pipeline = [{ $match: { name: '<NAME>' }}];
const aggResult = [{ name: '<NAME>' }];
const aggMock = sinon.mock().withArgs(pipeline).
returns({ toArray: () => Promise.resolve(aggResult) });
beforeEach(() => {
const collectionStub = {
aggregate: aggMock
};
stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
});
afterEach(() => {
stitchTransport = null;
});
it('executes the command against the database', async() => {
const cursor = stitchTransport.aggregate('music', 'bands', pipeline);
const result = await cursor.toArray();
expect(result).to.deep.equal(aggResult);
aggMock.verify();
});
});
describe('#bulkWrite', () => {
let stitchTransport;
const requests = [{ insertOne: { name: '<NAME>' }}];
beforeEach(() => {
stitchTransport = new StitchTransport(stitchClient, createClientStub({}));
});
afterEach(() => {
stitchTransport = null;
});
it('rejects the promise', () => {
return stitchTransport.bulkWrite('music', 'bands', requests).catch((error) => {
expect(error).to.not.equal(null);
});
});
});
describe('#countDocuments', () => {
let stitchTransport;
const countResult = 10;
const countMock = sinon.mock().once().withArgs({}).resolves(countResult);
beforeEach(() => {
const collectionStub = {
count: countMock
};
stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
});
afterEach(() => {
stitchTransport = null;
});
it('executes the command against the database', async() => {
const result = await stitchTransport.countDocuments('music', 'bands');
expect(result).to.deep.equal(countResult);
countMock.verify();
});
});
describe('#deleteMany', () => {
let stitchTransport;
const commandResult = { result: { n: 1, ok: 1 }};
const deleteMock = sinon.mock().once().withArgs({}).resolves(commandResult);
beforeEach(() => {
const collectionStub = {
deleteMany: deleteMock
};
stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
});
afterEach(() => {
stitchTransport = null;
});
it('executes the command against the database', async() => {
const result = await stitchTransport.deleteMany('music', 'bands', {});
expect(result).to.deep.equal(commandResult);
deleteMock.verify();
});
});
describe('#deleteOne', () => {
let stitchTransport;
const commandResult = { result: { n: 1, ok: 1 }};
const deleteMock = sinon.mock().once().withArgs({}).resolves(commandResult);
beforeEach(() => {
const collectionStub = {
deleteOne: deleteMock
};
stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
});
afterEach(() => {
stitchTransport = null;
});
it('executes the command against the database', async() => {
const result = await stitchTransport.deleteOne('music', 'bands', {});
expect(result).to.deep.equal(commandResult);
deleteMock.verify();
});
});
  // #distinct is exercised without a collection stub; the call is expected to
  // reject. NOTE(review): if the promise unexpectedly resolved, the .catch
  // callback would never run and the test would pass vacuously — consider
  // chai-as-promised's `rejected` assertion.
  describe('#distinct', () => {
    let stitchTransport;
    beforeEach(() => {
      stitchTransport = new StitchTransport(stitchClient, createClientStub({}));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('rejects the promise', () => {
      return stitchTransport.distinct('music', 'bands', 'name').catch((error) => {
        expect(error).to.not.equal(null);
      });
    });
  });
  // #estimatedDocumentCount: same pattern — the returned promise must reject.
  describe('#estimatedDocumentCount', () => {
    let stitchTransport;
    beforeEach(() => {
      stitchTransport = new StitchTransport(stitchClient, createClientStub({}));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('rejects the promise', () => {
      return stitchTransport.estimatedDocumentCount('music', 'bands').catch((error) => {
        expect(error).to.not.equal(null);
      });
    });
  });
  // #find: delegation test — the transport must forward the filter to the
  // stubbed collection's find() and expose its cursor.
  describe('#find', () => {
    let stitchTransport;
    const filter = { name: '<NAME>' };
    const findResult = [{ name: '<NAME>' }];
    const findMock = sinon.mock().withArgs(filter).
      returns({ toArray: () => Promise.resolve(findResult) });
    beforeEach(() => {
      const collectionStub = {
        find: findMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const cursor = stitchTransport.find('music', 'bands', filter);
      const result = await cursor.toArray();
      expect(result).to.deep.equal(findResult);
      findMock.verify();
    });
  });
  // #findOneAndDelete: delegation test against a mocked collection method.
  describe('#findOneAndDelete', () => {
    let stitchTransport;
    const commandResult = { result: { n: 1, ok: 1 }};
    const findMock = sinon.mock().once().withArgs({}).resolves(commandResult);
    beforeEach(() => {
      const collectionStub = {
        findOneAndDelete: findMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const result = await stitchTransport.findOneAndDelete('music', 'bands', {});
      expect(result).to.deep.equal(commandResult);
      findMock.verify();
    });
  });
  // #findOneAndReplace: filter and replacement must both be forwarded.
  describe('#findOneAndReplace', () => {
    let stitchTransport;
    const commandResult = { result: { n: 1, ok: 1 }};
    const filter = { name: '<NAME>' };
    const replacement = { name: '<NAME>' };
    const findMock = sinon.mock().once().withArgs(filter, replacement).
      resolves(commandResult);
    beforeEach(() => {
      const collectionStub = {
        findOneAndReplace: findMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const result = await stitchTransport.
        findOneAndReplace('music', 'bands', filter, replacement);
      expect(result).to.deep.equal(commandResult);
      findMock.verify();
    });
  });
  // #findOneAndUpdate: filter and update document must both be forwarded.
  describe('#findOneAndUpdate', () => {
    let stitchTransport;
    const commandResult = { result: { n: 1, ok: 1 }};
    const filter = { name: '<NAME>' };
    const update = { $set: { name: '<NAME>' }};
    const findMock = sinon.mock().once().withArgs(filter, update).
      resolves(commandResult);
    beforeEach(() => {
      const collectionStub = {
        findOneAndUpdate: findMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const result = await stitchTransport.
        findOneAndUpdate('music', 'bands', filter, update);
      expect(result).to.deep.equal(commandResult);
      findMock.verify();
    });
  });
  // #insertMany: the docs array must be forwarded as-is.
  describe('#insertMany', () => {
    let stitchTransport;
    const doc = { name: '<NAME>' };
    const commandResult = { result: { n: 1, ok: 1 }};
    const insertMock = sinon.mock().once().withArgs([ doc ]).resolves(commandResult);
    beforeEach(() => {
      const collectionStub = {
        insertMany: insertMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const result = await stitchTransport.insertMany('music', 'bands', [ doc ]);
      expect(result).to.deep.equal(commandResult);
      insertMock.verify();
    });
  });
  // #insertOne: single-document delegation test.
  describe('#insertOne', () => {
    let stitchTransport;
    const doc = { name: '<NAME>' };
    const commandResult = { result: { n: 1, ok: 1 }};
    const insertMock = sinon.mock().once().withArgs(doc).resolves(commandResult);
    beforeEach(() => {
      const collectionStub = {
        insertOne: insertMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const result = await stitchTransport.insertOne('music', 'bands', doc);
      expect(result).to.deep.equal(commandResult);
      insertMock.verify();
    });
  });
  // #replaceOne: expected to reject (no stubbed method available).
  describe('#replaceOne', () => {
    let stitchTransport;
    const filter = { name: '<NAME>' };
    const replacement = { name: '<NAME>' };
    beforeEach(() => {
      stitchTransport = new StitchTransport(stitchClient, createClientStub({}));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('rejects the promise', () => {
      return stitchTransport.replaceOne('music', 'bands', filter, replacement).catch((error) => {
        expect(error).to.not.equal(null);
      });
    });
  });
  // #runCommand: expected to reject.
  describe('#runCommand', () => {
    let stitchTransport;
    beforeEach(() => {
      stitchTransport = new StitchTransport(stitchClient, createClientStub({}));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('rejects the promise', () => {
      return stitchTransport.runCommand('admin', { ismaster: 1 }).catch((error) => {
        expect(error).to.not.equal(null);
      });
    });
  });
  // #updateOne: filter and update must both be forwarded.
  describe('#updateOne', () => {
    let stitchTransport;
    const filter = { name: '<NAME>' };
    const update = { $set: { name: '<NAME>' }};
    const commandResult = { result: { n: 1, ok: 1 }};
    const updateMock = sinon.mock().once().withArgs(filter, update).
      resolves(commandResult);
    beforeEach(() => {
      const collectionStub = {
        updateOne: updateMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const result = await stitchTransport.updateOne('music', 'bands', filter, update);
      expect(result).to.deep.equal(commandResult);
      updateMock.verify();
    });
  });
  // #updateMany: same delegation pattern as #updateOne.
  describe('#updateMany', () => {
    let stitchTransport;
    const filter = { name: '<NAME>' };
    const update = { $set: { name: '<NAME>' }};
    const commandResult = { result: { n: 1, ok: 1 }};
    const updateMock = sinon.mock().once().withArgs(filter, update).
      resolves(commandResult);
    beforeEach(() => {
      const collectionStub = {
        updateMany: updateMock
      };
      stitchTransport = new StitchTransport(stitchClient, createClientStub(collectionStub));
    });
    afterEach(() => {
      stitchTransport = null;
    });
    it('executes the command against the database', async() => {
      const result = await stitchTransport.updateMany('music', 'bands', filter, update);
      expect(result).to.deep.equal(commandResult);
      updateMock.verify();
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/shell-evaluator/src/index.ts
|
<filename>packages/shell-evaluator/src/index.ts
// Package entry point: re-exports the evaluator (default) and the shell API
// signature. Imports are grouped first; ES module imports are hoisted, so
// this ordering is behaviorally identical to interleaving them with exports.
import ShellEvaluator from './shell-evaluator';
import shellApiSignature from './shell-api-signature';

export default ShellEvaluator;
export { shellApiSignature };
|
MaBeuLux88/mongosh
|
packages/shell-api/src/help.ts
|
<reponame>MaBeuLux88/mongosh<filename>packages/shell-api/src/help.ts<gh_stars>0
import i18n from '@mongosh/i18n';
// Untranslated help content for a shell API object.
type HelpProperties = {
  help: string;
  docs?: string;
  attr?: HelpPropertiesAttr[];
};
// One documented attribute; only the description is mandatory.
type HelpPropertiesAttr = {
  name?: string;
  description: string;
};
// Options for Help construction.
type HelpOptions = {
  // Optional translation function applied to every help string.
  translate?(string): string;
};
// Default translator: the i18n API-help lookup, bound so `this` stays the
// i18n instance when the function is passed around.
const DEFAULT_TRANSLATE = i18n.translateApiHelp.bind(i18n);
/**
 * Wraps translated help text for a shell API object so the REPL can render
 * it (via toReplString) instead of printing the raw object.
 */
export class Help {
  private help: string;
  private docs: string;
  private attr: HelpPropertiesAttr[] = [];

  /**
   * @param properties - Untranslated help text, docs link and attributes.
   * @param options - Optional translate function; defaults to the i18n
   *   API-help translator.
   */
  constructor(properties: HelpProperties, options: HelpOptions = { translate: DEFAULT_TRANSLATE }) {
    // Fix: `translate` is optional on HelpOptions, so an explicitly passed
    // options object without it (e.g. `new Help(props, {})`) used to throw
    // "options.translate is not a function". Fall back to the default.
    const translate = options.translate || DEFAULT_TRANSLATE;
    this.help = translate(properties.help);
    // NOTE(review): `docs` is optional on HelpProperties — this assumes the
    // translator tolerates undefined input; confirm against i18n.
    this.docs = translate(properties.docs);
    this.attr = (properties.attr || [])
      .map((attr) => ({
        name: attr.name,
        description: translate(attr.description),
      })).filter(
        attr => attr.description // at least the description should be there
      );
  }
  shellApiType(): string {
    return 'Help';
  }
  toReplString(): HelpProperties {
    const { help, docs, attr } = this;
    return { help, docs, attr };
  }
}
|
MaBeuLux88/mongosh
|
packages/cli-repl/src/logger.ts
|
<gh_stars>0
/* eslint no-console:0, no-empty-function: 0 */
import redactInfo from 'mongodb-redact';
import Analytics from 'analytics-node';
import redactPwd from './redact-pwd';
import { ObjectId } from 'bson';
import pino from 'pino';
import path from 'path';
// Argument payloads that may accompany an api-call event (all optional).
interface ApiEventArguments {
  pipeline?: any[];
  query?: object;
  options?: object;
  filter?: object;
}
// Telemetry payload describing a shell API method invocation.
interface ApiEvent {
  method?: string;
  class?: string;
  db?: string;
  coll?: string;
  arguments?: ApiEventArguments;
}
// Payload for the 'mongosh:use' event.
interface UseEvent {
  db: string;
}
// Before/after source emitted by the async rewriter.
interface AsyncRewriterEvent {
  original: string;
  rewritten: string;
}
// Payload for the 'mongosh:show' event.
interface ShowEvent {
  method: string;
}
// Connection metadata emitted on 'mongosh:connect'.
interface ConnectEvent {
  uri: string;
  isAtlas: boolean;
  isLocalhost: boolean;
  serverVersion: string;
  isEnterprise: boolean;
  authType: string;
  isDataLake: boolean;
  dlVersion?: string;
  isGenuine: boolean;
  serverName: string;
}
// set up a noop, in case we are not able to connect to segment.
// Constructor-function form is kept (callers use `new NoopAnalytics()`);
// the no-op methods are attached to the prototype in one batch.
function NoopAnalytics(): void {}
Object.assign(NoopAnalytics.prototype, {
  identify(): void {},
  track(): void {},
});
/**
 * Set up logging and telemetry for a mongosh session.
 *
 * Creates a pino log file named after a per-session ObjectId inside `logDir`,
 * and forwards selected bus events to Segment analytics — but only once the
 * user has opted into telemetry via 'mongosh:new-user'/'mongosh:update-user'.
 *
 * @param bus - The mongosh message bus to subscribe to.
 * @param logDir - Directory in which the session log file is created.
 */
export default function logger(bus: any, logDir: string): void {
  const sessionID = new ObjectId(Date.now());
  const logDest = path.join(logDir, `${sessionID}_log`);
  // NOTE(review): the logger name reads 'monogsh' — looks like a typo for
  // 'mongosh'; left unchanged here in case log consumers match on it.
  const log = pino({ name: 'monogsh' }, pino.destination(logDest));
  console.log(`Current sessionID: ${sessionID}`);
  let userId;
  let telemetry;
  let analytics = new NoopAnalytics();
  try {
    // this file gets written as a part of a release; fall back to the noop
    // client when it is absent (e.g. local development builds).
    // Security fix: the previous code also log.warn()-ed the Segment API key,
    // writing a secret into the session log file — removed.
    analytics = new Analytics(require('./analytics-config.js').SEGMENT_API_KEY);
  } catch (e) {
    bus.emit('mongosh:error', e);
  }
  bus.on('mongosh:connect', function(args: ConnectEvent) {
    // Redact any password embedded in the connection string before logging.
    const connectionUri = redactPwd(args.uri);
    delete args.uri;
    const params = { sessionID, userId, connectionUri, ...args };
    log.info('mongosh:connect', params);
    if (telemetry) {
      analytics.track({
        userId,
        event: 'mongosh:connect',
        properties: { sessionID, connectionUri, ...args }
      });
    }
  });
  bus.on('mongosh:new-user', function(id, enableTelemetry) {
    userId = id;
    telemetry = enableTelemetry;
    if (telemetry) analytics.identify({ userId });
  });
  bus.on('mongosh:update-user', function(id, enableTelemetry) {
    userId = id;
    telemetry = enableTelemetry;
    log.info('mongosh:update-user', { enableTelemetry });
  });
  bus.on('mongosh:error', function(error: any) {
    log.error(error);
    // Only report mongosh's own error classes to analytics, not user errors.
    if (telemetry && error.name.includes('Mongosh')) {
      analytics.track({
        userId,
        event: 'mongosh:error',
        properties: { error }
      });
    }
  });
  bus.on('mongosh:help', function() {
    log.info('mongosh:help');
    if (telemetry) {
      analytics.track({
        userId,
        event: 'mongosh:help'
      });
    }
  });
  bus.on('mongosh:rewritten-async-input', function(args: AsyncRewriterEvent) {
    log.info('mongosh:rewritten-async-input', args);
  });
  bus.on('mongosh:use', function(args: UseEvent) {
    log.info('mongosh:use', args);
    if (telemetry) {
      analytics.track({
        userId,
        event: 'mongosh:use'
      });
    }
  });
  bus.on('mongosh:show', function(args: ShowEvent) {
    log.info('mongosh:show', args);
    if (telemetry) {
      analytics.track({
        userId,
        event: 'mongosh:show',
        properties: { method: args.method }
      });
    }
  });
  bus.on('mongosh:setCtx', function(args) {
    log.info('mongosh:setCtx', args);
  });
  bus.on('mongosh:api-call', function(args: ApiEvent) {
    log.info('mongosh:api-call', redactInfo(args));
    // analytics properties to include if they are present in an api-call
    const properties: ApiEvent = {};
    properties.arguments = {};
    if (args.method) properties.method = args.method;
    if (args.class) properties.class = args.class;
    // Bug fix: previously assigned properties.arguments to itself, so the
    // api-call arguments were never forwarded to analytics.
    if (args.arguments) properties.arguments = args.arguments;
    if (telemetry) {
      analytics.track({
        userId,
        event: 'mongosh:api-call',
        properties: redactInfo(properties)
      });
    }
  });
}
|
MaBeuLux88/mongosh
|
packages/browser-repl/src/iframe-runtime/iframe-runtime.ts
|
import {
  IframeInterpreterEnvironment
} from './iframe-interpreter-environment';
import {
  Runtime,
  EvaluationResult,
  Completion,
  OpenContextRuntime
} from '@mongosh/browser-runtime-core';
import { ServiceProvider } from '@mongosh/service-provider-core';

/**
 * Runtime that evaluates shell code inside a hidden, sandboxed iframe so the
 * evaluated code runs against the iframe's window rather than the host page.
 */
export class IframeRuntime implements Runtime {
  private openContextRuntime: OpenContextRuntime;
  private iframe: HTMLIFrameElement;
  private container: HTMLDivElement;
  private serviceProvider: ServiceProvider;

  constructor(serviceProvider: ServiceProvider) {
    this.serviceProvider = serviceProvider;
  }

  /** Evaluate code in the iframe context, initializing the iframe lazily. */
  async evaluate(code: string): Promise<EvaluationResult> {
    if (!this.openContextRuntime) {
      await this.initialize();
    }
    return await this.openContextRuntime.evaluate(code);
  }

  /** Get autocomplete entries for a partial input, initializing lazily. */
  async getCompletions(code: string): Promise<Completion[]> {
    if (!this.openContextRuntime) {
      await this.initialize();
    }
    return await this.openContextRuntime.getCompletions(code);
  }

  /**
   * Create the hidden sandboxed iframe and the runtime bound to its window.
   * Idempotent: returns immediately if the iframe was already created.
   */
  async initialize(): Promise<void> {
    if (this.iframe) {
      return;
    }
    this.container = document.createElement('div');
    this.container.style.display = 'none';
    // NOTE: inserting the iframe directly as dom element does not work with sandboxing.
    this.container.insertAdjacentHTML(
      'beforeend',
      '<iframe src="about:blank" style="display: none" sandbox="allow-same-origin" />');
    this.iframe = this.container.firstElementChild as HTMLIFrameElement;
    const ready: Promise<void> = new Promise((resolve) => {
      this.iframe.onload = (): void => resolve();
    });
    document.body.appendChild(this.container);
    const environment = new IframeInterpreterEnvironment(this.iframe.contentWindow);
    this.openContextRuntime = new OpenContextRuntime(this.serviceProvider, environment);
    return await ready;
  }

  /**
   * Remove the iframe from the document.
   *
   * Bug fix: the two early-exit paths previously returned `undefined`
   * despite the declared `Promise<void>` return type, which would break any
   * caller that chains `.then()` on the result. All paths now return a
   * resolved promise.
   */
  destroy(): Promise<void> {
    if (!this.iframe) {
      return Promise.resolve();
    }
    const parent = this.iframe.parentNode;
    if (!parent) {
      return Promise.resolve();
    }
    parent.removeChild(this.iframe);
    // NOTE(review): the wrapper div appended to document.body in initialize()
    // is left in place — consider removing this.container as well.
    return Promise.resolve();
  }
}
|
MaBeuLux88/mongosh
|
packages/browser-runtime-core/src/interpreter/interpreter.ts
|
import { Preprocessor } from './preprocessor';
// Name of the global callback the preprocessor routes the last expression to.
const LAST_EXPRESSION_CALLBACK_FUNCTION_NAME = '___MONGOSH_LAST_EXPRESSION_CALLBACK';
// Name of the global store object — presumably used by the preprocessor to
// persist lexical declarations between evaluations (see Preprocessor).
const LEXICAL_CONTEXT_VARIABLE_NAME = '___MONGOSH_LEXCON';
// Any value produced by evaluated user code.
export type ContextValue = any;
export type EvaluationResult = {
  shellApiType: string;
  value: ContextValue;
};
// Minimal surface an interpreter host must provide: a sloppy-mode eval and
// access to the global/context object used by evaluated code.
export interface InterpreterEnvironment {
  sloppyEval(code: string): EvaluationResult;
  getContextObject(): ContextValue;
}
/**
 * Evaluates preprocessed code inside an InterpreterEnvironment, capturing the
 * value of the last expression through a well-known callback installed on the
 * environment's context object.
 */
export class Interpreter {
  private environment: InterpreterEnvironment;
  private preprocessor: Preprocessor;

  constructor(environment: InterpreterEnvironment) {
    this.environment = environment;
    const context = this.environment.getContextObject();
    context[LEXICAL_CONTEXT_VARIABLE_NAME] = {};
    this.preprocessor = new Preprocessor({
      lastExpressionCallbackFunctionName: LAST_EXPRESSION_CALLBACK_FUNCTION_NAME,
      lexicalContextStoreVariableName: LEXICAL_CONTEXT_VARIABLE_NAME
    });
  }

  async evaluate(code: string): Promise<EvaluationResult> {
    const context = this.environment.getContextObject();
    // The preprocessor rewrites the code so that its final expression is
    // passed to this callback; capture the value here.
    let lastValue;
    context[LAST_EXPRESSION_CALLBACK_FUNCTION_NAME] = (value): void => {
      lastValue = value;
    };
    const rewritten = this.preprocessor.preprocess(code);
    await this.environment.sloppyEval(rewritten);
    return await lastValue;
  }
}
|
MaBeuLux88/mongosh
|
packages/service-provider-core/src/cursor.ts
|
<gh_stars>0
import Document from './document';
/**
 * Common cursor contract implemented by each service provider's transport.
 * Chainable configuration methods return the cursor itself; terminal methods
 * return promises.
 */
interface Cursor {
  /**
   * Add a cursor flag as an option to the cursor.
   *
   * @param {number} option - The flag number.
   *
   * @returns {Cursor} The cursor.
   */
  addOption(option: number): Cursor
  /**
   * Set cursor to allow partial results.
   *
   * @returns {Cursor} The cursor.
   */
  allowPartialResults(): Cursor;
  /**
   * Set the cursor batch size.
   *
   * @param {number} size - The batch size.
   *
   * @returns {Cursor} The cursor.
   */
  batchSize(size: number): Cursor;
  /**
   * Close the cursor.
   *
   * @param {Document} options - The close options.
   *
   * @returns {Promise<void>} Resolves when the cursor is closed.
   */
  close(options: Document): Promise<void>;
  /**
   * Determine if the cursor has been closed.
   *
   * @returns {boolean} If the cursor is closed.
   */
  isClosed(): boolean;
  /**
   * Set the collation on the cursor.
   *
   * @param {Document} spec - The collation.
   *
   * @returns {Cursor} The cursor.
   */
  collation(spec: Document): Cursor;
  /**
   * Add a comment to the cursor.
   *
   * @param {string} cmt - The comment.
   *
   * @returns {Cursor} The cursor.
   */
  comment(cmt: string): Cursor;
  /**
   * Get the count from the cursor.
   *
   * @returns {Promise<number>} The count.
   */
  count(): Promise<number>;
  /**
   * Apply a function to each document of the cursor.
   *
   * @param f - The function applied to each document.
   *
   * @returns {Promise<void>} Resolves when iteration completes.
   */
  forEach(f): Promise<void>;
  /**
   * Does the cursor have a next document?
   *
   * @returns {Promise<boolean>} If there is a next document.
   */
  hasNext(): Promise<boolean>;
  /**
   * Set a hint for indexes on the cursor.
   *
   * @param {string} index - The index hint.
   *
   * @returns {Cursor} The cursor.
   */
  hint(index: string): Cursor;
  /**
   * cursor.isExhausted() returns true if the cursor is closed and there are no
   * remaining objects in the batch.
   *
   * @returns Promise<boolean> - whether the cursor is exhausted
   */
  isExhausted(): Promise<boolean>;
  /**
   * Count the documents by iterating the cursor.
   *
   * @returns {Promise<number>} The number of documents iterated.
   */
  itcount(): Promise<number>;
  /**
   * Set the limit of documents to return.
   *
   * @param {number} value - The limit value.
   *
   * @returns {Cursor} The cursor.
   */
  limit(value: number): Cursor;
  /**
   * Apply a mapping function to each document of the cursor.
   *
   * @param f - The mapping function.
   *
   * @returns {Cursor} The cursor.
   */
  map(f): Cursor;
  /**
   * Set the max index bounds.
   *
   * @param {Document} indexBounds - The max bounds.
   *
   * @returns {Cursor} The cursor.
   */
  max(indexBounds: Document): Cursor;
  /**
   * Set the maxTimeMS value.
   *
   * @param {number} value - The maxTimeMS value.
   *
   * @returns {Cursor} The cursor.
   */
  maxTimeMS(value: number): Cursor;
  /**
   * Set the min index bounds.
   *
   * @param {Document} indexBounds - The min bounds.
   *
   * @returns {Cursor} The cursor.
   */
  min(indexBounds: Document): Cursor;
  /**
   * Get the next document from the cursor.
   *
   * @returns {Promise<any>} The next document.
   */
  next(): Promise<any>;
  /**
   * Tell the cursor not to timeout.
   *
   * @returns {Cursor} The cursor.
   */
  noCursorTimeout(): Cursor;
  /**
   * Flag the cursor as an oplog replay.
   *
   * @returns {Cursor} The cursor.
   */
  oplogReplay(): Cursor;
  /**
   * Set the projection on the cursor.
   *
   * @param {Document} spec - The projection.
   *
   * @returns {Cursor} The cursor.
   */
  projection(spec: Document): Cursor;
  /**
   * Set the cursor to return the index field.
   *
   * @param {boolean} enabled - Whether to enable return key.
   *
   * @returns {Cursor} The cursor.
   */
  returnKey(enabled: boolean): Cursor;
  /**
   * Get the number of documents the cursor will return.
   *
   * @returns {Promise<number>} The size.
   */
  size(): Promise<number>;
  /**
   * Set the skip value.
   *
   * @param {number} value - The number of docs to skip.
   *
   * @returns {Cursor} The cursor.
   */
  skip(value: number): Cursor;
  /**
   * Set the sort on the cursor.
   *
   * @param {Document} spec - The sort.
   *
   * @returns {Cursor} The cursor.
   */
  sort(spec: Document): Cursor;
  /**
   * Flag the cursor as tailable.
   *
   * @returns {Cursor} The cursor.
   */
  tailable(): Cursor;
  /**
   * Set read preference for the cursor.
   *
   * @param {string} mode - the read preference mode
   * @param {Document[]} [tagSet] - the tag set
   * @returns {Cursor}
   */
  readPref(mode: string, tagSet?: Document[]): Cursor;
  /**
   * Get the documents from the cursor as an array of objects.
   */
  toArray(): Promise<Document[]>;
  /**
   * Get the explain of the cursor.
   *
   * @param {string} verbosity - the explain verbosity.
   * @returns {Promise<any>}
   */
  explain(verbosity: string): Promise<any>;
}
export default Cursor;
|
MaBeuLux88/mongosh
|
packages/build/src/download-center.spec.ts
|
import path from 'path';
import { promises as fs } from 'fs';
import { expect } from 'chai';
import { createDownloadCenterConfig } from './download-center';
// Verifies that createDownloadCenterConfig injects the given version into the
// per-platform artifact filenames.
describe('download center module', () => {
  describe('.createDownloadCenterConfig', () => {
    let config;
    before(() => {
      config = createDownloadCenterConfig('1.2.2');
    });
    it('returns the string with the macos version injected', () => {
      expect(config).to.include('mongosh-1.2.2-darwin.tgz');
    });
    it('returns the string with the linux version injected', () => {
      expect(config).to.include('mongosh-1.2.2-linux.tgz');
    });
    it('returns the string with the win version injected', () => {
      expect(config).to.include('mongosh-1.2.2-win.zip');
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/shell-api/src/shell-api.spec.ts
|
<reponame>MaBeuLux88/mongosh
import {
AggregationCursor,
BulkWriteResult,
Collection,
Cursor,
Database,
DeleteResult,
InsertManyResult,
InsertOneResult,
ReplicaSet,
Shard,
UpdateResult
} from './shell-api';
import { expect } from 'chai';
// For every shell API type, shellApiType() must report the class name —
// generated as one describe block per type.
[
  AggregationCursor,
  BulkWriteResult,
  Collection,
  Cursor,
  Database,
  DeleteResult,
  InsertManyResult,
  InsertOneResult,
  ReplicaSet,
  Shard,
  UpdateResult
].forEach((Type) => {
  describe(Type.name, () => {
    describe('#shellApi', () => {
      it('returns the correct type', () => {
        expect(new Type().shellApiType()).to.equal(Type.name);
      });
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/mapper/src/mapper.integration.spec.ts
|
<filename>packages/mapper/src/mapper.integration.spec.ts
import { expect } from 'chai';
import { CliServiceProvider } from '@mongosh/service-provider-server';
import Mapper from './mapper';
import { Collection, Cursor, Database, Explainable, AggregationCursor } from '@mongosh/shell-api';
const mongodbRunnerBefore = require('mongodb-runner/mocha/before');
const mongodbRunnerAfter = require('mongodb-runner/mocha/after');
describe('Mapper (integration)', function() {
this.timeout(60000);
  // Boot a dedicated mongod on port 27018 for the whole suite.
  before(function(done) {
    try {
      mongodbRunnerBefore({ port: 27018, timeout: 60000 }).call(this, done);
    } catch (e) {
      done(e);
    }
  });
  after(mongodbRunnerAfter({ port: 27018 }));
  let serviceProvider: CliServiceProvider;
  // Return the names of all indexes on the given collection.
  const getIndexNames = async(dbName: string, collectionName: string): Promise<any> => {
    const specs = await serviceProvider.getIndexes(
      dbName,
      collectionName
    );
    return specs.map(spec => spec.name);
  };
  // Fetch every document, projecting out _id so deep-equality checks work.
  const findAllWithoutId = (dbName: string, collectionName: string): any => serviceProvider.find(
    dbName,
    collectionName,
    {},
    { projection: { _id: 0 } }
  ).toArray();
  const expectCollectionToExist = async(dbName: any, collectionName: any): Promise<void> => {
    const collectionNames = (await serviceProvider.listCollections(dbName)).map(({ name }) => name);
    expect(collectionNames).to.include(collectionName);
  };
  const expectCollectionNotToExist = async(dbName: any, collectionName: any): Promise<void> => {
    const collectionNames = (await serviceProvider.listCollections(dbName)).map(({ name }) => name);
    expect(collectionNames).to.not.include(collectionName);
  };
  // TODO: replace with serviceProvider.createCollection()
  // Creates the collection implicitly by inserting and deleting a document.
  const createCollection = async(dbName: string, collectionName: string): Promise<any> => {
    const now = Date.now();
    await serviceProvider.insertOne(dbName, collectionName, { _id: now });
    await serviceProvider.deleteOne(dbName, collectionName, { _id: now });
  };
  before(async() => {
    serviceProvider = await CliServiceProvider.connect('mongodb://localhost:27018');
  });
  after(() => {
    return serviceProvider.close(true);
  });
  let mapper: Mapper;
  let dbName;
  let database;
  let collection;
  let collectionName;
  // Fresh mapper + timestamped database per test; dropped in afterEach.
  beforeEach(async() => {
    dbName = `test-${Date.now()}`;
    collectionName = 'docs';
    mapper = new Mapper(serviceProvider);
    mapper.context = { db: new Database(mapper, 'test') };
    mapper.use(dbName);
    database = new Database(mapper, dbName);
    collection = new Collection(
      mapper,
      database,
      collectionName
    );
  });
  afterEach(async() => {
    await serviceProvider.dropDatabase(dbName);
  });
  // Tests for shell commands: the `it` iterator over 21 seeded documents
  // (default batch size 20, so the second `it()` yields the 21st doc).
  describe('commands', () => {
    describe('it', () => {
      beforeEach(async() => {
        const docs = [];
        let i = 1;
        while (i <= 21) {
          docs.push({ doc: i });
          i++;
        }
        await serviceProvider.insertMany(dbName, collectionName, docs);
      });
      describe('when calling it after find', () => {
        it('returns next batch of docs', async() => {
          mapper.collection_find(collection, {}, { _id: 0 });
          await mapper.it();
          expect(await mapper.it()).to.deep.equal([{
            doc: 21
          }]);
        });
      });
      describe('when calling limit after skip', () => {
        let cursor: Cursor;
        beforeEach(() => {
          cursor = mapper
            .collection_find(collection, {}, { _id: 0 })
            .skip(1)
            .limit(1);
        });
        describe('when calling toArray on the cursor', () => {
          it('returns the right documents', async() => {
            expect(await cursor.toArray()).to.deep.equal([{ doc: 2 }]);
          });
        });
        describe('when calling toReplString on the cursor', () => {
          it('returns the right documents', async() => {
            expect(await cursor.toReplString()).to.deep.equal([{ doc: 2 }]);
          });
        });
      });
    });
  });
describe('collection', () => {
    // bulkWrite: a single insertOne request must be applied and reported.
    describe('bulkWrite', () => {
      context('with an insertOne request', () => {
        let requests;
        let result;
        beforeEach(async() => {
          requests = [
            {
              insertOne: {
                document: {
                  doc: 1
                }
              }
            }
          ];
          result = await mapper.collection_bulkWrite(
            collection,
            requests
          );
        });
        it('returns acknowledged = true', () => {
          expect(result.acknowledged).to.be.true;
        });
        it('returns insertedCount = 1', () => {
          expect(result.insertedCount).to.equal(1);
        });
        it('returns insertedIds', () => {
          expect(Object.keys(result.insertedIds)).to.have.lengthOf(1);
        });
        it('performs insert', async() => {
          const docs = await serviceProvider.find(
            dbName,
            collectionName,
            {},
            { projection: { _id: 0 } }
          ).toArray();
          expect(docs).to.deep.equal([
            { doc: 1 }
          ]);
        });
      });
    });
    // updateOne: with and without upsert.
    // NOTE(review): the expectations below spell the field 'acknowleged'
    // (missing a 'd') and expect the value 1 rather than true — this only
    // passes if the mapper's result object uses the same spelling; confirm
    // against the mapper implementation.
    describe('updateOne', () => {
      beforeEach(async() => {
        await serviceProvider.insertMany(dbName, collectionName, [
          { doc: 1 },
          { doc: 1 },
          { doc: 2 }
        ]);
      });
      context('without upsert', () => {
        let result;
        beforeEach(async() => {
          result = await mapper.collection_updateOne(
            collection, { doc: 1 }, { $inc: { x: 1 } }
          );
        });
        it('updates only one existing document matching filter', async() => {
          const docs = await findAllWithoutId(dbName, collectionName);
          expect(docs).to.deep.equal([
            { doc: 1, x: 1 },
            { doc: 1 },
            { doc: 2 }
          ]);
        });
        it('returns update result correctly', () => {
          const {
            acknowleged,
            insertedId,
            matchedCount,
            modifiedCount,
            upsertedCount
          } = result;
          expect({
            acknowleged,
            insertedId,
            matchedCount,
            modifiedCount,
            upsertedCount
          }).to.deep.equal({
            acknowleged: 1,
            insertedId: null,
            matchedCount: 1,
            modifiedCount: 1,
            upsertedCount: 0
          });
        });
      });
      context('with upsert', () => {
        let result;
        beforeEach(async() => {
          result = await mapper.collection_updateOne(
            collection, { _id: 'new-doc' }, { $set: { _id: 'new-doc', doc: 3 } }, { upsert: true }
          );
        });
        it('inserts a document', async() => {
          const docs = await findAllWithoutId(dbName, collectionName);
          expect(docs).to.deep.equal([
            { doc: 1 },
            { doc: 1 },
            { doc: 2 },
            { doc: 3 }
          ]);
        });
        it('returns update result correctly', () => {
          const {
            acknowleged,
            insertedId,
            matchedCount,
            modifiedCount,
            upsertedCount
          } = result;
          expect({
            acknowleged,
            insertedId,
            matchedCount,
            modifiedCount,
            upsertedCount
          }).to.deep.equal({
            acknowleged: 1,
            insertedId: { index: 0, _id: 'new-doc' },
            matchedCount: 0,
            modifiedCount: 0,
            upsertedCount: 1
          });
        });
      });
    });
    // NOTE(review): describe name 'converToCapped' is missing a 't'
    // (convertToCapped) — cosmetic only, it is just the suite label.
    describe('converToCapped', () => {
      let result;
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
        expect(await serviceProvider.isCapped(
          dbName,
          collectionName
        )).to.be.false;
        result = await mapper.collection_convertToCapped(
          collection,
          1000
        );
      });
      it('returns ok = 1', () => {
        expect(result.ok).to.equal(1);
      });
      it('converts the collection', async() => {
        expect(await serviceProvider.isCapped(
          dbName,
          collectionName
        )).to.be.true;
      });
    });
    // createIndexes: creating a named index on { x: 1 }.
    describe('createIndexes', () => {
      let result;
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
        expect(await getIndexNames(dbName, collectionName)).not.to.contain('index-1');
        result = await mapper.collection_createIndexes(collection, [{ x: 1 }], {
          name: 'index-1'
        });
      });
      it('returns creation result', () => {
        expect(result).to.contain({
          createdCollectionAutomatically: false,
          numIndexesBefore: 1,
          numIndexesAfter: 2,
          ok: 1
        });
      });
      it('creates the index', async() => {
        expect(await getIndexNames(dbName, collectionName)).to.contain('index-1');
      });
    });
    // getIndexes: default _id index plus the one created in setup.
    describe('getIndexes', () => {
      let result;
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
        await serviceProvider.createIndexes(dbName, collectionName, [
          { key: { x: 1 } }
        ]);
        result = await mapper.collection_getIndexes(collection);
      });
      it('returns indexes for the collection', () => {
        expect(result).to.deep.equal([
          {
            key: {
              _id: 1
            },
            name: '_id_',
            ns: `${dbName}.${collectionName}`,
            v: 2
          },
          {
            key: {
              x: 1
            },
            name: 'x_1',
            ns: `${dbName}.${collectionName}`,
            v: 2
          }
        ]);
      });
    });
    // dropIndexes with the '*' wildcard removes all non-_id indexes.
    describe('dropIndexes', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
        await serviceProvider.createIndexes(dbName, collectionName, [
          { key: { x: 1 }, name: 'index-1' }
        ]);
      });
      it('removes indexes', async() => {
        expect(await getIndexNames(dbName, collectionName)).to.contain('index-1');
        await mapper.collection_dropIndexes(collection, '*');
        expect(await getIndexNames(dbName, collectionName)).not.to.contain('index-1');
      });
    });
    // reIndex: rebuilds indexes and reports the resulting index specs.
    describe('#reIndex', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
      });
      it('runs against the db', async() => {
        const result = await mapper.collection_reIndex(collection);
        expect(
          result
        ).to.deep.equal({
          nIndexesWas: 1,
          nIndexes: 1,
          indexes: [
            {
              v: 2,
              key: {
                '_id': 1
              },
              name: '_id_',
              ns: `${dbName}.${collectionName}`
            }
          ],
          ok: 1
        });
      });
    });
    // The four size accessors below only assert that a number is returned.
    describe('totalIndexSize', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
      });
      it('returns total index size', async() => {
        expect(typeof await mapper.collection_totalIndexSize(collection)).to.equal('number');
      });
    });
    // NOTE(review): the it() labels for dataSize/storageSize/totalSize all
    // say 'returns total index size' — copy-paste of the label above; the
    // assertions themselves test the intended methods.
    describe('dataSize', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
      });
      it('returns total index size', async() => {
        expect(typeof await mapper.collection_dataSize(collection)).to.equal('number');
      });
    });
    describe('storageSize', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
      });
      it('returns total index size', async() => {
        expect(typeof await mapper.collection_storageSize(collection)).to.equal('number');
      });
    });
    describe('totalSize', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
      });
      it('returns total index size', async() => {
        expect(typeof await mapper.collection_totalSize(collection)).to.equal('number');
      });
    });
    // stats: spot-checks the presence of the usual collStats keys.
    describe('stats', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
        await serviceProvider.insertOne(dbName, collectionName, { x: 1 });
      });
      it('returns stats', async() => {
        const stats = await mapper.collection_stats(collection);
        expect(stats).to.contain.keys(
          'avgObjSize',
          'capped',
          'count',
          'indexBuilds',
          'indexDetails',
          'indexSizes',
          'nindexes',
          'ns',
          'ok',
          'scaleFactor',
          'size',
          'storageSize',
          'totalIndexSize',
          'wiredTiger'
        );
      });
    });
    // drop: true + collection gone when it existed, false otherwise.
    describe('drop', () => {
      context('when a collection exists', () => {
        let result;
        beforeEach(async() => {
          await createCollection(dbName, collectionName);
          result = await mapper.collection_drop(collection);
        });
        it('returns true', async() => {
          expect(result).to.be.true;
        });
        it('deletes the collection', async() => {
          await expectCollectionNotToExist(dbName, collectionName);
        });
      });
      context('when a collection does not exist', () => {
        it('returns false', async() => {
          expect(await mapper.collection_drop(collection)).to.be.false;
        });
      });
    });
    describe('exists', () => {
      context('when a collection exists', () => {
        beforeEach(async() => {
          await createCollection(dbName, collectionName);
        });
        it('returns the collection object', async() => {
          expect((await mapper.collection_exists(collection)).name).to.equal(collectionName);
        });
      });
      context('when a collection does not exist', () => {
        // NOTE(review): this branch calls collection_drop, not
        // collection_exists — looks like a copy-paste from the drop suite;
        // confirm the intended assertion against collection_exists.
        it('returns false', async() => {
          expect(await mapper.collection_drop(collection)).to.be.false;
        });
      });
    });
    // runCommand: the collection name is injected into the command document.
    describe('runCommand', () => {
      beforeEach(async() => {
        await createCollection(dbName, collectionName);
      });
      it('runs a command with the collection as parameter and returns the result', async() => {
        expect(await mapper.collection_runCommand(collection, 'collStats')).to.include({
          ok: 1,
          ns: `${dbName}.${collectionName}`
        });
      });
    });
    // findAndModify: update/remove/sort/new/upsert behaviors.
    describe('findAndModify', () => {
      beforeEach(async() => {
        await serviceProvider.insertMany(
          dbName,
          collectionName,
          [
            { doc: 1, foo: 1 },
            { doc: 2, foo: 1 }
          ]
        );
      });
      it('changes only a matching document', async() => {
        await mapper.collection_findAndModify(
          collection,
          {
            query: { doc: 1 },
            update: { foo: 'bar' }
          }
        );
        expect(await findAllWithoutId(dbName, collectionName)).to.deep.equal([
          { foo: 'bar' },
          { doc: 2, foo: 1 }
        ]);
      });
      it('removes only a matching document', async() => {
        await mapper.collection_findAndModify(
          collection,
          {
            query: { doc: 1 },
            remove: true
          }
        );
        expect(await findAllWithoutId(dbName, collectionName)).to.deep.equal([
          { doc: 2, foo: 1 }
        ]);
      });
      it('changes the first matching document with sort', async() => {
        await mapper.collection_findAndModify(
          collection,
          {
            query: { foo: 1 },
            sort: { doc: -1 },
            update: { changed: true }
          }
        );
        expect(await findAllWithoutId(dbName, collectionName)).to.deep.equal([
          { doc: 1, foo: 1 },
          { changed: true }
        ]);
      });
      it('returns the old document if new is not passed', async() => {
        expect(
          await mapper.collection_findAndModify(collection, { query: { doc: 1 }, update: { changed: true } })
        ).to.deep.include({ doc: 1 });
        expect(
          await mapper.collection_findAndModify(collection, { query: { doc: 2 }, remove: true })
        ).to.deep.include({ doc: 2 });
      });
      it('returns the new document if new is passed', async() => {
        expect(
          await mapper.collection_findAndModify(collection, {
            query: { doc: 1 }, new: true, update: { changed: true }
          })
        ).to.deep.include({ changed: true });
      });
      it('allows upserts', async() => {
        await mapper.collection_findAndModify(collection, {
          query: { doc: 3 }, new: true, update: { doc: 3 }, upsert: true
        });
        expect(
          await findAllWithoutId(dbName, collectionName)
        ).to.deep.include({ doc: 3 });
      });
    });
    // renameCollection: with and without dropTarget; the 2s sleep gives the
    // server time before re-listing collections.
    describe('renameCollection', () => {
      context('without dropTarget', () => {
        beforeEach(async() => {
          await serviceProvider.insertOne(dbName, collectionName, { doc: 1 });
          await mapper.collection_renameCollection(
            collection,
            'newName'
          );
        });
        it('renames a collection', async() => {
          await expectCollectionToExist(dbName, 'newName');
          await new Promise((resolve) => { setTimeout(resolve, 2000); });
          await expectCollectionNotToExist(dbName, collectionName);
        });
        it('does not drop documents', async() => {
          expect(
            await findAllWithoutId(
              dbName,
              'newName'
            )
          ).to.deep.include({
            doc: 1
          });
        });
      });
      context('with dropTarget = true', () => {
        beforeEach(async() => {
          await serviceProvider.insertOne(dbName, collectionName, { doc: 1 });
          await mapper.collection_renameCollection(
            collection,
            'newName',
            true
          );
        });
        it('renames a collection', async() => {
          await expectCollectionToExist(dbName, 'newName');
          await new Promise((resolve) => { setTimeout(resolve, 2000); });
          await expectCollectionNotToExist(dbName, collectionName);
        });
        it('drops documents', async() => {
          expect(
            await findAllWithoutId(
              dbName,
              'newName'
            )
          ).to.deep.include({
            doc: 1
          });
        });
      });
    });
    describe('aggregate', () => {
      it('runs an aggregate pipeline on the database', async() => {
        await serviceProvider.insertOne(dbName, collectionName, { x: 1 });
        const cursor = await mapper.collection_aggregate(collection, [{
          $count: 'count'
        }]);
        expect(await (cursor as AggregationCursor).toArray()).to.deep.equal([{ count: 1 }]);
      });
      // NOTE(review): this test's title mentions `explain: true` but its body
      // is identical to the test above and never passes an explain option —
      // it does not actually exercise explain.
      it('runs an explain with explain: true', async() => {
        await serviceProvider.insertOne(dbName, collectionName, { x: 1 });
        const cursor = await mapper.collection_aggregate(collection, [{
          $count: 'count'
        }]);
        expect(await (cursor as AggregationCursor).toArray()).to.deep.equal([{ count: 1 }]);
      });
    });
});
// Integration tests for database-level mapper methods against a live server.
describe('db', () => {
  describe('getCollectionInfos', () => {
    it('returns an array with collection infos', async() => {
      await createCollection(dbName, collectionName);
      expect(await mapper.database_getCollectionInfos(database, {}, { nameOnly: true })).to.deep.equal([{
        name: collectionName,
        type: 'collection'
      }]);
    });
  });
  describe('getCollectionNames', () => {
    it('returns an array with collection names', async() => {
      await createCollection(dbName, collectionName);
      expect(
        await mapper.database_getCollectionNames(database)
      ).to.deep.equal([collectionName]);
    });
  });
  describe('adminCommand', () => {
    it('runs an adminCommand', async() => {
      const result = await mapper.database_adminCommand(
        database, { serverStatus: 1 }
      );
      expect(result.ok).to.equal(1);
      // serverStatus reports the process name, e.g. "mongod"/"mongos".
      expect(result.process).to.match(/^mongo/);
    });
  });
  describe('aggregate', () => {
    it('runs an aggregate pipeline on the database', async() => {
      const cursor = await mapper.database_aggregate(database, [{
        $listLocalSessions: {}
      }]);
      expect((await (cursor as AggregationCursor).toArray())[0]).to.have.keys('_id', 'lastUse');
    });
  });
  describe('dropDatabase', () => {
    let otherDbName;
    beforeEach(() => {
      otherDbName = `${dbName}-2`;
    });
    afterEach(async() => {
      await serviceProvider.dropDatabase(otherDbName);
    });
    // Fixed: the helper returns an array of names, so the return type is
    // Promise<string[]>, not Promise<string>.
    const listDatabases = async(): Promise<string[]> => {
      const { databases } = await serviceProvider.listDatabases('admin');
      return databases.map(db => db.name);
    };
    it('drops only the target database', async() => {
      await createCollection(dbName, collectionName);
      await createCollection(otherDbName, collectionName);
      expect(
        await listDatabases()
      ).to.contain(dbName);
      await mapper.database_dropDatabase(database);
      expect(
        await listDatabases()
      ).not.to.contain(dbName);
      expect(
        await listDatabases()
      ).to.contain(otherDbName);
    });
    it('returns the drop database result', async() => {
      expect(
        await mapper.database_dropDatabase(database)
      ).to.deep.equal({ 'dropped': dbName, 'ok': 1 });
    });
  });
});
// Integration tests for the Explainable wrapper: cursors created through it
// resolve to an explain document instead of query results.
describe('explainable', () => {
let explainable;
beforeEach(() => {
explainable = new Explainable(
mapper,
collection,
'queryPlanner'
);
});
describe('find', () => {
it('returns a cursor that has the explain as result of toReplString', async() => {
const cursor = await mapper.explainable_find(explainable)
.skip(1)
.limit(1);
const result = await cursor.toReplString();
expect(result).to.have.keys([
'ok',
'queryPlanner',
'serverInfo'
]);
});
});
describe('aggregate', () => {
// NOTE(review): this test body is a copy of the `find` test above — it calls
// explainable_find, so the aggregate explain path is never exercised.
// It should presumably call the explainable aggregate method with a
// pipeline; confirm the mapper API and fix.
it('returns a cursor that has the explain as result of toReplString', async() => {
const cursor = await mapper.explainable_find(explainable)
.skip(1)
.limit(1);
const result = await cursor.toReplString();
expect(result).to.have.keys([
'ok',
'queryPlanner',
'serverInfo'
]);
});
});
});
});
|
MaBeuLux88/mongosh
|
packages/i18n/src/catalog.ts
|
<reponame>MaBeuLux88/mongosh<filename>packages/i18n/src/catalog.ts
/**
 * Shape of a message catalog — presumably mapping i18n message identifiers to
 * their translations (this lives in the i18n package; confirm with callers).
 *
 * NOTE(review): the index signature is typed `any`, so reads are unchecked;
 * tightening to `unknown` or a concrete message type would change the public
 * contract and needs a coordinated change in consumers.
 */
interface Catalog {
  [prop: string]: any;
}
export default Catalog;
|
MaBeuLux88/mongosh
|
packages/mapper/src/index.ts
|
// Package entry point: re-export the Mapper class as the default export.
import Mapper from './mapper';
export default Mapper;
|
MaBeuLux88/mongosh
|
packages/browser-repl/src/components/editor.spec.tsx
|
<filename>packages/browser-repl/src/components/editor.spec.tsx
import sinon from 'sinon';
import React from 'react';
import { expect } from '../../testing/chai';
import { mount } from '../../testing/enzyme';
import { Editor } from './editor';
import AceEditor from 'react-ace';
// Unit tests for the <Editor /> component, driving the underlying Ace editor
// directly to simulate key commands.
describe('<Editor />', () => {
  // Extracts the raw Ace editor instance from an enzyme mount wrapper.
  const getAceEditorInstance = (wrapper): any => {
    const aceEditor = wrapper.find(AceEditor);
    return aceEditor.instance().editor as any;
  };
  // Executes the Ace command bound to `key` (matching both win and mac
  // bindings); throws if no command is bound so a missing binding fails loudly.
  const execCommandBoundTo = (aceEditor: any, key: string): void => {
    const commands = Object.values(aceEditor.commands.commands);
    const command: any = commands.find(({ bindKey }) => {
      if (!bindKey) {
        return false;
      }
      if (typeof bindKey === 'string') {
        return key === bindKey;
      }
      const { win, mac } = bindKey as {win: string; mac: string};
      return win === key && mac === key;
    });
    if (!command) {
      throw new Error(`No command bound to ${key}.`);
    }
    aceEditor.execCommand(command.name);
  };
  it('allows to set the value', () => {
    const wrapper = mount(<Editor value={'some value'}/>);
    const aceEditor = getAceEditorInstance(wrapper);
    expect(aceEditor.getValue()).to.equal('some value');
  });
  it('calls onChange when the content changes', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onChange={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    expect(spy).not.to.have.been.called;
    aceEditor.setValue('value');
    expect(spy).to.have.been.calledWith('value');
  });
  it('calls onEnter when enter is pressed', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onEnter={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    expect(spy).not.to.have.been.called;
    execCommandBoundTo(aceEditor, 'Return');
    expect(spy).to.have.been.calledOnce;
  });
  // Fixed typo in the test description: 'fisrt' -> 'first'.
  it('calls onArrowUpOnFirstLine when arrow up is pressed and cursor on first row', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onArrowUpOnFirstLine={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    expect(spy).not.to.have.been.called;
    execCommandBoundTo(aceEditor, 'Up');
    expect(spy).to.have.been.calledOnce;
  });
  it('does not call onArrowUpOnFirstLine when arrow up is pressed and row > 0', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onArrowUpOnFirstLine={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    aceEditor.setValue('row 0\nrow 1');
    aceEditor.moveCursorToPosition({ row: 1, column: 0 });
    // setValue selects the inserted text; clear so truthiness of the
    // selection does not suppress the arrow-key callbacks.
    aceEditor.clearSelection();
    execCommandBoundTo(aceEditor, 'Up');
    expect(spy).not.to.have.been.called;
  });
  it('calls onArrowDownOnLastLine when arrow down is pressed and cursor on last row', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onArrowDownOnLastLine={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    aceEditor.setValue('row 0\nrow 1');
    aceEditor.moveCursorToPosition({ row: 1, column: 0 });
    aceEditor.clearSelection();
    expect(spy).not.to.have.been.called;
    execCommandBoundTo(aceEditor, 'Down');
    expect(spy).to.have.been.calledOnce;
  });
  it('does not call onArrowDownOnLastLine when arrow down is pressed and cursor not on last row', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onArrowDownOnLastLine={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    aceEditor.setValue('row 0\nrow 1');
    execCommandBoundTo(aceEditor, 'Down');
    expect(spy).not.to.have.been.called;
  });
  it('does not call onArrowUpOnFirstLine if text is selected', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onArrowUpOnFirstLine={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    aceEditor.setValue('text');
    aceEditor.selectAll();
    execCommandBoundTo(aceEditor, 'Up');
    expect(spy).not.to.have.been.called;
  });
  it('does not call onArrowDownOnLastLine if text is selected', () => {
    const spy = sinon.spy();
    const wrapper = mount(<Editor onArrowDownOnLastLine={spy} />);
    const aceEditor = getAceEditorInstance(wrapper);
    aceEditor.setValue('text');
    aceEditor.selectAll();
    execCommandBoundTo(aceEditor, 'Down');
    expect(spy).not.to.have.been.called;
  });
});
|
MaBeuLux88/mongosh
|
packages/service-provider-core/src/bulk-write-result.ts
|
<gh_stars>0
/**
 * Result of a bulk write operation as reported by the service provider
 * (mirrors the driver's BulkWriteResult shape).
 */
export default interface BulkWriteResult {
  /**
   * Raw server response; `ok: 1` indicates the batch was acknowledged.
   */
  result: {
    ok: number;
  };
  /**
   * The number of documents inserted.
   */
  insertedCount: number;
  /**
   * The number of existing documents selected for update or replacement.
   */
  matchedCount: number;
  /**
   * The number of existing documents updated or replaced.
   */
  modifiedCount: number;
  /**
   * The number of documents removed.
   */
  deletedCount: number;
  /**
   * The number of upserted documents.
   */
  upsertedCount: number;
  /**
   * Ids of upserted documents, keyed by operation index.
   */
  upsertedIds: {[index: number]: any};
  /**
   * Ids of inserted documents, keyed by operation index.
   */
  insertedIds: {[index: number]: any};
}
|
MaBeuLux88/mongosh
|
packages/mapper/src/mapper.spec.ts
|
<gh_stars>0
/* eslint-disable @typescript-eslint/camelcase */
import chai from 'chai';
import sinonChai from 'sinon-chai';
import { stubInterface, StubbedInstance } from 'ts-sinon';
chai.use(sinonChai);
const { expect } = chai;
import Mapper from './mapper';
import sinon from 'sinon';
import { ServiceProvider, Cursor as ServiceProviderCursor } from '@mongosh/service-provider-core';
import { Collection, Database, Explainable, AggregationCursor } from '@mongosh/shell-api';
describe('Mapper', () => {
// Shared fixtures: a Mapper wired to a fully stubbed ServiceProvider, plus a
// database/collection pair for the tests to operate on.
let mapper: Mapper;
let serviceProvider: StubbedInstance<ServiceProvider>;
let collection: Collection;
let database: Database;
beforeEach(() => {
serviceProvider = stubInterface<ServiceProvider>();
mapper = new Mapper(serviceProvider);
// The REPL context exposes `db`; the mapper needs it set before use.
mapper.context = { db: new Database(mapper, 'test') };
database = new Database(mapper, 'db1');
collection = new Collection(mapper, database, 'coll1');
});
// Tests for REPL-level commands (show ..., it).
describe('commands', () => {
  describe('show databases', () => {
    it('lists databases', async() => {
      serviceProvider.listDatabases.resolves({
        databases: [
          { name: 'db1', sizeOnDisk: 10000, empty: false },
          { name: 'db2', sizeOnDisk: 20000, empty: false },
          { name: 'db3', sizeOnDisk: 30000, empty: false }
        ],
        totalSize: 50000,
        ok: 1
      });
      const expectedOutput = [
        { name: 'db1', sizeOnDisk: 10000, empty: false },
        { name: 'db2', sizeOnDisk: 20000, empty: false },
        { name: 'db3', sizeOnDisk: 30000, empty: false }
      ];
      // 'dbs' and 'databases' are aliases and must produce the same output.
      expect(
        (await mapper.show('dbs')).toReplString()
      ).to.deep.equal(expectedOutput);
      expect(
        (await mapper.show('databases')).toReplString()
      ).to.deep.equal(expectedOutput);
    });
  });
  // Fixed: these describes were nested inside describe('show databases'),
  // so the collection-listing tests reported under the wrong group; they are
  // now siblings under 'commands'. Assertions are unchanged.
  ['collections', 'tables'].forEach((showArgument) => {
    describe(`show ${showArgument}`, () => {
      it('lists collection names', async() => {
        serviceProvider.listCollections.resolves([
          { name: 'coll1' },
          { name: 'coll2' }
        ]);
        const expectedOutput = ['coll1', 'coll2'];
        expect(
          (await mapper.show(showArgument)).toReplString()
        ).to.deep.equal(expectedOutput);
      });
    });
  });
  describe('it', () => {
    describe('when cursor is not present', () => {
      it('returns an empty CursorIterationResult', async() => {
        const result = await mapper.it();
        expect(result.shellApiType()).to.equal('CursorIterationResult');
        expect(result).to.have.lengthOf(0);
      });
    });
    describe('when cursor is present', () => {
      let cursor;
      beforeEach(async() => {
        // Minimal fake cursor: always open, always has a next document.
        cursor = {
          isClosed: (): boolean => false,
          hasNext: (): Promise<boolean> => Promise.resolve(true),
          next: (): Promise<any> => Promise.resolve({})
        };
        serviceProvider.find.returns(cursor);
        await mapper.collection_find(collection, {}, {});
      });
      it('returns CursorIterationResult', async() => {
        const result = await mapper.it();
        expect(result.shellApiType()).to.equal('CursorIterationResult');
      });
      it('returns the next 20 documents', async() => {
        const result = await mapper.it();
        expect(result).to.have.lengthOf(20);
      });
      describe('when hasNext returns false', () => {
        beforeEach(() => {
          // Only three documents remain before hasNext flips to false.
          let i = 3;
          cursor.hasNext = (): Promise<boolean> => Promise.resolve(i-- > 0);
        });
        it('stops', async() => {
          const result = await mapper.it();
          expect(result).to.have.lengthOf(3);
        });
      });
      describe('when invoked with a closed cursor', () => {
        beforeEach(() => {
          cursor.isClosed = (): boolean => true;
          // hasNext must never be consulted on a closed cursor.
          cursor.hasNext = (): any => { throw new Error(''); };
        });
        it('returns an empty CursorIterationResult', async() => {
          const result = await mapper.it();
          expect(result.shellApiType()).to.equal('CursorIterationResult');
          expect(result).to.have.lengthOf(0);
        });
      });
    });
  });
});
describe('collection', () => {
// Unit tests for collection_aggregate argument handling and cursor wrapping.
describe('aggregate', () => {
  let serviceProviderCursor: StubbedInstance<ServiceProviderCursor>;
  beforeEach(() => {
    serviceProviderCursor = stubInterface<ServiceProviderCursor>();
  });
  it('calls serviceProvider.aggregate with pipeline and no options', async() => {
    await mapper.collection_aggregate(
      collection,
      [{ $piplelineStage: {} }]
    );
    expect(serviceProvider.aggregate).to.have.been.calledWith(
      collection._database._name,
      collection._name,
      [{ $piplelineStage: {} }],
      {}
    );
  });
  it('calls serviceProvider.aggregate with no pipeline and no options', async() => {
    await mapper.collection_aggregate(
      collection
    );
    expect(serviceProvider.aggregate).to.have.been.calledWith(
      collection._database._name,
      collection._name,
      [],
      {}
    );
  });
  // Legacy shell syntax: stages may be passed as varargs instead of an array.
  it('calls serviceProvider.aggregate with stages as arguments', async() => {
    await mapper.collection_aggregate(
      collection,
      { $option1: 1 },
      { $option2: 2 },
      { $option3: 3 }
    );
    expect(serviceProvider.aggregate).to.have.been.calledWith(
      collection._database._name,
      collection._name,
      [{ $option1: 1 }, { $option2: 2 }, { $option3: 3 }],
      {}
    );
  });
  it('calls serviceProvider.aggregate with pipleline and options', async() => {
    await mapper.collection_aggregate(
      collection,
      [{ $piplelineStage: {} }],
      { options: true });
    expect(serviceProvider.aggregate).to.have.been.calledWith(
      collection._database._name,
      collection._name,
      [{ $piplelineStage: {} }],
      { options: true }
    );
  });
  it('returns an AggregationCursor that wraps the service provider one', async() => {
    const toArrayResult = [];
    serviceProviderCursor.toArray.resolves(toArrayResult);
    serviceProvider.aggregate.returns(serviceProviderCursor);
    const cursor = await mapper.collection_aggregate(collection, [{
      $piplelineStage: {}
    }]);
    expect(await (cursor as AggregationCursor).toArray()).to.equal(toArrayResult);
  });
  it('throws if serviceProvider.aggregate rejects', async() => {
    const expectedError = new Error();
    serviceProvider.aggregate.throws(expectedError);
    expect(
      await mapper.collection_aggregate(
        collection, [{ $piplelineStage: {} }]
      ).catch(e => e)
    ).to.equal(expectedError);
  });
  it('pass readConcern and writeConcern as dbOption', async() => {
    // Fixed: the call was previously not awaited (floating promise), so the
    // assertion could in principle run before the call completed.
    await mapper.collection_aggregate(
      collection,
      [],
      { otherOption: true, readConcern: { level: 'majority' }, writeConcern: { w: 1 } }
    );
    expect(serviceProvider.aggregate).to.have.been.calledWith(
      collection._database._name,
      collection._name,
      [],
      { otherOption: true },
      { readConcern: { level: 'majority' }, w: 1 }
    );
  });
  it('runs explain if explain true is passed', async() => {
    const expectedExplainResult = {};
    serviceProviderCursor.explain.resolves(expectedExplainResult);
    serviceProvider.aggregate.returns(serviceProviderCursor as any);
    const explainResult = await mapper.collection_aggregate(
      collection,
      [],
      { explain: true }
    );
    expect(explainResult).to.equal(expectedExplainResult);
    expect(serviceProviderCursor.explain).to.have.been.calledOnce;
  });
  it('wont run explain if explain is not passed', async() => {
    serviceProvider.aggregate.returns(serviceProviderCursor as any);
    const cursor = await mapper.collection_aggregate(
      collection,
      [],
      {}
    );
    await cursor.toReplString();
    expect(cursor.shellApiType()).to.equal('AggregationCursor');
    expect(serviceProviderCursor.explain).not.to.have.been.called;
  });
});
// bulkWrite: verifies pass-through of requests and adaptation of the result
// to the shell's { acknowledged, ...counts } shape.
describe('bulkWrite', () => {
let requests;
beforeEach(async() => {
requests = [
{ insertOne: { 'document': { doc: 1 } } }
];
});
it('calls service provider bulkWrite', async() => {
serviceProvider.bulkWrite = sinon.spy(() => Promise.resolve({
result: { ok: 1 }
})) as any;
await mapper.collection_bulkWrite(collection, requests);
expect(serviceProvider.bulkWrite).to.have.been.calledWith(
'db1',
'coll1',
requests
);
});
it('adapts the result', async() => {
serviceProvider.bulkWrite.resolves({
result: { ok: 1 },
insertedCount: 1,
matchedCount: 2,
modifiedCount: 3,
deletedCount: 4,
upsertedCount: 5,
insertedIds: [ 6 ],
upsertedIds: [ 7 ]
});
const result = await mapper.collection_bulkWrite(collection, requests);
// `ok: 1` is surfaced as `acknowledged: true`; counts pass through.
expect(await result.toReplString()).to.be.deep.equal({
acknowledged: true,
insertedCount: 1,
matchedCount: 2,
modifiedCount: 3,
deletedCount: 4,
upsertedCount: 5,
insertedIds: [ 6 ],
upsertedIds: [ 7 ]
});
});
});
// convertToCapped: thin pass-through to the service provider.
describe('convertToCapped', () => {
it('calls service provider convertToCapped', async() => {
serviceProvider.convertToCapped.resolves({ ok: 1 })
const result = await mapper.collection_convertToCapped(collection, 1000);
expect(serviceProvider.convertToCapped).to.have.been.calledWith(
'db1',
'coll1',
1000
);
expect(result).to.deep.equal({ ok: 1 });
});
});
// createIndexes: key patterns are wrapped as { key: pattern } and merged with
// options; non-object options are rejected.
describe('createIndexes', () => {
beforeEach(async() => {
serviceProvider.createIndexes.resolves({ ok: 1 });
});
context('when options is not passed', () => {
it('calls serviceProvider.createIndexes using keyPatterns as keys', async() => {
await mapper.collection_createIndexes(collection, [{ x: 1 }]);
expect(serviceProvider.createIndexes).to.have.been.calledWith(
'db1',
'coll1',
[{ key: { x: 1 } }]
);
});
});
context('when options is an object', () => {
it('calls serviceProvider.createIndexes merging options', async() => {
await mapper.collection_createIndexes(collection, [{ x: 1 }], { name: 'index-1' });
expect(serviceProvider.createIndexes).to.have.been.calledWith(
'db1',
'coll1',
[{ key: { x: 1 }, name: 'index-1' }]
);
});
});
context('when options is not an object', () => {
it('throws an error', async() => {
const error = await mapper.collection_createIndexes(
collection, [{ x: 1 }], 'unsupported' as any
).catch(e => e);
expect(error).to.be.instanceOf(Error);
expect(error.message).to.equal('The "options" argument must be an object.');
});
});
});
// ensureIndex/createIndex are aliases with identical behavior, so the same
// suite is generated for both.
['ensureIndex', 'createIndex'].forEach((method) => {
describe(method, () => {
beforeEach(async() => {
serviceProvider.createIndexes.resolves({ ok: 1 });
});
context('when options is not passed', () => {
it('calls serviceProvider.createIndexes using keys', async() => {
await mapper[`collection_${method}`](collection, { x: 1 });
expect(serviceProvider.createIndexes).to.have.been.calledWith(
'db1',
'coll1',
[{ key: { x: 1 } }]
);
});
});
context('when options is an object', () => {
it('calls serviceProvider.createIndexes merging options', async() => {
await mapper[`collection_${method}`](collection, { x: 1 }, { name: 'index-1' });
expect(serviceProvider.createIndexes).to.have.been.calledWith(
'db1',
'coll1',
[{ key: { x: 1 }, name: 'index-1' }]
);
});
});
context('when options is not an object', () => {
it('throws an error', async() => {
const error = await mapper[`collection_${method}`](
collection, { x: 1 }, 'unsupported' as any
).catch(e => e);
expect(error).to.be.instanceOf(Error);
expect(error.message).to.equal('The "options" argument must be an object.');
});
});
});
});
// getIndexes/getIndexSpecs/getIndices are aliases that all return the raw
// index specifications from the service provider.
['getIndexes', 'getIndexSpecs', 'getIndices'].forEach((method) => {
describe(method, () => {
let result;
beforeEach(async() => {
result = [{
v: 2,
key: {
_id: 1
},
name: '_id_',
ns: 'test.coll1'
}];
serviceProvider.getIndexes.resolves(result);
});
it('returns serviceProvider.getIndexes using keys', async() => {
expect(await mapper[`collection_${method}`](collection)).to.deep.equal(result);
});
});
});
// getIndexKeys: projects just the `key` field out of each index spec.
describe('getIndexKeys', () => {
let result;
beforeEach(async() => {
result = [{
v: 2,
key: {
_id: 1
},
name: '_id_',
ns: 'test.coll1'
},
{
v: 2,
key: {
name: 1
},
name: '_name_',
ns: 'test.coll1'
}];
serviceProvider.getIndexes.resolves(result);
});
it('returns only indexes keys', async() => {
expect(await mapper.collection_getIndexKeys(collection)).to.deep.equal([
{ _id: 1 },
{ name: 1 }
]);
});
});
// dropIndexes: IndexNotFound errors are converted to a plain { ok: 0 } result
// object (matching legacy shell behavior); all other errors propagate.
describe('dropIndexes', () => {
context('when serviceProvider.dropIndexes resolves', () => {
let result;
beforeEach(async() => {
result = { nIndexesWas: 3, ok: 1 };
serviceProvider.dropIndexes.resolves(result);
});
it('returns the result of serviceProvider.dropIndexes', async() => {
expect(await mapper.collection_dropIndexes(collection, 'index_1')).to.deep.equal(result);
});
});
context('when serviceProvider.dropIndexes rejects IndexNotFound', () => {
beforeEach(async() => {
const error = new Error('index not found with name [index_1]');
// Mimic the driver's MongoError shape (code 27 = IndexNotFound).
Object.assign(error, {
ok: 0,
errmsg: 'index not found with name [index_1]',
code: 27,
codeName: 'IndexNotFound',
name: 'MongoError'
});
serviceProvider.dropIndexes.rejects(error);
});
it('returns the error as object', async() => {
expect(await mapper.collection_dropIndexes(collection, 'index_1')).to.deep.equal({
ok: 0,
errmsg: 'index not found with name [index_1]',
code: 27,
codeName: 'IndexNotFound'
});
});
});
context('when serviceProvider.dropIndexes rejects any other error', () => {
let error;
beforeEach(async() => {
error = new Error('Some error');
serviceProvider.dropIndexes.rejects(new Error('Some error'));
});
it('rejects with error', async() => {
let catched;
await mapper.collection_dropIndexes(collection, 'index_1').catch(err => { catched = err; });
expect(catched.message).to.equal(error.message);
});
});
});
// dropIndex: delegates to dropIndexes but rejects '*' and array arguments.
describe('dropIndex', () => {
context('when mapper.collection_dropIndexes resolves', () => {
let result;
beforeEach(async() => {
result = { nIndexesWas: 3, ok: 1 };
mapper.collection_dropIndexes = sinon.mock().resolves(result);
});
it('returns the result of serviceProvider.dropIndexes', async() => {
expect(await mapper.collection_dropIndex(collection, 'index_1')).to.deep.equal(result);
});
it('throws if index is "*"', async() => {
let catched;
await mapper.collection_dropIndex(collection, '*').catch(err => { catched = err; });
expect(catched.message).to.equal(
'To drop indexes in the collection using \'*\', use db.collection.dropIndexes().'
);
});
it('throws if index is an array', async() => {
let catched;
await mapper.collection_dropIndex(collection, ['index-1']).catch(err => { catched = err; });
expect(catched.message).to.equal(
'The index to drop must be either the index name or the index specification document.'
);
});
});
});
// The size accessors below all derive their value from serviceProvider.stats.
describe('totalIndexSize', () => {
beforeEach(() => {
serviceProvider.stats.resolves({
totalIndexSize: 1000
});
});
it('returns totalIndexSize', async() => {
expect(await mapper.collection_totalIndexSize(collection)).to.equal(1000);
expect(serviceProvider.stats).to.have.been.calledOnceWith('db1', 'coll1');
});
// The legacy `verbose` argument is explicitly unsupported.
it('throws an error if called with verbose', async() => {
let catched;
await mapper.collection_totalIndexSize(collection, true)
.catch(err => { catched = err; });
expect(catched.message).to.equal(
'"totalIndexSize" takes no argument. Use db.collection.stats to get detailed information.'
);
});
});
// reIndex: thin pass-through to the service provider.
describe('reIndex', () => {
let result;
beforeEach(() => {
result = { ok: 1 };
serviceProvider.reIndex.resolves(result);
});
it('returns the result of serviceProvider.dropIndexes', async() => {
expect(await mapper.collection_reIndex(collection)).to.deep.equal(result);
expect(serviceProvider.reIndex).to.have.been.calledWith('db1', 'coll1');
});
});
// stats: forwards options (e.g. scale) to the service provider unchanged.
describe('stats', () => {
let result;
beforeEach(() => {
result = {};
serviceProvider.stats.resolves(result);
});
it('returns stats', async() => {
expect(await mapper.collection_stats(collection, { scale: 1 })).to.equal(result);
expect(serviceProvider.stats).to.have.been.calledOnceWith('db1', 'coll1', { scale: 1 });
});
});
// dataSize: stats().size.
describe('dataSize', () => {
let result;
beforeEach(() => {
result = { size: 1000 };
serviceProvider.stats.resolves(result);
});
it('returns stats.size', async() => {
expect(await mapper.collection_dataSize(collection)).to.equal(1000);
expect(serviceProvider.stats).to.have.been.calledOnceWith('db1', 'coll1');
});
});
// storageSize: stats().storageSize.
describe('storageSize', () => {
let result;
beforeEach(() => {
result = { storageSize: 1000 };
serviceProvider.stats.resolves(result);
});
it('returns stats.storageSize', async() => {
expect(await mapper.collection_storageSize(collection)).to.equal(1000);
expect(serviceProvider.stats).to.have.been.calledOnceWith('db1', 'coll1');
});
});
// totalSize: stats().storageSize + stats().totalIndexSize.
describe('totalSize', () => {
let result;
beforeEach(() => {
result = { storageSize: 1000, totalIndexSize: 1000 };
serviceProvider.stats.resolves(result);
});
it('returns sum of storageSize and totalIndexSize', async() => {
expect(await mapper.collection_totalSize(collection)).to.equal(2000);
expect(serviceProvider.stats).to.have.been.calledOnceWith('db1', 'coll1');
});
});
// drop: only NamespaceNotFound errors are swallowed; anything else rethrows.
describe('drop', () => {
it('re-throws an error that is not NamespaceNotFound', async() => {
const error = new Error();
serviceProvider.dropCollection.rejects(error);
expect(await (mapper.collection_drop(collection).catch((e) => e))).to.equal(error);
});
});
describe('getFullName', () => {
it('returns the namespaced collection name', async() => {
expect(mapper.collection_getFullName(collection)).to.equal('db1.coll1');
});
});
describe('getName', () => {
it('returns the namespaced collection name', async() => {
expect(mapper.collection_getName(collection)).to.equal('coll1');
});
});
// findAndModify: query/sort/update are positional arguments to the service
// provider; the remaining fields are forwarded as an options object.
describe('findAndModify', () => {
let mockResult;
beforeEach(() => {
mockResult = { value: {} };
serviceProvider.findAndModify.resolves(mockResult);
});
it('returns result.value from serviceProvider.findAndModify', async() => {
expect(await mapper.collection_findAndModify(collection, {})).to.equal(mockResult.value);
});
it('calls the service provider with an empty query if none is provided', async() => {
await mapper.collection_findAndModify(collection, {});
expect(serviceProvider.findAndModify).to.have.been.calledWith(
collection._database._name,
collection._name,
{}
);
});
it('calls the service provider with the correct options', async() => {
const options = {
remove: true,
new: true,
fields: { projection: 1 },
upsert: true,
bypassDocumentValidation: true,
writeConcern: { writeConcern: 1 },
collation: { collation: 1 },
arrayFilters: [ { filter: 1 } ]
};
await mapper.collection_findAndModify(collection, {
query: { query: 1 },
sort: { sort: 1 },
update: { update: 1 },
...options
});
expect(serviceProvider.findAndModify).to.have.been.calledWith(
collection._database._name,
collection._name,
{ query: 1 },
{ sort: 1 },
{ update: 1 },
options
);
});
});
// renameCollection: success is normalized to { ok: 1 }; MongoErrors are
// converted into { ok: 0, code, errmsg, codeName } result objects, while
// generic errors rethrow; newName must be a string.
describe('renameCollection', () => {
let mockResult;
beforeEach(() => {
mockResult = {};
serviceProvider.renameCollection.resolves(mockResult);
});
it('returns { ok: 1 } if the operation is successful', async() => {
expect(
await mapper.collection_renameCollection(
collection, 'newName'
)
).to.deep.equal({ ok: 1 });
});
it('calls the service provider with dropTarget=false if none is provided', async() => {
await mapper.collection_renameCollection(collection, 'newName');
expect(serviceProvider.renameCollection).to.have.been.calledWith(
collection._database._name,
collection._name,
'newName',
{ dropTarget: false }
);
});
it('calls the service provider with the correct options', async() => {
await mapper.collection_renameCollection(collection, 'newName', true);
expect(serviceProvider.renameCollection).to.have.been.calledWith(
collection._database._name,
collection._name,
'newName',
{ dropTarget: true }
);
});
it('rethrows a generic error', async() => {
const error: any = new Error();
serviceProvider.renameCollection.rejects(error);
expect(
await mapper.collection_renameCollection(
collection, 'newName'
).catch(e => e)
).to.equal(error);
});
it('returns a MongoError with { ok: 0 } instead of throwing', async() => {
const error: any = new Error();
error.name = 'MongoError';
error.code = 123;
error.errmsg = 'msg';
error.codeName = 'NamespaceNotFound';
serviceProvider.renameCollection.rejects(error);
expect(
await mapper.collection_renameCollection(
collection, 'newName'
)
).to.deep.equal({
code: error.code,
errmsg: error.errmsg,
codeName: error.codeName,
ok: 0
});
});
it('throws an error if newName is not a string', async() => {
expect(
(await mapper.collection_renameCollection(
collection, {} as any
).catch(e => e)).message
).to.equal('The "newName" argument must be a string.');
});
});
// runCommand: the command name becomes the first key of the command document,
// with the collection name as its value; options are merged in.
describe('runCommand', () => {
it('calls serviceProvider.runCommand with the collection set', async() => {
await mapper.collection_runCommand(collection, 'someCommand', {
someOption: 1
});
expect(serviceProvider.runCommand).to.have.been.calledWith(
collection._database._name,
{
someCommand: collection._name,
someOption: 1
}
);
});
it('can be called without options', async() => {
await mapper.collection_runCommand(collection, 'someCommand');
expect(serviceProvider.runCommand).to.have.been.calledWith(
collection._database._name,
{
someCommand: collection._name
}
);
});
it('throws an error if commandName is not a string', async() => {
expect(
(await mapper.collection_runCommand(
collection, {} as any
).catch(e => e)).message
).to.equal('The "commandName" argument must be a string.');
});
it('throws an error if commandName is passed as option', async() => {
expect(
(await mapper.collection_runCommand(
collection, 'commandName', { commandName: 1 } as any
).catch(e => e)).message
).to.equal('The "commandName" argument cannot be passed as an option to "runCommand".');
});
});
// explain: returns an Explainable wrapper; verbosity is validated and
// defaults to 'queryPlanner'.
describe('explain', () => {
it('returns an Explainable object', () => {
expect(mapper.collection_explain(collection)).to.have.instanceOf(Explainable);
});
it('accepts valid verbosity', () => {
expect(
mapper.collection_explain(collection, 'queryPlanner')._verbosity
).to.equal('queryPlanner');
expect(
mapper.collection_explain(collection, 'executionStats')._verbosity
).to.equal('executionStats');
expect(
mapper.collection_explain(collection, 'allPlansExecution')._verbosity
).to.equal('allPlansExecution');
});
it('throws in case of non valid verbosity', () => {
expect(() => {
mapper.collection_explain(collection, 'badVerbosityArgument');
}).to.throw('verbosity can only be one of queryPlanner, executionStats, allPlansExecution. Received badVerbosityArgument.');
});
it('sets the right default verbosity', () => {
const explainable = mapper.collection_explain(collection);
expect(explainable._verbosity).to.equal('queryPlanner');
});
});
});
describe('database', () => {
// getCollectionInfos: forwards filter and options to listCollections verbatim.
describe('getCollectionInfos', () => {
it('returns the result of serviceProvider.listCollections', async() => {
const filter = { name: 'abc' };
const options = { nameOnly: true };
const result = [{ name: 'coll1' }];
serviceProvider.listCollections.resolves(result);
expect(await mapper.database_getCollectionInfos(
database,
filter,
options)).to.deep.equal(result);
expect(serviceProvider.listCollections).to.have.been.calledOnceWith('db1', filter, options);
});
});
// getCollectionNames: always queries with nameOnly and projects the names.
describe('getCollectionNames', () => {
it('returns the result of serviceProvider.listCollections', async() => {
const result = [{ name: 'coll1' }];
serviceProvider.listCollections.resolves(result);
expect(await mapper.database_getCollectionNames(
database)).to.deep.equal(['coll1']);
expect(serviceProvider.listCollections).to.have.been.calledOnceWith(
'db1', {}, { nameOnly: true });
});
});
// runCommand: executes against this database; result and rejection pass
// through untouched.
describe('runCommand', () => {
it('calls serviceProvider.runCommand on the database', async() => {
await mapper.database_runCommand(database, { someCommand: 'someCollection' });
expect(serviceProvider.runCommand).to.have.been.calledWith(
database._name,
{
someCommand: 'someCollection'
}
);
});
it('returns whatever serviceProvider.runCommand returns', async() => {
const expectedResult = { ok: 1 };
serviceProvider.runCommand.resolves(expectedResult);
const result = await mapper.database_runCommand(database, { someCommand: 'someCollection' });
expect(result).to.deep.equal(expectedResult);
});
it('throws if serviceProvider.runCommand rejects', async() => {
const expectedError = new Error();
serviceProvider.runCommand.rejects(expectedError);
const catchedError = await mapper.database_runCommand(database, { someCommand: 'someCollection' })
.catch(e => e);
expect(catchedError).to.equal(expectedError);
});
});
// adminCommand: same as runCommand but always targets the 'admin' database.
describe('adminCommand', () => {
it('calls serviceProvider.runCommand with the admin database', async() => {
await mapper.database_adminCommand(database, { someCommand: 'someCollection' });
expect(serviceProvider.runCommand).to.have.been.calledWith(
'admin',
{
someCommand: 'someCollection'
}
);
});
it('returns whatever serviceProvider.runCommand returns', async() => {
const expectedResult = { ok: 1 };
serviceProvider.runCommand.resolves(expectedResult);
const result = await mapper.database_adminCommand(database, { someCommand: 'someCollection' });
expect(result).to.deep.equal(expectedResult);
});
it('throws if serviceProvider.runCommand rejects', async() => {
const expectedError = new Error();
serviceProvider.runCommand.rejects(expectedError);
const catchedError = await mapper.database_adminCommand(database, { someCommand: 'someCollection' })
.catch(e => e);
expect(catchedError).to.equal(expectedError);
});
});
describe('aggregate', () => {
  let serviceProviderCursor: StubbedInstance<ServiceProviderCursor>;
  beforeEach(() => {
    // Fresh stubbed cursor per test so call counts don't leak between cases.
    serviceProviderCursor = stubInterface<ServiceProviderCursor>();
  });
  it('calls serviceProvider.aggregateDb with pipleline and options', async() => {
    await mapper.database_aggregate(
      database, [{ $piplelineStage: {} }], { options: true });
    expect(serviceProvider.aggregateDb).to.have.been.calledWith(
      database._name,
      [{ $piplelineStage: {} }],
      { options: true }
    );
  });
  it('returns an AggregationCursor that wraps the service provider one', async() => {
    // The shell cursor should delegate toArray() to the provider cursor.
    const toArrayResult = [];
    serviceProviderCursor.toArray.resolves(toArrayResult);
    serviceProvider.aggregateDb.returns(serviceProviderCursor);
    const cursor = await mapper.database_aggregate(database, [{ $piplelineStage: {} }]);
    expect(await (cursor as AggregationCursor).toArray()).to.equal(toArrayResult);
  });
  it('throws if serviceProvider.aggregateDb rejects', async() => {
    const expectedError = new Error();
    serviceProvider.aggregateDb.throws(expectedError);
    expect(
      await mapper.database_aggregate(
        database, [{ $piplelineStage: {} }]
      ).catch(e => e)
    ).to.equal(expectedError);
  });
  it('pass readConcern and writeConcern as dbOption', async() => {
    // Concern options are split out of the aggregation options and passed
    // as a separate dbOptions argument (writeConcern flattened to 'w').
    mapper.database_aggregate(
      database,
      [],
      { otherOption: true, readConcern: { level: 'majority' }, writeConcern: { w: 1 } }
    );
    expect(serviceProvider.aggregateDb).to.have.been.calledWith(
      database._name,
      [],
      { otherOption: true },
      { readConcern: { level: 'majority' }, w: 1 }
    );
  });
  it('runs explain if explain true is passed', async() => {
    // With explain: true the cursor's explain() result is returned directly.
    const expectedExplainResult = {};
    serviceProviderCursor.explain.resolves(expectedExplainResult);
    serviceProvider.aggregateDb.returns(serviceProviderCursor as any);
    const explainResult = await mapper.database_aggregate(
      database,
      [],
      { explain: true }
    );
    expect(explainResult).to.equal(expectedExplainResult);
    expect(serviceProviderCursor.explain).to.have.been.calledOnce;
  });
  it('wont run explain if explain is not passed', async() => {
    // Without explain the caller gets a plain AggregationCursor back.
    serviceProvider.aggregateDb.returns(serviceProviderCursor as any);
    const cursor = await mapper.database_aggregate(
      database,
      [],
      {}
    );
    await cursor.toReplString();
    expect(cursor.shellApiType()).to.equal('AggregationCursor');
    expect(serviceProviderCursor.explain).not.to.have.been.called;
  });
});
describe('getSiblingDB', () => {
  it('returns a database', async() => {
    const otherDb = mapper.database_getSiblingDB(database, 'otherdb');
    expect(otherDb).to.be.instanceOf(Database);
    expect(otherDb._name).to.equal('otherdb');
  });
  it('does not change the context', () => {
    // Unlike 'use', getSiblingDB must not switch the REPL's current db.
    const contextDbBefore = mapper.context.db;
    mapper.database_getSiblingDB(database, 'otherdb');
    expect(mapper.context.db).to.equal(contextDbBefore);
  });
  it('throws if name is not a string', () => {
    expect(() => {
      mapper.database_getSiblingDB(database, undefined);
    }).to.throw('Database name must be a string. Received undefined.');
  });
  it('throws if name is empty', () => {
    expect(() => {
      mapper.database_getSiblingDB(database, '');
    }).to.throw('Database name cannot be empty.');
  });
  it('reuses db instances', () => {
    // Repeated lookups for the same name must return the cached instance.
    const otherDb = mapper.database_getSiblingDB(database, 'otherdb');
    expect(
      mapper.database_getSiblingDB(database, 'otherdb')
    ).to.equal(otherDb);
  });
});
describe('getCollection', () => {
  it('returns a collection for the database', async() => {
    const coll = mapper.database_getCollection(database, 'coll');
    expect(coll).to.be.instanceOf(Collection);
    expect(coll._name).to.equal('coll');
    expect(coll._database).to.equal(database);
  });
  it('throws if name is not a string', () => {
    expect(() => {
      mapper.database_getCollection(database, undefined);
    }).to.throw('Collection name must be a string. Received undefined.');
  });
  it('throws if name is empty', () => {
    expect(() => {
      mapper.database_getCollection(database, '');
    }).to.throw('Collection name cannot be empty.');
  });
  it('allows to use collection names that would collide with methods', () => {
    // e.g. db.getCollection('getCollection') must still work.
    const coll = mapper.database_getCollection(database, 'getCollection');
    expect(coll).to.be.instanceOf(Collection);
    expect(coll._name).to.equal('getCollection');
  });
  it('allows to use collection names that starts with _', () => {
    const coll = mapper.database_getCollection(database, '_coll1');
    expect(coll).to.be.instanceOf(Collection);
    expect(coll._name).to.equal('_coll1');
  });
  it('reuses collections', () => {
    // Same name twice must return the cached Collection instance.
    expect(
      mapper.database_getCollection(database, 'coll')
    ).to.equal(mapper.database_getCollection(database, 'coll'));
  });
});
describe('dropDatabase', () => {
  it('calls serviceProvider.dropDatabase on the database', async() => {
    // The write concern argument is forwarded to the provider unchanged.
    await mapper.database_dropDatabase(database, { w: 1 });
    expect(serviceProvider.dropDatabase).to.have.been.calledWith(
      database._name,
      { w: 1 }
    );
  });
  it('returns whatever serviceProvider.dropDatabase returns', async() => {
    const expectedResult = { ok: 1 };
    serviceProvider.dropDatabase.resolves(expectedResult);
    const result = await mapper.database_dropDatabase(database);
    expect(result).to.deep.equal(expectedResult);
  });
  it('throws if serviceProvider.dropDatabase rejects', async() => {
    const expectedError = new Error();
    serviceProvider.dropDatabase.rejects(expectedError);
    const catchedError = await mapper.database_dropDatabase(database)
      .catch(e => e);
    expect(catchedError).to.equal(expectedError);
  });
});
});
describe('explainable', () => {
  let explainable: Explainable;
  beforeEach(() => {
    // Fresh Explainable wrapping the stubbed collection at queryPlanner verbosity.
    explainable = new Explainable(mapper, collection, 'queryPlanner');
  });
  describe('getCollection', () => {
    it('returns the explainable collection', () => {
      expect(
        explainable.getCollection(explainable)
      ).to.equal(collection);
    });
  });
  describe('getVerbosity', () => {
    it('returns the explainable verbosity', () => {
      expect(
        mapper.explainable_getVerbosity(explainable)
      ).to.equal('queryPlanner');
    });
  });
  describe('setVerbosity', () => {
    it('sets the explainable verbosity', () => {
      expect(explainable._verbosity).not.to.equal('allPlansExecution');
      mapper.explainable_setVerbosity(explainable, 'allPlansExecution');
      expect(explainable._verbosity).to.equal('allPlansExecution');
    });
    it('validates the verbosity', () => {
      expect(() => {
        mapper.explainable_setVerbosity(explainable, 'badVerbosityArgument');
      }).to.throw('verbosity can only be one of queryPlanner, executionStats, allPlansExecution. Received badVerbosityArgument.');
    });
  });
  describe('find', () => {
    let cursorStub;
    let explainResult;
    beforeEach(async() => {
      explainResult = { ok: 1 };
      // Stub find() to return a cursor whose explain() yields explainResult.
      collection.find = sinon.spy(() => ({
        explain: sinon.spy(() => explainResult)
      }));
      cursorStub = await mapper.explainable_find(
        explainable,
        { query: 1 },
        { projection: 1 }
      );
    });
    it('calls collection.find with arguments', () => {
      expect(collection.find).to.have.been.calledOnceWithExactly(
        { query: 1 },
        { projection: 1 }
      );
    });
    it('returns an cursor that has shellApiType when evaluated', () => {
      expect(cursorStub.shellApiType()).to.equal('ExplainableCursor');
    });
    context('when calling toReplString on the result', () => {
      it('calls explain with verbosity', async() => {
        await cursorStub.toReplString();
        expect(
          cursorStub.explain
        ).to.have.been.calledOnceWithExactly('queryPlanner');
      });
      it('returns the explain result', async() => {
        expect(
          await cursorStub.toReplString()
        ).to.equal(explainResult);
      });
    });
  });
  describe('aggregate', () => {
    let explainResult;
    let expectedExplainResult;
    let cursor;
    beforeEach(async() => {
      // BUG FIX: previously `explainResult` was assigned here and then
      // overwritten below, while `expectedExplainResult` stayed undefined.
      // That made the "returns the explain result" assertion compare
      // undefined with undefined and pass vacuously. Initialize the
      // *expected* value so the test actually verifies the result.
      expectedExplainResult = { ok: 1 };
      cursor = {
        explain: sinon.spy(() => Promise.resolve(expectedExplainResult))
      };
      collection.aggregate = sinon.spy(() => Promise.resolve(cursor));
      explainResult = await mapper.explainable_aggregate(
        explainable,
        { pipeline: 1 },
        { aggregate: 1 }
      );
    });
    it('calls collection.aggregate with arguments', () => {
      // explain: false is injected so the inner aggregate does not explain.
      expect(collection.aggregate).to.have.been.calledOnceWithExactly(
        { pipeline: 1 },
        { aggregate: 1, explain: false }
      );
    });
    it('calls explain with verbosity', async() => {
      expect(
        cursor.explain
      ).to.have.been.calledOnceWithExactly('queryPlanner');
    });
    it('returns the explain result', () => {
      expect(explainResult).to.equal(expectedExplainResult);
    });
  });
});
});
|
MaBeuLux88/mongosh
|
packages/browser-repl/src/components/shell-output-line.spec.tsx
|
<gh_stars>0
import React from 'react';
import { expect } from '../../testing/chai';
import { shallow, mount } from '../../testing/enzyme';
import { ShellOutputLine } from './shell-output-line';
import { HelpOutput } from './types/help-output';
import { CursorOutput } from './types/cursor-output';
import { CursorIterationResultOutput } from './types/cursor-iteration-result-output';
import { SimpleTypeOutput } from './types/simple-type-output';
import { ObjectOutput } from './types/object-output';
import { ErrorOutput } from './types/error-output';
describe('<ShellOutputLine />', () => {
  it('renders a string value', () => {
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: 'some text' }} />);
    expect(wrapper.find(SimpleTypeOutput)).to.have.lengthOf(1);
  });
  it('renders an integer value', () => {
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: 1 }} />);
    expect(wrapper.find(SimpleTypeOutput)).to.have.lengthOf(1);
  });
  it('renders an object', () => {
    const object = { x: 1 };
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: object }} />);
    expect(wrapper.find(ObjectOutput)).to.have.lengthOf(1);
  });
  it('renders undefined', () => {
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: undefined }} />);
    expect(wrapper.find(SimpleTypeOutput)).to.have.lengthOf(1);
  });
  it('renders null', () => {
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: null }} />);
    expect(wrapper.find(SimpleTypeOutput)).to.have.lengthOf(1);
  });
  it('renders function', () => {
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: (x): any => x }} />);
    expect(wrapper.find(SimpleTypeOutput)).to.have.lengthOf(1);
  });
  it('renders class', () => {
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: class C {} }} />);
    expect(wrapper.find(SimpleTypeOutput)).to.have.lengthOf(1);
  });
  it('renders Help', () => {
    const wrapper = shallow(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'Help',
      value: {
        help: 'Help',
        docs: '#',
        attr: []
      } }
    } />);
    expect(wrapper.find(HelpOutput)).to.have.lengthOf(1);
  });
  it('renders Cursor', () => {
    const wrapper = shallow(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'Cursor',
      value: []
    }} />);
    expect(wrapper.find(CursorOutput)).to.have.lengthOf(1);
  });
  it('renders CursorIterationResult', () => {
    const wrapper = shallow(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'CursorIterationResult',
      value: []
    }} />);
    expect(wrapper.find(CursorIterationResultOutput)).to.have.lengthOf(1);
  });
  it('renders Database', () => {
    const wrapper = mount(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'Database',
      value: 'value string'
    }} />);
    expect(wrapper.text()).to.contain('value string');
  });
  it('renders Collection', () => {
    const wrapper = mount(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'Collection',
      value: 'value string'
    }} />);
    expect(wrapper.text()).to.contain('value string');
  });
  // BUG FIX: this suite previously contained two it() blocks with the
  // identical title 'renders ShowCollectionsResult', which makes reporter
  // output ambiguous and hides one test from `--grep`/`.only` selection.
  // Both are kept but renamed to describe what they actually cover.
  it('renders ShowCollectionsResult with a string value', () => {
    const wrapper = mount(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'ShowCollectionsResult',
      value: 'value string'
    }} />);
    expect(wrapper.text()).to.contain('value string');
  });
  it('renders ShowDatabasesResult', () => {
    // Sizes are expected to be humanized (kB / MB / GB) per database.
    const wrapper = mount(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'ShowDatabasesResult',
      value: [
        { name: 'admin', sizeOnDisk: 45056, empty: false },
        { name: 'dxl', sizeOnDisk: 8192, empty: false },
        { name: 'supplies', sizeOnDisk: 2236416, empty: false },
        { name: 'test', sizeOnDisk: 5664768, empty: false },
        { name: 'test', sizeOnDisk: 599999768000, empty: false }
      ]
    }} />);
    expect(wrapper.text()).to.contain('admin 45.1 kB\ndxl 8.19 kB\nsupplies 2.24 MB\ntest 5.66 MB\ntest 600 GB');
  });
  it('renders ShowCollectionsResult with a list of collection names', () => {
    const wrapper = mount(<ShellOutputLine entry={{
      type: 'output',
      shellApiType: 'ShowCollectionsResult',
      value: [
        'nested_documents', 'decimal128', 'coll', 'people_imported', 'cats'
      ]
    }} />);
    expect(wrapper.text()).to.contain('nested_documents\ndecimal128\ncoll\npeople_imported\ncats');
  });
  it('renders an error', () => {
    const err = new Error('x');
    const wrapper = shallow(<ShellOutputLine entry={{ type: 'output', value: err }} />);
    expect(wrapper.find(ErrorOutput)).to.have.lengthOf(1);
  });
  it('renders an input line', () => {
    const wrapper = mount(<ShellOutputLine entry={{ type: 'input', value: 'some text' }} />);
    expect(wrapper.text()).to.contain('some text');
  });
});
|
MaBeuLux88/mongosh
|
packages/service-provider-browser/src/stitch-service-provider-browser.ts
|
import {
ServiceProvider,
Result,
BulkWriteResult,
Document,
Cursor,
DatabaseOptions,
CommandOptions,
WriteConcern
} from '@mongosh/service-provider-core';
import StitchTransport from './stitch-transport';
import i18n from '@mongosh/i18n';
import {
AnonymousCredential,
RemoteMongoClient,
Stitch,
StitchAppClient
} from 'mongodb-stitch-browser-sdk';
/**
 * i18n key for the message logged when anonymous Stitch authentication fails.
 */
const INIT_ERROR = 'transport-browser.stitch-browser-transport.auth-error';
/**
 * Default Stitch service name (Atlas).
 */
const ATLAS = 'mongodb-atlas';
/**
 * Encapsulates logic for communicating with a MongoDB instance via
 * Stitch in the browser.
 *
 * Supported operations delegate to a StitchTransport; everything the
 * Stitch SDK cannot do throws 'Method not implemented.'.
 */
class StitchServiceProviderBrowser implements ServiceProvider {
  // Underlying transport wrapping the Stitch app client and remote Mongo client.
  readonly stitchTransport: StitchTransport<StitchAppClient, RemoteMongoClient>;
  /**
   * Create a StitchBrowserTransport from a Stitch app id.
   *
   * Logs (but does not rethrow) authentication failures; the provider is
   * still returned in that case.
   *
   * @param {String} stitchAppId - The Stitch app id.
   * @param {String} serviceName - The Stitch service name.
   *
   * @returns {Promise} The promise of the Stitch server transport.
   */
  static async fromAppId(
    stitchAppId: string,
    serviceName: string) : Promise<StitchServiceProviderBrowser> {
    const client = Stitch.initializeDefaultAppClient(stitchAppId);
    try {
      await client.auth.loginWithCredential(new AnonymousCredential());
    } catch (err) {
      /* eslint no-console:0 */
      console.log(i18n.__(INIT_ERROR), err);
    }
    return new StitchServiceProviderBrowser(client, serviceName);
  }
  /**
   * Run an aggregation pipeline.
   *
   * @note: Passing a null collection will cause the
   *   aggregation to run on the DB.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Array} pipeline - The aggregation pipeline.
   *
   * @returns {Cursor} The aggregation cursor.
   */
  aggregate(
    database: string,
    collection: string,
    pipeline: object[] = []) : Cursor {
    return this.stitchTransport.aggregate(database, collection, pipeline);
  }
  /**
   * Not implemented in Stitch.
   *
   * @returns {Promise} The rejected promise.
   */
  bulkWrite() : Promise<BulkWriteResult> {
    return this.stitchTransport.bulkWrite();
  }
  /**
   * Get an exact document count from the collection.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   * @param {Object} options - The count options.
   *
   * @returns {Promise} The promise of the count.
   */
  countDocuments(
    database: string,
    collection: string,
    filter: object = {},
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.countDocuments(database, collection, filter, options);
  }
  /**
   * Instantiate a new Stitch server transport with a connected stitch
   * client instance.
   *
   * @param {Client} stitchClient - The Stitch client instance.
   * @param {String} serviceName - The Mongo service name.
   */
  constructor(stitchClient: StitchAppClient, serviceName: string = ATLAS) {
    const mongoClient = stitchClient.
      getServiceClient(RemoteMongoClient.factory, serviceName);
    this.stitchTransport =
      new StitchTransport<StitchAppClient, RemoteMongoClient>(stitchClient, mongoClient);
  }
  // --------------------------------------------------------------------
  // ServiceProvider members with no Stitch equivalent. Each throws
  // synchronously; callers should not rely on a rejected promise here.
  // --------------------------------------------------------------------
  aggregateDb(database: string, pipeline: Document[], options?: Document, dbOptions?: DatabaseOptions): Cursor {
    throw new Error("Method not implemented.");
  }
  count(db: string, coll: string, query?: Document, options?: Document, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  isCapped(database: string, collection: string, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  getIndexes(database: string, collection: string, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  listCollections(database: string, filter?: Document, options?: Document, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  stats(database: string, collection: string, options?: Document, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  dropDatabase(database: string, writeConcern?: WriteConcern, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  findAndModify(database: string, collection: string, query: Document, sort: any[] | Document, update: Document, options?: Document, dbOptions?: DatabaseOptions) {
    throw new Error("Method not implemented.");
  }
  save(database: string, collection: string, doc: Document, options?: Document, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  remove(database: string, collection: string, query: Document, options?: Document, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  convertToCapped(database: string, collection: string, size: number, options?: CommandOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  createIndexes(database: string, collection: string, indexSpecs: Document[], options?: Document, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  dropIndexes(database: string, collection: string, indexes: string | Document | Document[] | string[], commandOptions?: CommandOptions, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  reIndex(database: string, collection: string, options?: CommandOptions, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  dropCollection(database: string, collection: string, dbOptions?: DatabaseOptions): Promise<boolean> {
    throw new Error("Method not implemented.");
  }
  renameCollection(database: string, oldName: string, newName: string, options?: Document, dbOptions?: DatabaseOptions): Promise<any> {
    throw new Error("Method not implemented.");
  }
  // NOTE(review): parameter is named 'boolean' (implicitly any) — likely a
  // force-close flag; confirm against the ServiceProvider interface.
  close(boolean: any): Promise<void> {
    throw new Error("Method not implemented.");
  }
  getCmdLineOpts(): Promise<Result> {
    throw new Error('Method not implemented.');
  }
  getTopology(): any {
    throw new Error('Method not implemented.');
  }
  buildInfo(): Promise<Result> {
    throw new Error('Method not implemented.');
  }
  listDatabases(database: string): Promise<any> {
    throw new Error("Method not implemented.");
  }
  /**
   * Delete multiple documents from the collection.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   *
   * @returns {Promise} The promise of the result.
   */
  deleteMany(
    database: string,
    collection: string,
    filter: object = {}) : Promise<Result> {
    return this.stitchTransport.deleteMany(database, collection, filter);
  }
  /**
   * Delete one document from the collection.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   *
   * @returns {Promise} The promise of the result.
   */
  deleteOne(
    database: string,
    collection: string,
    filter: object = {}) : Promise<Result> {
    return this.stitchTransport.deleteOne(database, collection, filter);
  }
  /**
   * Not implemented in Stitch.
   *
   * @returns {Promise} The rejected promise.
   */
  distinct() : Promise<any> {
    return this.stitchTransport.distinct();
  }
  /**
   * Not implemented in Stitch.
   *
   * @returns {Promise} The rejected promise.
   */
  estimatedDocumentCount() : Promise<Result> {
    return this.stitchTransport.estimatedDocumentCount();
  }
  /**
   * Find documents in the collection.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   * @param {Object} options - The find options.
   *
   * @returns {Cursor} The cursor.
   */
  find(
    database: string,
    collection: string,
    filter: object = {},
    options: object = {}) : any {
    return this.stitchTransport.find(database, collection, filter, options);
  }
  /**
   * Find one document and delete it.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   * @param {Object} options - The find options.
   *
   * @returns {Promise} The promise of the result.
   */
  findOneAndDelete(
    database: string,
    collection: string,
    filter: object = {},
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.findOneAndDelete(database, collection, filter, options);
  }
  /**
   * Find one document and replace it.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   * @param {Object} replacement - The replacement.
   * @param {Object} options - The find options.
   *
   * @returns {Promise} The promise of the result.
   */
  findOneAndReplace(
    database: string,
    collection: string,
    filter: object = {},
    replacement: object = {},
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.findOneAndReplace(database, collection, filter, replacement, options);
  }
  /**
   * Find one document and update it.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   * @param {(Object|Array)} update - The update.
   * @param {Object} options - The find options.
   *
   * @returns {Promise} The promise of the result.
   */
  findOneAndUpdate(
    database: string,
    collection: string,
    filter: object = {},
    update: object = {},
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.findOneAndUpdate(database, collection, filter, update, options);
  }
  /**
   * Insert many documents into the colleciton.
   *
   * NOTE(review): 'options' is accepted but not forwarded to the transport —
   * confirm this is intentional (Stitch may not support insert options).
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Array} docs - The documents.
   * @param {Object} options - The insert many options (currently ignored).
   *
   * @returns {Promise} The promise of the result.
   */
  insertMany(
    database: string,
    collection: string,
    docs: object[] = [],
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.insertMany(database, collection, docs);
  }
  /**
   * Insert one document into the collection.
   *
   * NOTE(review): 'options' is accepted but not forwarded to the transport —
   * confirm this is intentional.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} doc - The document.
   * @param {Object} options - The insert one options (currently ignored).
   *
   * @returns {Promise} The promise of the result.
   */
  insertOne(
    database: string,
    collection: string,
    doc: object = {},
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.insertOne(database, collection, doc);
  }
  /**
   * Not implemented in Stitch.
   *
   * @returns {Promise} The rejected promise.
   */
  replaceOne() : Promise<Result> {
    return this.stitchTransport.replaceOne();
  }
  /**
   * Not implemented in Stitch.
   *
   * @returns {Promise} The rejected promise.
   */
  runCommand() : Promise<Result> {
    return this.stitchTransport.runCommand();
  }
  /**
   * Update many document.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   * @param {(Object|Array)} update - The updates.
   * @param {Object} options - The update options.
   *
   * @returns {Promise} The promise of the result.
   */
  updateMany(
    database: string,
    collection: string,
    filter: object = {},
    update: object = {},
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.updateMany(database, collection, filter, update, options);
  }
  /**
   * Update a document.
   *
   * @param {String} database - The database name.
   * @param {String} collection - The collection name.
   * @param {Object} filter - The filter.
   * @param {(Object|Array)} update - The updates.
   * @param {Object} options - The update options.
   *
   * @returns {Promise} The promise of the result.
   */
  updateOne(
    database: string,
    collection: string,
    filter: object = {},
    update: object = {},
    options: object = {}) : Promise<Result> {
    return this.stitchTransport.updateOne(database, collection, filter, update, options);
  }
  /**
   * Get the current user id.
   *
   * @returns {String} The user id.
   */
  get userId() : string {
    return this.stitchTransport.userId;
  }
}
export default StitchServiceProviderBrowser;
|
MaBeuLux88/mongosh
|
packages/history/src/history.ts
|
import redactInfo from 'mongodb-redact';
/**
 * Modifies the command history array based on sensitive information:
 * removes the most recent entry entirely when it contains a
 * credential-related command, and otherwise optionally redacts
 * sensitive info in place.
 *
 * @param {string[]} history - Array of commands, where the first entry is
 *   the most recent.
 * @param {boolean} redact - Whether to redact sensitive info in the
 *   retained entry. Defaults to false.
 */
export function changeHistory(history: string[], redact = false): void {
  // Guard: nothing to inspect or redact on an empty history (previously a
  // redact call on an empty array would write to history[0]).
  if (history.length === 0) return;
  // No 'g' flag: a global regex keeps lastIndex state between .test()
  // calls, making results order-dependent — a well-known footgun.
  const hiddenCommands = /createUser|auth|updateUser|changeUserPassword/;
  if (hiddenCommands.test(history[0])) {
    // Drop the credential-bearing command from history entirely.
    history.shift();
    return;
  }
  if (redact) history[0] = redactInfo(history[0]);
}
|
MaBeuLux88/mongosh
|
packages/cli-repl/test/helpers.ts
|
<gh_stars>0
import { spawn } from 'child_process';
import path from 'path';
import stripAnsi from 'strip-ansi';
/**
 * Repeatedly invokes `fn` until it stops throwing/rejecting or the timeout
 * budget is exhausted. Resolves on the first success; rejects with the last
 * observed error once all attempts are used up.
 *
 * @param fn - Callback to retry (sync or async).
 * @param options - `frequency` is the delay between attempts in ms
 *   (default 100); `timeout` is the total budget in ms (default 10000).
 */
export async function eventually(fn, options: { frequency?: number; timeout?: number } = {}): Promise<any> {
  const settings = { frequency: 100, timeout: 10000, ...options };
  const pause = (): Promise<void> =>
    new Promise((resolve) => setTimeout(resolve, settings.frequency));
  let remaining = Math.round(settings.timeout / settings.frequency);
  let lastError;
  while (remaining) {
    remaining--;
    try {
      await fn();
      return;
    } catch (e) {
      lastError = e;
    }
    await pause();
  }
  throw lastError;
}
// Tracks every shell process spawned via startShell so killOpenShells can
// clean them all up after a test run.
const openShells = [];
/**
 * Spawn the mongosh CLI (bin/mongosh.js) as a child node process with the
 * given arguments. stdout/stderr are captured incrementally (ANSI escapes
 * stripped) into the returned `stdio` object.
 *
 * @param args - CLI arguments forwarded to mongosh.
 * @returns An object with the child `process` and the live `stdio` capture.
 */
export function startShell(...args): any {
  const execPath = path.resolve(__dirname, '..', 'bin', 'mongosh.js');
  const shell = spawn('node', [execPath, ...args], {
    stdio: [ 'pipe', 'pipe', 'pipe' ]
  });
  const stdio = {
    stdin: shell.stdin,
    stdout: '',
    stderr: ''
  };
  shell.stdout.on('data', (chunk) => {
    // Strip ANSI color codes so tests can assert on plain text.
    const plainChunk = stripAnsi(chunk.toString());
    stdio.stdout += plainChunk;
  });
  shell.stderr.on('data', (chunk) => {
    const plainChunk = stripAnsi(chunk.toString());
    stdio.stderr += plainChunk;
  });
  openShells.push(shell);
  return {
    process: shell,
    stdio,
  };
}
/**
 * Kill every shell process started via startShell. Intended for afterEach /
 * after hooks so no child processes outlive the test suite.
 */
export function killOpenShells(): any {
  while (openShells.length) {
    openShells.pop().kill();
  }
}
|
MaBeuLux88/mongosh
|
packages/service-provider-server/src/compass/compass-service-provider.ts
|
<filename>packages/service-provider-server/src/compass/compass-service-provider.ts
import CliServiceProvider from '../cli-service-provider';
import { MongoClient } from 'mongodb';
// Minimal structural view of compass's DataService: only the nested
// MongoClient is needed here.
interface DataService {
  client: {
    client: MongoClient;
  };
}
/**
 * A service provider that is meant to be used in compass.
 *
 * Inherits all behavior from CliServiceProvider; the only addition is a
 * factory that extracts the MongoClient from a compass DataService.
 */
class CompassServiceProvider extends CliServiceProvider {
  /**
   * Creates a new CompassServiceProvider that uses compass
   * data service (https://www.npmjs.com/package/mongodb-data-service) for
   * transport.
   *
   * @param {DataService} dataService - a DataService instance
   * @returns {CompassServiceProvider} - a new CompassServiceProvider
   */
  static fromDataService(dataService: DataService): CompassServiceProvider {
    const mongoClient = dataService.client.client;
    return new CompassServiceProvider(mongoClient);
  }
}
export default CompassServiceProvider;
|
MaBeuLux88/mongosh
|
packages/shell-api/src/help.spec.ts
|
<filename>packages/shell-api/src/help.spec.ts
import sinon from 'sinon';
import { Help } from './help';
import { expect } from 'chai';
describe('Help', () => {
  let translate;
  beforeEach(() => {
    // Fake translator that tags its input so tests can verify it was applied.
    translate = sinon.fake((x) => `translated: ${x}`);
  });
  describe('#shellApiType', () => {
    it('returns Help', () => {
      expect(new Help({ help: 'help' }, { translate }).shellApiType()).to.equal('Help');
    });
  });
  describe('#toReplString', () => {
    it('returns the Help a plain object', () => {
      // toReplString must produce a new plain object, not the Help instance.
      const properties = {
        help: 'help'
      };
      const help = new Help(properties, { translate });
      expect(help.toReplString().constructor.name).to.equal('Object');
      expect(help.toReplString()).to.not.equal(help);
    });
    it('returns translated help', () => {
      const properties = {
        help: 'help'
      };
      expect(
        new Help(properties, { translate })
          .toReplString()
          .help
      ).to.equal('translated: help');
    });
    it('returns docs', () => {
      // The docs URL is run through the translator as well.
      const properties = {
        help: 'help',
        docs: 'https://example.com'
      };
      expect(
        new Help(properties, { translate })
          .toReplString()
          .docs
      ).to.equal('translated: https://example.com');
    });
    it('returns default attr', () => {
      // attr defaults to an empty array when not supplied.
      const properties = {
        help: 'help'
      };
      const help = new Help(properties, { translate });
      expect(help.toReplString().attr).to.deep.equal([]);
    });
    it('returns attr with translated description', () => {
      const properties = {
        help: 'help',
        attr: [{ name: 'key', description: 'description' }]
      };
      expect(
        new Help(properties, { translate })
          .toReplString()
          .attr
      ).to.deep.equal([{ name: 'key', description: 'translated: description' }]);
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/shell-evaluator/src/shell-evaluator.ts
|
import AsyncWriter from '@mongosh/async-rewriter';
import Mapper from '@mongosh/mapper';
import { signatures, Help, ShellBson, toIterator } from '@mongosh/shell-api';
import { ServiceProvider } from '@mongosh/service-provider-core';
// Event bus used for telemetry/diagnostic events (e.g. 'mongosh:help').
interface Bus {
  emit(...args: any[]): void;
}
// Host container that can toggle telemetry on behalf of the evaluator.
// NOTE(review): the parameter is named 'boolean' and is implicitly any —
// presumably an on/off flag; confirm and give it a proper name/type.
interface Container {
  toggleTelemetry(boolean): void;
}
// Evaluation result: 'type' is the shell API type name (or null for plain
// JS values), 'value' is the REPL-printable value.
interface Result {
  type: string;
  value: any;
}
class ShellEvaluator {
private mapper: Mapper;
private asyncWriter: AsyncWriter;
private bus: Bus;
private container: Container;
constructor(
serviceProvider: ServiceProvider,
bus: Bus,
container?: Container
) {
this.mapper = new Mapper(serviceProvider, bus);
this.asyncWriter = new AsyncWriter(signatures);
this.bus = bus;
this.container = container;
}
public toReplString(): string {
return JSON.parse(JSON.stringify(this));
}
public shellApiType(): string {
return 'ShellEvaluator';
}
public help(): Help {
this.bus.emit('mongosh:help');
return new Help({
help: 'shell-api.help.description',
docs: 'https://docs.mongodb.com/manual/reference/method',
attr: [
{
name: 'use',
description: 'shell-api.help.help.use'
},
{
name: 'it',
description: 'shell-api.help.help.it'
},
{
name: 'show databases',
description: 'shell-api.help.help.show-databases'
},
{
name: 'show collections',
description: 'shell-api.help.help.show-collections'
},
{
name: '.exit',
description: 'shell-api.help.help.exit'
}
]
});
}
/**
* Returns true if a value is a shell api type
*
* @param {any} evaluationResult - The result of evaluation
*/
private isShellApiType(evaluationResult: any): boolean {
return evaluationResult &&
typeof evaluationResult.shellApiType === 'function' &&
typeof evaluationResult.toReplString === 'function';
}
public revertState(): void {
this.asyncWriter.symbols.revertState();
}
public saveState(): void {
this.asyncWriter.symbols.saveState();
}
/**
 * Checks for linux-style commands then evaluates input using originalEval.
 *
 * Built-in commands (use/show/it/help/telemetry toggles) are dispatched to
 * the mapper or container; anything else is rewritten by the async writer
 * and passed to the underlying evaluator.
 *
 * @param {function} originalEval - the javascript evaluator.
 * @param {String} input - user input.
 * @param {Context} context - the execution context.
 * @param {String} filename
 */
private async innerEval(originalEval: any, input: string, context: any, filename: string): Promise<any> {
  // Tokenize on spaces after stripping one trailing semicolon; the first
  // token selects a built-in command, the rest are its arguments.
  const argv = input.trim().replace(/;$/, '').split(' ');
  const cmd = argv[0];
  argv.shift();
  switch (cmd) {
    case 'use':
      return this.mapper.use(argv[0]);
    case 'show':
      return this.mapper.show(argv[0]);
    case 'it':
      return this.mapper.it();
    case 'help':
      return this.help();
    // NOTE: these match the literal input tokens, parentheses included.
    case 'enableTelemetry()':
      if (this.container) {
        return this.container.toggleTelemetry(true);
      }
      return;
    case 'disableTelemetry()':
      if (this.container) {
        return this.container.toggleTelemetry(false);
      }
      return;
    default: {
      // Braces scope the const to this case (fixes no-case-declarations).
      // Save symbol state first so a failed evaluation can be rolled back.
      this.saveState();
      const rewrittenInput = this.asyncWriter.process(input);
      this.bus.emit(
        'mongosh:rewritten-async-input',
        { original: input.trim(), rewritten: rewrittenInput.trim() }
      );
      try {
        return await originalEval(rewrittenInput, context, filename);
      } catch (err) {
        // This is for browser/Compass
        this.revertState();
        throw err;
      }
    }
  }
}
/**
* Evaluates the input code and wraps the result with the type
*
* @param {function} originalEval - the javascript evaluator.
* @param {String} input - user input.
* @param {Context} context - the execution context.
* @param {String} filename
*/
public async customEval(originalEval, input, context, filename): Promise<Result> {
const evaluationResult = await this.innerEval(
originalEval,
input,
context,
filename
);
if (this.isShellApiType(evaluationResult)) {
return {
type: evaluationResult.shellApiType(),
value: await evaluationResult.toReplString()
};
}
return { value: evaluationResult, type: null };
}
/**
 * Prepare a `contextObject` as global context and set it as context
 * for the mapper. Add each attribute to the AsyncRewriter also.
 *
 * The `contextObject` is prepared so that it can be used as global object
 * for the repl evaluation.
 *
 * @note The `contextObject` is mutated, it will retain all of its existing
 * properties but also have the global shell api objects and functions.
 *
 * @param {Object} - contextObject an object used as global context.
 */
setCtx(contextObject: any): void {
  // Add API methods for VSCode and scripts
  contextObject.use = this.mapper.use.bind(this.mapper);
  contextObject.show = this.mapper.show.bind(this.mapper);
  contextObject.it = this.mapper.it.bind(this.mapper);
  contextObject.help = this.help.bind(this);
  contextObject.toIterator = toIterator;
  // print resolves shell API objects to their REPL string before logging.
  contextObject.print = async(arg) => {
    if (arg.toReplString) {
      console.log(await arg.toReplString());
    } else {
      console.log(arg);
    }
  };
  contextObject.printjson = contextObject.print;
  // Expose the BSON constructors (ObjectId, Long, ...) as globals.
  Object.assign(contextObject, ShellBson);
  // Add global shell objects
  contextObject.db = this.mapper.databases.test;
  this.asyncWriter.symbols.initializeApiObjects({ db: signatures.Database });
  // Update mapper and log
  this.mapper.context = contextObject;
  this.bus.emit(
    'mongosh:setCtx',
    { method: 'setCtx', arguments: { db: this.mapper.context.db } }
  );
}
}
export default ShellEvaluator;
|
MaBeuLux88/mongosh
|
packages/browser-runtime-core/src/interpreter/preprocessor/wrap-in-async-function-call.spec.ts
|
import { expect } from 'chai';
import { parse } from '@babel/parser';
import generate from '@babel/generator';
import { wrapInAsyncFunctionCall } from './wrap-in-async-function-call';
describe('wrapInAsyncFunctionCall', () => {
  // Parses the code, wraps the resulting AST in an async IIFE, and
  // regenerates source text for comparison.
  const testAllowTopLevelAwait = (code: string): string => {
    const ast = wrapInAsyncFunctionCall(parse(code));
    return generate(ast).code;
  };
  it('wraps code in function call', () => {
    expect(testAllowTopLevelAwait('1')).to.equal('(async () => {\n 1;\n})();');
  });
});
|
MaBeuLux88/mongosh
|
packages/build/src/compile-exec.ts
|
<filename>packages/build/src/compile-exec.ts<gh_stars>0
import path from 'path';
import { exec as compile } from 'pkg';
import Platform from './platform';
/**
 * The executable name enum.
 */
enum ExecName {
  Windows = 'mongosh.exe',
  Posix = 'mongosh'
};
/**
 * Target enum.
 *
 * Values are the platform identifiers passed to the compiler via '-t'
 * (see compileExec below).
 */
enum Target {
  Windows = 'win',
  MacOs = 'macos',
  Linux = 'linux'
}
/**
 * Determine the name of the executable based on the
 * provided platform.
 *
 * @param {string} platform - The platform.
 *
 * @returns {string} The name.
 */
function determineExecName(platform: string): string {
  // Windows binaries need the .exe suffix; everything else is bare.
  return platform === Platform.Windows ? ExecName.Windows : ExecName.Posix;
}
/**
 * Determine the target name.
 *
 * @param {string} platform - The platform.
 *
 * @returns {string} The target name.
 */
const determineTarget = (platform: string): string => {
  if (platform === Platform.Windows) {
    return Target.Windows;
  }
  if (platform === Platform.MacOs) {
    return Target.MacOs;
  }
  // Any other platform is treated as Linux.
  return Target.Linux;
};
/**
 * Get the path to the executable itself.
 *
 * @param {string} outputDir - The directory to save in.
 * @param {string} platform - The platform.
 *
 * @returns {string} The path.
 */
const executablePath = (outputDir: string, platform: string): string => {
  const execName = determineExecName(platform);
  return path.join(outputDir, execName);
};
/**
 * Compile the executable.
 *
 * @param {string} input - The root js of the app.
 * @param {string} outputDir - The output directory for the executable.
 * @param {string} platform - The platform.
 *
 * @returns {Promise<void>} Resolves when compilation completes.
 */
const compileExec = async(input: string, outputDir: string, platform: string): Promise<void> => {
  const executable = executablePath(outputDir, platform);
  console.log('mongosh: creating binary:', executable);
  // Invoke pkg with the entry point, output path and platform target.
  await compile([
    input,
    '-o',
    executable,
    '-t',
    determineTarget(platform)
  ]);
};
export default compileExec;
export {
ExecName,
Target,
determineExecName,
determineTarget,
executablePath
};
|
MaBeuLux88/mongosh
|
packages/shell-api/src/collection.spec.ts
|
import sinon from 'sinon';
import Mapper from '../../mapper';
import { Collection, Database } from './shell-api';
import * as signatures from './shell-api-signatures';
import { expect } from 'chai';
/**
* Test that a collection method proxies the respective Mapper method correctly,
* with the right arguments and returning the right result.
*
* It ensures:
* - that the method is defined in the shell api and that is meant to be a function
* - that the mapper method to be proxied to exists
* - that the mapper method is called with a collection as first argument and with
* the rest of invokation arguments.
* - that the result of mapper invokation is returned.
*
* @param {String} name - the name of the method to invoke
*/
function testWrappedMethod(name: string): void {
const attribute = signatures.Collection.attributes[name];
expect(attribute).to.exist;
expect(attribute.type).to.equal('function');
const mock = sinon.mock();
const mapper: Mapper = sinon.createStubInstance(Mapper, {
[`collection_${name}`]: mock
});
const args = [1, 2, 3];
const retVal = {};
const database = new Database('db1');
const collection = new Collection(mapper, database, 'coll1');
mock.withArgs(collection, ...args).returns(retVal);
const result = collection[name](...args);
mock.verify();
expect(result).to.equal(retVal);
}
describe('Collection', () => {
  describe('#help', () => {
    const collection = new Collection();
    it('returns the translated text', () => {
      expect((collection.findOne as any).help().help).to.include('db.collection.findOne(query, projection)');
    });
  });
  // Every method below is a thin proxy for the matching mapper method;
  // testWrappedMethod verifies the delegation contract for each one.
  [
    'convertToCapped',
    'find',
    'findOne',
    'createIndexes',
    'createIndex',
    'ensureIndex',
    'getIndexes',
    'getIndexSpecs',
    'getIndices',
    'getIndexKeys',
    'dropIndexes',
    'totalIndexSize',
    'dropIndex',
    'reIndex',
    'stats',
    'dataSize',
    'storageSize',
    'totalSize',
    'drop',
    'exists',
    'getFullName',
    'getName',
    'explain'
  ].forEach((methodName) => {
    describe(`#${methodName}`, () => {
      it(`wraps mapper.collection_${methodName}`, () => {
        testWrappedMethod(methodName);
      });
    });
  });
  describe('#getDB', () => {
    it('returns the db', () => {
      const database = new Database('db1');
      const collection = new Collection(new Mapper({}), database, 'coll1');
      expect(collection.getDB()).to.equal(database);
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/browser-runtime-core/src/runtime.ts
|
<gh_stars>0
import { Completion } from './autocompleter/autocompleter';
// Any value that may live in the evaluation context (intentionally untyped).
export type ContextValue = any;
// The tagged result of evaluating a piece of shell code: the shell API type
// name plus the evaluated value.
export type EvaluationResult = {
  shellApiType: string;
  value: any;
};
export interface Runtime {
  /**
   * Evaluates code
   *
   * @param {string} code - A string of code
   * @return {Promise<EvaluationResult>} the result of the evaluation
   */
  evaluate(code: string): Promise<EvaluationResult>;
  /**
   * Get shell api completions give a code prefix
   *
   * @param {string} code - The code to be completed
   */
  getCompletions(code: string): Promise<Completion[]>;
}
|
MaBeuLux88/mongosh
|
packages/shell-api/src/cursor.spec.ts
|
<gh_stars>0
import sinon from 'sinon';
import { Cursor } from './shell-api';
import { expect } from 'chai';
describe('Cursor', () => {
  describe('fluent interface', () => {
    // Each fluent method must forward to the wrapped driver cursor and
    // return the shell cursor itself for chaining.
    ['limit', 'skip'].forEach((method) => {
      describe(method, () => {
        let wrappee;
        let cursor;
        beforeEach(() => {
          wrappee = {
            [method]: sinon.spy()
          };
          cursor = new Cursor({}, wrappee);
        });
        it('returns the same cursor', () => {
          expect(cursor[method]()).to.equal(cursor);
        });
        it(`calls wrappee.${method} with arguments`, () => {
          const arg = {};
          cursor[method](arg);
          expect(wrappee[method].calledWith(arg)).to.equal(true);
        });
      });
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/service-provider-server/src/index.ts
|
import NodeOptions from './node/node-options';
import CliServiceProvider from './cli-service-provider';
import CompassServiceProvider from './compass/compass-service-provider';
export {
CliServiceProvider,
CompassServiceProvider,
NodeOptions
};
|
MaBeuLux88/mongosh
|
packages/service-provider-server/src/node/node-options.ts
|
import NodeAuthOptions from './node-auth-options';
import NodeFleOptions from './node-fle-options';
/**
 * Valid options that can be used with the Node driver. This is a
 * partial list of things that need to be mapped.
 */
export default interface NodeOptions {
  // Authentication settings.
  auth?: NodeAuthOptions;
  authSource?: string;
  authMechanism?: string;
  // Client-side field level encryption configuration.
  autoEncryption?: NodeFleOptions;
  explicitlyIgnoreSession?: boolean;
  loggerLevel?: string;
  retryWrites?: boolean;
  // TLS/SSL settings.
  tls?: boolean;
  tlsAllowInvalidCertificates?: boolean;
  tlsAllowInvalidHostnames?: boolean;
  tlsCAFile?: string;
  tlsCertificateKeyFile?: string;
  tlsCertificateKeyFilePassword?: string;
}
|
MaBeuLux88/mongosh
|
packages/service-provider-core/src/closable.ts
|
<gh_stars>0
export default interface Closable {
  /**
   * Close the connection.
   *
   * @param {boolean} force - Whether to force close.
   */
  // Fixed: the parameter was written as `close(boolean)`, which declares a
  // parameter *named* "boolean" with an implicit `any` type rather than a
  // typed `force` flag. Implementations taking a boolean remain compatible.
  close(force: boolean): Promise<void>;
}
|
MaBeuLux88/mongosh
|
packages/mapper/src/mapper.ts
|
/* eslint-disable @typescript-eslint/camelcase */
import {
AggregationCursor,
BulkWriteResult,
Collection,
Cursor,
Database,
DeleteResult,
InsertManyResult,
InsertOneResult,
UpdateResult,
CursorIterationResult,
CommandResult,
Explainable
} from '@mongosh/shell-api';
import {
ServiceProvider,
Document,
DatabaseOptions,
Cursor as ServiceProviderCursor,
WriteConcern
} from '@mongosh/service-provider-core';
import { EventEmitter } from 'events';
import { MongoshInvalidInputError, MongoshInternalError } from '@mongosh/errors';
export default class Mapper {
private serviceProvider: ServiceProvider;
private currentCursor: Cursor | AggregationCursor;
public databases: any;
private messageBus: EventEmitter;
public context: any;
// serviceProvider performs the actual driver calls; messageBus (optional)
// receives telemetry/logging events and defaults to a private emitter.
constructor(serviceProvider, messageBus?) {
  this.serviceProvider = serviceProvider;
  /* Internal state gets stored in mapper, state that is visible to the user
   * is stored in ctx */
  this.currentCursor = null;
  // Start with a 'test' database entry — presumably mirroring the classic
  // mongo shell default database; confirm against shell startup behavior.
  this.databases = { test: new Database(this, 'test') };
  this.messageBus = messageBus || new EventEmitter();
}
// Emit an api-call event for an Explainable method, tagging db and collection.
private _emitExplainableApiCall(explainable: Explainable, methodName: string, methodArguments: Document = {}): void {
  this._emitApiCall({
    method: methodName,
    class: 'Explainable',
    db: explainable._collection._database._name,
    coll: explainable._collection._name,
    arguments: methodArguments
  });
}
// Emit an api-call event for a Database method.
private _emitDatabaseApiCall(database: Database, methodName: string, methodArguments: Document = {}): void {
  this._emitApiCall({
    method: methodName,
    class: 'Database',
    db: database._name,
    arguments: methodArguments
  });
}
// Emit an api-call event for a Collection method.
private _emitCollectionApiCall(collection: Collection, methodName: string, methodArguments: Document = {}): void {
  this._emitApiCall({
    method: methodName,
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name,
    arguments: methodArguments
  });
}
// Central funnel: every api-call event goes out on the message bus.
private _emitApiCall(event: {
  method: string;
  class: string;
  arguments: Document;
  [otherProps: string]: any;
}): void {
  this.messageBus.emit('mongosh:api-call', event);
}
// Look up (or lazily create and cache) the Database wrapper for `name`,
// validating that the name is a non-empty string.
private _getDatabase(name: string): Database {
  if (typeof name !== 'string') {
    throw new MongoshInvalidInputError(
      `Database name must be a string. Received ${typeof name}.`);
  }
  if (!name.trim()) {
    throw new MongoshInvalidInputError('Database name cannot be empty.');
  }
  if (!(name in this.databases)) {
    this.databases[name] = new Database(this, name);
  }
  return this.databases[name];
}
// Implements the `use <db>` shell command: switches the context's current db.
use(db: string): any {
  this.messageBus.emit( 'mongosh:use', { db });
  this.context.db = this._getDatabase(db);
  return `switched to db ${db}`;
}
// Implements the `show <arg>` shell command for databases and collections;
// any other argument is rejected with a descriptive error.
async show(arg): Promise<CommandResult> {
  this.messageBus.emit( 'mongosh:show', { method: `show ${arg}` });
  switch (arg) {
    case 'databases':
    case 'dbs':
      return await this._showDatabases();
    case 'collections':
    case 'tables':
      return await this._showCollections();
    default:
      const validArguments = [
        'databases',
        'dbs',
        'collections',
        'tables'
      ];
      const err = new MongoshInvalidInputError(
        `'${arg}' is not a valid argument for "show".\nValid arguments are: ${validArguments.join(', ')}`
      );
      this.messageBus.emit('mongosh:error', err);
      throw err;
  }
}
// Lists collection names of the context's current database.
private async _showCollections(): Promise<CommandResult> {
  const collectionNames = await this.database_getCollectionNames(this.context.db);
  return new CommandResult('ShowCollectionsResult', collectionNames);
}
// Lists all databases via the admin listDatabases command.
private async _showDatabases(): Promise<CommandResult> {
  const result = await this.serviceProvider.listDatabases('admin');
  if (!('databases' in result)) {
    const err = new MongoshInternalError('Got invalid result from "listDatabases"');
    this.messageBus.emit('mongosh:error', err);
    throw err;
  }
  return new CommandResult('ShowDatabasesResult', result.databases);
}
/**
 * Implements the `it` command: pull the next batch (up to 20 documents)
 * from the most recently created cursor. Returns an empty result when
 * there is no open cursor.
 */
async it(): Promise<any> {
  const results = new CursorIterationResult();
  const cursor = this.currentCursor;
  if (!cursor || cursor.isClosed()) {
    return results;
  }
  let fetched = 0;
  // TODO: ensure that assigning cursor doesn't iterate
  while (fetched < 20 && await cursor.hasNext()) {
    results.push(await cursor.next());
    fetched += 1;
  }
  return results;
}
// Shared aggregation driver: runs against a collection when collectionName
// is set, otherwise against the database. Returns the explain output when
// the 'explain' option was passed, otherwise an AggregationCursor which
// also becomes the current cursor for `it`.
private async _aggregate(
  databaseName: string,
  collectionName: string,
  pipeline: Document[],
  options?: Document
): Promise<AggregationCursor|CommandResult> {
  const {
    providerOptions,
    dbOptions,
    explain
  } = this._adaptAggregateOptions(options);
  let providerCursor: ServiceProviderCursor;
  if (collectionName) {
    providerCursor = this.serviceProvider.aggregate(
      databaseName,
      collectionName,
      pipeline,
      providerOptions,
      dbOptions
    );
  } else {
    providerCursor = this.serviceProvider.aggregateDb(
      databaseName,
      pipeline,
      providerOptions,
      dbOptions
    );
  }
  const cursor = new AggregationCursor(this, providerCursor);
  if (explain) {
    return await cursor.explain();
  }
  this.currentCursor = cursor;
  return cursor;
}
/**
 * Run a command against the db.
 *
 * @param {Database} database - the db object.
 * @param {Object} cmd - the command spec.
 *
 * @returns {Promise} The promise of command results. TODO: command result object
 */
database_runCommand(database: Database, cmd: Document): Promise<any> {
  this._emitDatabaseApiCall(database, 'runCommand', { cmd });
  return this.serviceProvider.runCommand(database._name, cmd);
}
// Like runCommand but always targets the 'admin' database.
database_adminCommand(database: Database, cmd: Document): Promise<any> {
  this._emitDatabaseApiCall(database, 'adminCommand', { cmd });
  return this.serviceProvider.runCommand('admin', cmd);
}
// Database-level aggregation (null collection → db-wide pipeline).
async database_aggregate(
  database: Database,
  pipeline: Document[],
  options?: Document
): Promise<AggregationCursor|CommandResult> {
  this._emitDatabaseApiCall(database, 'aggregate', { pipeline, options });
  return this._aggregate(database._name, null, pipeline, options);
}
/**
 * Returns an array of collection infos
 *
 * @param {String} database - The database.
 * @param {Document} filter - The filter.
 * @param {Document} options - The options.
 *
 * @return {Promise}
 */
async database_getCollectionInfos(
  database: Database,
  filter: Document = {},
  options: Document = {}): Promise<any> {
  this._emitDatabaseApiCall(database, 'getCollectionInfos', { filter, options });
  return await this.serviceProvider.listCollections(
    database._name,
    filter,
    options
  );
}
/**
 * Returns an array of collection names
 *
 * @param {String} database - The database.
 * @param {Document} filter - The filter.
 * @param {Document} options - The options.
 *
 * @return {Promise}
 */
async database_getCollectionNames(
  database: Database
): Promise<any> {
  this._emitDatabaseApiCall(database, 'getCollectionNames');
  // nameOnly avoids fetching full collection metadata from the server.
  const infos = await this.database_getCollectionInfos(
    database,
    {},
    { nameOnly: true }
  );
  return infos.map(collection => collection.name);
}
/**
 * Run an aggregation pipeline.
 *
 * @note: Passing a null coll will cause the aggregation to run on the DB.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 * @note: Shell API sets readConcern via options in object, data provider API
 * expects it as a dbOption object.
 * @note: CRUD API provides batchSize and maxAwaitTimeMS which the shell does not.
 *
 *
 * @param {Collection} collection - The collection class.
 * @param {Array} pipeline - The aggregation pipeline.
 * @param {Object} options - The pipeline options.
 *    <allowDiskUse, cursor, maxTimeMS, bypassDocumentValidation,
 *    readConcern, collation, hint, comment, writeConcern>
 *
 * OR each stage can be passed as an argument.
 *
 * @returns {AggregationCursor} The promise of the aggregation cursor.
 */
collection_aggregate(
  collection: Collection,
  ...args: any[]
): Promise<AggregationCursor|CommandResult> {
  let options;
  let pipeline;
  // Two call shapes: aggregate(pipelineArray, options?) or
  // aggregate(stage1, stage2, ...) with stages as positional arguments.
  if (args.length === 0 || Array.isArray(args[0])) {
    options = args[1] || {};
    pipeline = args[0] || [];
  } else {
    options = {};
    pipeline = args;
  }
  this._emitCollectionApiCall(
    collection,
    'aggregate',
    { options, pipeline }
  );
  return this._aggregate(
    collection._database._name,
    collection._name,
    pipeline,
    options
  );
}
// Split shell-style aggregate options into driver options, db-level options
// (read/write concern) and the explain flag.
private _adaptAggregateOptions(options: any = {}): {
  providerOptions: Document;
  dbOptions: DatabaseOptions;
  explain: boolean;
} {
  const providerOptions = { ...options };
  const dbOptions: DatabaseOptions = {};
  let explain = false;
  if ('readConcern' in providerOptions) {
    dbOptions.readConcern = options.readConcern;
    delete providerOptions.readConcern;
  }
  if ('writeConcern' in providerOptions) {
    Object.assign(dbOptions, options.writeConcern);
    delete providerOptions.writeConcern;
  }
  if ('explain' in providerOptions) {
    explain = providerOptions.explain;
    delete providerOptions.explain;
  }
  return { providerOptions, dbOptions, explain };
}
/**
 * Execute a mix of write operations.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 *
 * @param {Collection} collection - The collection class.
 * @param {Array} operations - The bulk write requests.
 * @param {Object} options - The bulk write options.
 *  <writeConcern, ordered>
 *
 * @returns {BulkWriteResult} The promise of the result.
 */
async collection_bulkWrite(
  collection: Collection,
  operations: Document,
  options: Document = {}
): Promise<BulkWriteResult> {
  const dbOptions: DatabaseOptions = {};
  const db = collection._database._name;
  const coll = collection._name;
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'bulkWrite',
      class: 'Collection',
      db, coll, arguments: { options }
    }
  );
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  const result = await this.serviceProvider.bulkWrite(
    db,
    coll,
    operations,
    options,
    dbOptions
  );
  // Wrap the raw driver result in the shell API result object.
  return new BulkWriteResult(
    !!result.result.ok, // acknowledged
    result.insertedCount,
    result.insertedIds,
    result.matchedCount,
    result.modifiedCount,
    result.deletedCount,
    result.upsertedCount,
    result.upsertedIds
  );
}
/**
 * Deprecated count command.
 *
 * @note: Shell API passes readConcern via options, data provider API via
 * collection options.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} query - The filter.
 * @param {Object} options - The count options.
 *  <limit, skip, hint, maxTimeMS, readConcern, collation>
 * @returns {Integer} The promise of the count.
 */
collection_count(collection, query = {}, options: any = {}): any {
  const dbOpts: any = {};
  const db = collection._database._name;
  const coll = collection._name;
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'count',
      class: 'Collection',
      db, coll, arguments: { query, options }
    }
  );
  // readConcern travels as a db-level option for the provider API.
  if ('readConcern' in options) {
    dbOpts.readConcern = options.readConcern;
  }
  return this.serviceProvider.count(db, coll, query, options, dbOpts);
}
/**
 * Get an exact document count from the coll.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} query - The filter.
 * @param {Object} options - The count options.
 *  <limit, skip, hint, maxTimeMS>
 *
 * @returns {Integer} The promise of the count.
 */
collection_countDocuments(collection, query, options: any = {}): any {
  const db = collection._database._name;
  const coll = collection._name;
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'countDocuments',
      class: 'Collection',
      db, coll, arguments: { query, options }
    }
  );
  return this.serviceProvider.countDocuments(db, coll, query, options);
}
/**
 * Delete multiple documents from the coll.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} filter - The filter.
 * @param {Object} options - The delete many options.
 *  <collation, writeConcern>
 *
 * @returns {DeleteResult} The promise of the result.
 */
async collection_deleteMany(collection, filter, options: any = {}): Promise<any> {
  const dbOptions: DatabaseOptions = {};
  const db = collection._database._name;
  const coll = collection._name;
  if (filter === undefined) {
    // Fixed: the previous message incorrectly referred to "an update command".
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to a delete command');
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'deleteMany',
      class: 'Collection',
      db, coll, arguments: { filter, options }
    }
  );
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  const result = await this.serviceProvider.deleteMany(
    db,
    coll,
    filter,
    options,
    dbOptions
  );
  return new DeleteResult(
    result.result.ok,
    result.deletedCount
  );
}
/**
 * Delete one document from the coll.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} filter - The filter.
 * @param {Object} options - The delete one options.
 *  <collation, writeConcern>
 *
 * @returns {DeleteResult} The promise of the result.
 */
async collection_deleteOne(collection, filter, options: any = {}): Promise<any> {
  const dbOptions: DatabaseOptions = {};
  const db = collection._database._name;
  const coll = collection._name;
  if (filter === undefined) {
    // Fixed: the previous message incorrectly referred to "an update command".
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to a delete command');
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'deleteOne',
      class: 'Collection',
      db, coll, arguments: { filter, options }
    }
  );
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  const result = await this.serviceProvider.deleteOne(
    db,
    coll,
    filter,
    options,
    dbOptions
  );
  return new DeleteResult(
    result.result.ok,
    result.deletedCount
  );
}
/**
 * Get distinct values for the field.
 *
 * @note Data Provider API also provides maxTimeMS option.
 *
 * @param {Collection} collection - The collection class.
 * @param {String} field - The field name.
 * @param {Object} query - The filter.
 * @param {Object} options - The distinct options.
 *  <collation>
 *
 * @returns {Array} The promise of the result. TODO: make sure returned type is the same
 */
collection_distinct(collection, field, query, options: any = {}): any {
  const db = collection._database._name;
  const coll = collection._name;
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'distinct',
      class: 'Collection',
      db, coll, arguments: { field, query, options }
    }
  );
  return this.serviceProvider.distinct(db, coll, field, query, options);
}
/**
 * Get an estimated document count from the coll.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} options - The count options.
 *  <maxTimeMS>
 *
 * @returns {Integer} The promise of the count.
 */
collection_estimatedDocumentCount(collection, options = {}): Promise<any> {
  const db = collection._database._name;
  const coll = collection._name;
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'estimatedDocumentCount',
      class: 'Collection',
      db, coll, arguments: { options }
    }
  );
  return this.serviceProvider.estimatedDocumentCount(db, coll, options,);
}
/**
 * Find documents in the collection.
 *
 * @note: Shell API passes filter and projection to find, data provider API
 * uses a options object.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} query - The filter.
 * @param {Object} projection - The projection.
 *
 * @returns {Cursor} The promise of the cursor.
 */
collection_find(collection, query, projection): Cursor {
  const options: any = {};
  const db = collection._database._name;
  const coll = collection._name;
  if (projection) {
    options.projection = projection;
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'find',
      class: 'Collection',
      db, coll, arguments: { query, options }
    }
  );
  const cursor = new Cursor(
    this,
    this.serviceProvider.find(db, coll, query, options)
  );
  // Remember the cursor so `it` can fetch subsequent batches.
  this.currentCursor = cursor;
  return cursor;
}
/**
 * Find one document in the collection.
 *
 * @note: findOne is just find with limit.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} query - The filter.
 * @param {Object} projection - The projection.
 *
 * @returns {Cursor} The promise of the cursor.
 */
collection_findOne(collection, query, projection): Promise<any> {
  const options: any = {};
  const db = collection._database._name;
  const coll = collection._name;
  if (projection) {
    options.projection = projection;
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'findOne',
      class: 'Collection',
      db, coll, arguments: { query, options }
    }
  );
  // Note: unlike find(), this does not update currentCursor.
  return new Cursor(
    this,
    this.serviceProvider.find(db, coll, query, options)
  ).limit(1).next();
}
// findAndModify: atomically find a document and remove/update/replace it.
// The query/sort/update keys are extracted as positional provider arguments;
// the remaining options are passed through.
async collection_findAndModify(
  collection: Collection,
  options: {
    query?: Document;
    sort?: Document | Document[];
    remove?: boolean;
    update?: Document | Document[];
    new?: boolean;
    fields?: Document;
    upsert?: boolean;
    bypassDocumentValidation?: boolean;
    writeConcern?: Document;
    collation?: Document;
    arrayFilters?: Document[];
  } = {}
): Promise<any> {
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'findAndModify',
      class: 'Collection',
      db: collection._database._name,
      coll: collection._name,
      // The update document may contain user data; only log its presence.
      arguments: { options: { ...options, update: !!options.update } }
    }
  );
  const providerOptions = {
    ...options
  };
  delete providerOptions.query;
  delete providerOptions.sort;
  delete providerOptions.update;
  const result = await this.serviceProvider.findAndModify(
    collection._database._name,
    collection._name,
    options.query || {},
    options.sort,
    options.update,
    providerOptions
  );
  return result.value;
}
/**
 * Rename the collection.
 *
 * @param {Collection} collection - The collection class.
 * @param {String} newName - The new collection name.
 * @param {Boolean} dropTarget - Whether to drop an existing target collection.
 *
 * @returns {Document} An { ok: 1 } document on success, or a server-style
 * error document ({ ok: 0, errmsg, code, codeName }) for driver MongoErrors.
 */
async collection_renameCollection(
  collection: Collection,
  newName: string,
  dropTarget?: boolean
): Promise<any> {
  if (typeof newName !== 'string') {
    throw new MongoshInvalidInputError('The "newName" argument must be a string.');
  }
  // Emit the api-call event like every other collection_* method (was missing).
  this._emitCollectionApiCall(collection, 'renameCollection', { newName, dropTarget });
  try {
    await this.serviceProvider.renameCollection(
      collection._database._name,
      collection._name,
      newName,
      { dropTarget: !!dropTarget }
    );
    return {
      ok: 1
    };
  } catch (e) {
    // Translate driver MongoErrors into a shell-style error document;
    // anything else is unexpected and is rethrown.
    if (e.name === 'MongoError') {
      return {
        ok: 0,
        errmsg: e.errmsg,
        code: e.code,
        codeName: e.codeName
      };
    }
    throw e;
  }
}
/**
 * Find one document and delete it.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} filter - The filter.
 * @param {Object} options - The find options.
 *  <projection, sort, collation, maxTimeMS>
 *
 * @returns {Document} The promise of the result.
 */
async collection_findOneAndDelete(collection, filter, options = {}): Promise<any> {
  const db = collection._database._name;
  const coll = collection._name;
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'findOneAndDelete',
      class: 'Collection',
      db, coll, arguments: { filter, options }
    }
  );
  if (filter === undefined) {
    // NOTE(review): message says "update command" but this is a delete —
    // confirm and align with the sibling delete methods.
    throw new MongoshInvalidInputError('Cannot pass an undefined filter to an update command');
  }
  const result = await this.serviceProvider.findOneAndDelete(
    db,
    coll,
    filter,
    options,
  );
  return result.value;
}
/**
 * Find one document and replace it.
 *
 * @note: Shell API uses option 'returnNewDocument' while data provider API
 * expects 'returnDocument'.
 * @note: Data provider API provides bypassDocumentValidation option that shell does not have.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} filter - The filter.
 * @param {Object} replacement - The replacement.
 * @param {Object} options - The find options.
 *  <projection, sort, upsert, maxTimeMS, returnNewDocument, collation>
 *
 * @returns {Document} The promise of the result.
 */
async collection_findOneAndReplace(collection, filter, replacement, options = {}): Promise<any> {
  const findOneAndReplaceOptions: any = { ...options };
  const db = collection._database._name;
  const coll = collection._name;
  // Map the shell-style option name onto the provider's option name.
  if ('returnNewDocument' in findOneAndReplaceOptions) {
    findOneAndReplaceOptions.returnDocument = findOneAndReplaceOptions.returnNewDocument;
    delete findOneAndReplaceOptions.returnNewDocument;
  }
  if (filter === undefined || replacement === undefined) {
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'findOneAndReplace',
      class: 'Collection',
      db,
      coll,
      arguments: { filter, findOneAndReplaceOptions }
    }
  );
  const result = await this.serviceProvider.findOneAndReplace(
    db,
    coll,
    filter,
    replacement,
    findOneAndReplaceOptions
  );
  return result.value;
}
/**
 * Find one document and update it.
 *
 * @note: Shell API uses option 'returnNewDocument' while data provider API
 * expects 'returnDocument'.
 *
 * @param {Collection} collection - The collection class.
 * @param {Object} filter - The filter.
 * @param {(Object|Array)} update - The update.
 * @param {Object} options - The find options.
 *  <projection, sort,maxTimeMS,upsert,returnNewDocument,collation, arrayFilters>
 *
 * @returns {Document} The promise of the result.
 */
async collection_findOneAndUpdate(collection, filter, update, options = {}): Promise<any> {
  const findOneAndUpdateOptions: any = { ...options };
  const db = collection._database._name;
  const coll = collection._name;
  // Map the shell-style option name onto the provider's option name.
  if ('returnNewDocument' in findOneAndUpdateOptions) {
    findOneAndUpdateOptions.returnDocument = findOneAndUpdateOptions.returnNewDocument;
    delete findOneAndUpdateOptions.returnNewDocument;
  }
  if (filter === undefined || update === undefined) {
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'findOneAndUpdate',
      class: 'Collection',
      db,
      coll,
      arguments: { filter, findOneAndUpdateOptions }
    }
  );
  const result = await this.serviceProvider.findOneAndUpdate(
    db,
    coll,
    filter,
    update,
    findOneAndUpdateOptions,
  );
  return result.value;
}
/**
 * Alias for insertMany.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 *
 * @param {Collection} collection
 * @param {Object|Array} docs
 * @param {Object} options
 *    <writeConcern, ordered>
 * @return {InsertManyResult}
 */
async collection_insert(collection, docs, options: any = {}): Promise<any> {
  const db = collection._database._name;
  const coll = collection._name;
  // Validate before any other work (was previously checked after use).
  if (docs === undefined) {
    // NOTE(review): message mentions "update command" for an insert — kept
    // unchanged here; confirm before altering, other callers may assert it.
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  // Normalize a single document into an array; Array.isArray replaces the
  // old Object.prototype.toString.call(...) === '[object Array]' check.
  const d = Array.isArray(docs) ? docs : [docs];
  const dbOptions: DatabaseOptions = {};
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'insert',
      class: 'Collection',
      db, coll, arguments: { options }
    }
  );
  const result = await this.serviceProvider.insertMany(
    db,
    coll,
    d,
    options,
    dbOptions
  );
  return new InsertManyResult(
    result.result.ok,
    result.insertedIds
  );
}
/**
 * Insert multiple documents.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 * @note: Data provider API allows for bypassDocumentValidation as argument,
 * shell API doesn't.
 *
 * @param {Collection} collection
 * @param {Object|Array} docs
 * @param {Object} options
 *    <writeConcern, ordered>
 * @return {InsertManyResult}
 */
async collection_insertMany(collection, docs, options: any = {}): Promise<any> {
  const dbOptions: DatabaseOptions = {};
  const db = collection._database._name;
  const coll = collection._name;
  if (docs === undefined) {
    // NOTE(review): message says "update command" for an insert — confirm.
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'insertMany',
      class: 'Collection',
      db, coll, arguments: { options }
    }
  );
  const result = await this.serviceProvider.insertMany(
    db,
    coll,
    docs,
    options,
    dbOptions
  );
  return new InsertManyResult(
    result.result.ok,
    result.insertedIds
  );
}
/**
 * Insert one document.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 * @note: Data provider API allows for bypassDocumentValidation as argument,
 * shell API doesn't.
 *
 * @param {Collection} collection
 * @param {Object} doc
 * @param {Object} options
 *    <writeConcern>
 * @return {InsertOneResult}
 */
async collection_insertOne(collection, doc, options: any = {}): Promise<any> {
  const dbOptions: DatabaseOptions = {};
  const db = collection._database._name;
  const coll = collection._name;
  if (doc === undefined) {
    // Fixed copy/paste error: this is an insert, not an update, command.
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an insert command');
  }
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'insertOne',
      class: 'Collection',
      db, coll, arguments: { options }
    }
  );
  const result = await this.serviceProvider.insertOne(
    db,
    coll,
    doc,
    options,
    dbOptions
  );
  return new InsertOneResult(
    result.result.ok,
    result.insertedId
  );
}
/**
 * Reports whether the collection is capped.
 *
 * @param {Collection} collection
 * @return {Boolean}
 */
collection_isCapped(collection): Promise<any> {
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'isCapped',
    class: 'Collection',
    db: dbName,
    coll: collName
  });
  return this.serviceProvider.isCapped(dbName, collName);
}
/**
 * Deprecated remove command.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 * @note: Shell API accepts second argument as a bool, indicating justOne.
 *
 * @param {Collection} collection
 * @param {Object} query
 * @param {Object|Boolean} options
 *    <justOne, writeConcern, collation>
 * @return {Promise}
 */
collection_remove(collection, query, options: any = {}): Promise<any> {
  const dbOptions: DatabaseOptions = {};
  const db = collection._database._name;
  const coll = collection._name;
  if (query === undefined) {
    // Fixed copy/paste error: this is a remove, not an update, command.
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to a remove command');
  }
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  // A bare boolean second argument is shorthand for { justOne: <bool> }.
  let removeOptions: any = {};
  if (typeof options === 'boolean') {
    removeOptions.justOne = options;
  } else {
    removeOptions = options;
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'remove',
      class: 'Collection',
      db, coll, arguments: { query, removeOptions }
    }
  );
  return this.serviceProvider.remove(
    db,
    coll,
    query,
    removeOptions,
    dbOptions
  );
}
// TODO
/**
 * Deprecated save command: upserts the given document.
 * writeConcern from `options` is copied into a separate dbOptions object
 * for the data provider API.
 */
collection_save(collection, doc, options: any = {}): Promise<any> {
  if (doc === undefined) {
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'save',
    class: 'Collection',
    db: dbName,
    coll: collName,
    arguments: { options }
  });
  const dbOptions: DatabaseOptions = {};
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  return this.serviceProvider.save(dbName, collName, doc, options, dbOptions);
}
/**
 * Replace a single matching document with another.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 * @note: Data provider API allows for bypassDocumentValidation as argument,
 * shell API doesn't.
 *
 * @param {Collection} collection
 * @param {Object} filter - The filter.
 * @param {Object} replacement - The replacement document for matches.
 * @param {Object} options - <upsert, writeConcern, collation, hint>
 * @returns {UpdateResult} The promise of the result.
 */
async collection_replaceOne(collection, filter, replacement, options: any = {}): Promise<any> {
  if (filter === undefined) {
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'replaceOne',
    class: 'Collection',
    db: dbName,
    coll: collName,
    arguments: { filter, options }
  });
  const dbOptions: DatabaseOptions = {};
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  const result = await this.serviceProvider.replaceOne(
    dbName,
    collName,
    filter,
    replacement,
    options,
    dbOptions
  );
  return new UpdateResult(
    result.result.ok,
    result.matchedCount,
    result.modifiedCount,
    result.upsertedCount,
    result.upsertedId
  );
}
/**
 * Legacy update command: delegates to updateMany when `options.multi` is
 * truthy, otherwise to updateOne.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object. Previously this method silently dropped
 * writeConcern; it is now forwarded consistently with updateOne/updateMany.
 *
 * @param {Collection} collection
 * @param {Object} filter - The filter.
 * @param {(Object|Array)} update - The updates.
 * @param {Object} options - <multi, upsert, writeConcern, collation, arrayFilters, hint>
 * @returns {UpdateResult} The promise of the result.
 */
async collection_update(collection, filter, update, options: any = {}): Promise<any> {
  const db = collection._database._name;
  const coll = collection._name;
  if (filter === undefined) {
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'update',
      class: 'Collection',
      db, coll, arguments: { filter, options }
    }
  );
  const dbOptions: DatabaseOptions = {};
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  let result;
  if (options.multi) {
    result = await this.serviceProvider.updateMany(
      db,
      coll,
      filter,
      update,
      options,
      dbOptions
    );
  } else {
    result = await this.serviceProvider.updateOne(
      db,
      coll,
      filter,
      update,
      options,
      dbOptions
    );
  }
  return new UpdateResult(
    result.result.ok,
    result.matchedCount,
    result.modifiedCount,
    result.upsertedCount,
    result.upsertedId
  );
}
/**
 * Update all documents matching the filter.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 *
 * @param {Collection} collection
 * @param {Object} filter - The filter.
 * @param {(Object|Array)} update - The updates.
 * @param {Object} options - <upsert, writeConcern, collation, arrayFilters, hint>
 * @returns {UpdateResult} The promise of the result.
 */
async collection_updateMany(collection, filter, update, options: any = {}): Promise<any> {
  if (filter === undefined) {
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'updateMany',
    class: 'Collection',
    db: dbName,
    coll: collName,
    arguments: { filter, options }
  });
  const dbOptions: DatabaseOptions = {};
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  const result = await this.serviceProvider.updateMany(
    dbName,
    collName,
    filter,
    update,
    options,
    dbOptions
  );
  return new UpdateResult(
    result.result.ok,
    result.matchedCount,
    result.modifiedCount,
    result.upsertedCount,
    result.upsertedId
  );
}
/**
 * Update the first document matching the filter.
 *
 * @note: Shell API sets writeConcern via options in object, data provider API
 * expects it as a dbOption object.
 *
 * @param {Collection} collection
 * @param {Object} filter - The filter.
 * @param {(Object|Array)} update - The updates.
 * @param {Object} options - <upsert, writeConcern, collation, arrayFilters, hint>
 * @returns {UpdateResult} The promise of the result.
 */
async collection_updateOne(
  collection: Collection,
  filter: Document,
  update: Document,
  options: Document = {}
): Promise<any> {
  if (filter === undefined) {
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to an update command');
  }
  this._emitCollectionApiCall(collection, 'updateOne', { filter, options });
  const dbOptions: DatabaseOptions = {};
  if ('writeConcern' in options) {
    Object.assign(dbOptions, options.writeConcern);
  }
  const dbName = collection._database._name;
  const collName = collection._name;
  const result = await this.serviceProvider.updateOne(
    dbName,
    collName,
    filter,
    update,
    options,
    dbOptions
  );
  return new UpdateResult(
    result.result.ok,
    result.matchedCount,
    result.modifiedCount,
    result.upsertedCount,
    result.upsertedId
  );
}
/**
 * Converts a collection to a capped collection.
 *
 * @param {Collection} collection
 * @param {number} size - The maximum size, in bytes, for the capped collection.
 * @return {Promise}
 */
async collection_convertToCapped(collection: Collection, size: number): Promise<any> {
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'convertToCapped',
    class: 'Collection',
    db: dbName,
    coll: collName,
    arguments: { size }
  });
  return await this.serviceProvider.convertToCapped(dbName, collName, size);
}
/**
 * Create indexes for a collection.
 *
 * @param {Collection} collection
 * @param {Document[]} keyPatterns - An array of documents that contains
 *    the field and value pairs where the field is the index key and the
 *    value describes the type of index for that field.
 * @param {Document} options - createIndexes options (name, background, sparse ...)
 * @return {Promise}
 */
async collection_createIndexes(
  collection: Collection,
  keyPatterns: Document[],
  options: Document = {}
): Promise<any> {
  if (keyPatterns === undefined) {
    // Fixed copy/paste error: this is a createIndexes, not an update, command.
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to a createIndexes command');
  }
  const db = collection._database._name;
  const coll = collection._name;
  if (typeof options !== 'object' || Array.isArray(options)) {
    throw new MongoshInvalidInputError('The "options" argument must be an object.');
  }
  // Expand each key pattern into a full index spec carrying the shared options.
  const specs = keyPatterns.map((pattern) => ({
    ...options, key: pattern
  }));
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'createIndexes',
      class: 'Collection',
      db, coll, arguments: { specs }
    }
  );
  return await this.serviceProvider.createIndexes(db, coll, specs);
}
/**
 * Create a single index for a collection.
 *
 * @param {Collection} collection
 * @param {Document} keys - A document that contains the field and value pairs
 *    where the field is the index key and the value describes the type of
 *    index for that field.
 * @param {Document} options - createIndexes options (name, background, sparse ...)
 * @return {Promise}
 */
async collection_createIndex(
  collection: Collection,
  keys: Document,
  options: Document = {}
): Promise<any> {
  if (keys === undefined) {
    // Fixed copy/paste error: this is a createIndex, not an update, command.
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to a createIndex command');
  }
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'createIndex',
      class: 'Collection',
      db: collection._database._name,
      coll: collection._name,
      arguments: { keys, options }
    }
  );
  // Delegate to createIndexes with a single-element keyPatterns array.
  return await this.collection_createIndexes(
    collection,
    [keys],
    options
  );
}
/**
 * Create an index for a collection (alias for createIndex).
 *
 * @param {Collection} collection
 * @param {Document} keys - A document of field/index-type pairs.
 * @param {Document} options - createIndexes options (name, background, sparse ...)
 * @return {Promise}
 */
async collection_ensureIndex(
  collection: Collection,
  keys: Document,
  options: Document
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'ensureIndex',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name,
    arguments: { keys, options }
  });
  // Alias: simply forward to createIndex.
  return await this.collection_createIndex(collection, keys, options);
}
/**
 * Returns an array of documents that identify and describe the existing
 * indexes on the collection.
 *
 * @param {Collection} collection
 * @return {Promise}
 */
async collection_getIndexes(
  collection: Collection,
): Promise<any> {
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'getIndexes',
    class: 'Collection',
    db: dbName,
    coll: collName
  });
  return await this.serviceProvider.getIndexes(dbName, collName);
}
/**
 * Alias for getIndexes: lists the index definitions on the collection.
 *
 * @param {Collection} collection
 * @return {Promise}
 */
async collection_getIndexSpecs(
  collection: Collection,
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'getIndexSpecs',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  return await this.collection_getIndexes(collection);
}
/**
 * Alias for getIndexes: lists the index definitions on the collection.
 *
 * @param {Collection} collection
 * @return {Promise}
 */
async collection_getIndices(
  collection: Collection,
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'getIndices',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  return await this.collection_getIndexes(collection);
}
/**
 * Returns an array of key patterns for the indexes defined on the collection.
 *
 * @param {Collection} collection
 * @return {Promise}
 */
async collection_getIndexKeys(
  collection: Collection,
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'getIndexKeys',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  const indexes = await this.collection_getIndexes(collection);
  // Project each index spec down to just its key pattern.
  return indexes.map((index) => index.key);
}
/**
 * Drops the specified index or indexes (except the index on the _id field)
 * from a collection.
 *
 * @param {Collection} collection
 * @param {string|string[]|Object|Object[]} indexes the indexes to be removed.
 * @return {Promise}
 */
async collection_dropIndexes(
  collection: Collection,
  indexes: string|string[]|Document|Document[]
): Promise<any> {
  if (indexes === undefined) {
    // Fixed copy/paste error: this is a dropIndexes, not an update, command.
    throw new MongoshInvalidInputError('Cannot pass an undefined argument to a dropIndexes command');
  }
  const db = collection._database._name;
  const coll = collection._name;
  this.messageBus.emit(
    'mongosh:api-call',
    {
      method: 'dropIndexes',
      class: 'Collection',
      db, coll, arguments: { indexes }
    }
  );
  try {
    return await this.serviceProvider.dropIndexes(db, coll, indexes);
  } catch (error) {
    // A missing index is reported as a plain result document, not thrown,
    // to match the legacy shell behavior.
    if (error.codeName === 'IndexNotFound') {
      return {
        ok: error.ok,
        errmsg: error.errmsg,
        code: error.code,
        codeName: error.codeName
      };
    }
    throw error;
  }
}
/**
 * Drops the specified index from a collection.
 *
 * @param {Collection} collection
 * @param {string|Object} index the index to be removed.
 * @return {Promise}
 */
async collection_dropIndex(
  collection: Collection,
  index: string|Document
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'dropIndex',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name,
    arguments: { index }
  });
  // '*' and arrays are only valid for dropIndexes, so reject them here.
  if (index === '*') {
    throw new MongoshInvalidInputError('To drop indexes in the collection using \'*\', use db.collection.dropIndexes().');
  }
  if (Array.isArray(index)) {
    throw new MongoshInvalidInputError('The index to drop must be either the index name or the index specification document.');
  }
  return await this.collection_dropIndexes(collection, index);
}
/**
 * Returns the total size of all indexes for the collection.
 *
 * @param {Collection} collection
 * @return {Promise}
 */
async collection_totalIndexSize(
  collection: Collection,
  ...args: any[]
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'totalIndexSize',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  // The legacy shell accepted a verbosity argument; mongosh does not.
  if (args.length > 0) {
    throw new MongoshInvalidInputError(
      '"totalIndexSize" takes no argument. Use db.collection.stats to get detailed information.'
    );
  }
  return (await this.collection_stats(collection)).totalIndexSize;
}
/**
 * Drops and recreates all indexes for a collection.
 *
 * @param {Collection} collection
 * @return {Promise}
 */
async collection_reIndex(
  collection: Collection
): Promise<any> {
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'reIndex',
    class: 'Collection',
    db: dbName,
    coll: collName
  });
  return await this.serviceProvider.reIndex(dbName, collName);
}
/**
 * Returns the database the collection belongs to.
 *
 * @param {Collection} collection
 * @return {Database}
 */
collection_getDB(
  collection: Collection
): Database {
  this.messageBus.emit('mongosh:api-call', {
    method: 'getDB',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  return collection._database;
}
/**
 * Get all the collection statistics.
 *
 * @param {Collection} collection - The collection.
 * @param {Object} options - The stats options.
 * @return {Promise} returns Promise
 */
async collection_stats(
  collection: Collection,
  options: Document = {}
): Promise<any> {
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'stats',
    class: 'Collection',
    db: dbName,
    coll: collName,
    arguments: { options }
  });
  return await this.serviceProvider.stats(dbName, collName, options);
}
/**
 * Get the collection dataSize (the `size` field of collection stats).
 *
 * @param {Collection} collection - The collection.
 * @return {Promise} returns Promise
 */
async collection_dataSize(
  collection: Collection,
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'dataSize',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  const stats = await this.collection_stats(collection);
  return stats.size;
}
/**
 * Get the collection storageSize (from collection stats).
 *
 * @param {Collection} collection - The collection.
 * @return {Promise} returns Promise
 */
async collection_storageSize(
  collection: Collection,
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'storageSize',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  const stats = await this.collection_stats(collection);
  return stats.storageSize;
}
/**
 * Get the collection totalSize: storage size plus total index size.
 *
 * @param {Collection} collection - The collection.
 * @return {Promise} returns Promise
 */
async collection_totalSize(
  collection: Collection,
): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'totalSize',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  const stats = await this.collection_stats(collection);
  // Either field may be absent (e.g. views); treat missing as zero.
  const storage = stats.storageSize || 0;
  const indexes = stats.totalIndexSize || 0;
  return storage + indexes;
}
/**
 * Drop a collection. Returns false (instead of throwing) when the
 * namespace does not exist.
 *
 * @param {Collection} collection - The collection.
 * @return {Promise} returns Promise
 */
async collection_drop(
  collection: Collection,
): Promise<boolean> {
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'drop',
    class: 'Collection',
    db: dbName,
    coll: collName
  });
  try {
    return await this.serviceProvider.dropCollection(dbName, collName);
  } catch (error) {
    if (error.codeName !== 'NamespaceNotFound') {
      throw error;
    }
    // Dropping a non-existent collection is not an error for the shell API.
    this.messageBus.emit('mongosh:warn', {
      method: 'drop',
      class: 'Collection',
      message: `Namespace not found: ${collName}`
    });
    return false;
  }
}
/**
 * Checks whether the collection exists: returns its collection-info
 * document, or null when it is not listed.
 *
 * @param {Collection} collection - The collection.
 * @return {Promise} returns Promise
 */
async collection_exists(collection: Collection): Promise<any> {
  this.messageBus.emit('mongosh:api-call', {
    method: 'exists',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  const infos = await this.serviceProvider.listCollections(
    collection._database._name,
    { name: collection._name }
  );
  return infos[0] || null;
}
/**
 * Returns the fully-qualified namespace of the collection ("db.coll").
 */
collection_getFullName(collection: Collection): string {
  const dbName = collection._database._name;
  const collName = collection._name;
  this.messageBus.emit('mongosh:api-call', {
    method: 'getFullName',
    class: 'Collection',
    db: dbName,
    coll: collName
  });
  return `${dbName}.${collName}`;
}
/**
 * Returns the collection's name (without the database prefix).
 */
collection_getName(collection: Collection): string {
  this.messageBus.emit('mongosh:api-call', {
    method: 'getName',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name
  });
  return `${collection._name}`;
}
/**
 * Runs a database command with the collection name as the command's value,
 * e.g. runCommand('collStats') -> { collStats: <collName>, ...options }.
 */
async collection_runCommand(
  collection: Collection,
  commandName: string,
  options?: Document
): Promise<any> {
  if (typeof commandName !== 'string') {
    throw new MongoshInvalidInputError('The "commandName" argument must be a string.');
  }
  if (options && commandName in options) {
    throw new MongoshInvalidInputError('The "commandName" argument cannot be passed as an option to "runCommand".');
  }
  this.messageBus.emit('mongosh:api-call', {
    method: 'runCommand',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name,
    arguments: { commandName }
  });
  const command = {
    ...options,
    [commandName]: collection._name
  };
  return await this.database_runCommand(collection._database, command);
}
/**
 * Throws MongoshInvalidInputError unless the verbosity is one of the
 * explain verbosity modes supported by the server.
 */
private _validateExplainableVerbosity(verbosity: string): void {
  const allowedVerbosity = ['queryPlanner', 'executionStats', 'allPlansExecution'];
  if (allowedVerbosity.includes(verbosity)) {
    return;
  }
  throw new MongoshInvalidInputError(
    `verbosity can only be one of ${allowedVerbosity.join(', ')}. Received ${verbosity}.`
  );
}
/**
 * Returns an Explainable wrapper for the collection at the given verbosity.
 */
collection_explain(collection: Collection, verbosity = 'queryPlanner'): Explainable {
  // Reject unsupported verbosity values before emitting the api-call event.
  this._validateExplainableVerbosity(verbosity);
  this.messageBus.emit('mongosh:api-call', {
    method: 'explain',
    class: 'Collection',
    db: collection._database._name,
    coll: collection._name,
    arguments: { verbosity }
  });
  return new Explainable(this, collection, verbosity);
}
/** Returns the collection wrapped by this Explainable. */
explainable_getCollection(explainable: Explainable): Collection {
  this._emitExplainableApiCall(explainable, 'getCollection');
  const { _collection } = explainable;
  return _collection;
}
/** Returns the current explain verbosity of this Explainable. */
explainable_getVerbosity(explainable: Explainable): string {
  this._emitExplainableApiCall(explainable, 'getVerbosity');
  const { _verbosity } = explainable;
  return _verbosity;
}
/** Validates and stores a new explain verbosity on this Explainable. */
explainable_setVerbosity(explainable: Explainable, verbosity: string): void {
  // Validate first so an invalid value neither emits nor mutates state.
  this._validateExplainableVerbosity(verbosity);
  this._emitExplainableApiCall(explainable, 'setVerbosity', { verbosity });
  explainable._verbosity = verbosity;
}
// TODO: turn this into proper types and constructors in shell api.
/**
 * Converts a cursor into an "explainable" cursor by patching it in place:
 * printing the cursor in the REPL triggers explain() instead of iterating
 * documents. NOTE(review): mutates and returns the same cursor instance.
 */
private _makeExplainableCursor<T extends AggregationCursor|Cursor>(
  cursor: T,
  verbosity?: string
): T {
  // Report a distinct shell API type so output formatting treats it specially.
  cursor.shellApiType = (): string => 'ExplainableCursor';
  // REPL string conversion runs explain at the captured verbosity.
  cursor.toReplString = (): Promise<any> => {
    return cursor.explain(verbosity);
  };
  return cursor;
}
/**
 * Runs find() on the wrapped collection and returns a cursor patched to
 * explain itself when printed.
 */
explainable_find(explainable: Explainable, query?: any, projection?: any): Cursor {
  this._emitExplainableApiCall(explainable, 'find', { query, projection });
  // TODO: turn ExplainableCursor in a proper type.
  return this._makeExplainableCursor(
    explainable._collection.find(query, projection) as Cursor,
    explainable._verbosity
  );
}
/**
 * Runs aggregate() on the wrapped collection and returns its explain output
 * at the Explainable's verbosity.
 */
async explainable_aggregate(explainable: Explainable, pipeline?: any, options?: any): Promise<any> {
  this._emitExplainableApiCall(explainable, 'aggregate', { pipeline, options });
  // Force explain: false on the aggregation itself; explain() is called on
  // the resulting cursor instead.
  const aggregateOptions = { ...options, explain: false };
  const cursor = await explainable._collection.aggregate(pipeline, aggregateOptions) as AggregationCursor;
  return await cursor.explain(explainable._verbosity);
}
/**
 * Returns the Database object for the given name on the same connection.
 * NOTE(review): the `database` parameter is unused here — presumably kept for
 * a uniform `database_*` dispatch signature; confirm before removing.
 */
database_getSiblingDB(database: Database, name: string): Database {
  return this._getDatabase(name);
}
/**
 * Returns (and caches) the Collection object for the given name, validating
 * that the name is a non-empty string.
 */
database_getCollection(database: Database, name: string): Collection {
  if (typeof name !== 'string') {
    throw new MongoshInvalidInputError(
      `Collection name must be a string. Received ${typeof name}.`);
  }
  if (!name.trim()) {
    throw new MongoshInvalidInputError('Collection name cannot be empty.');
  }
  // Collections are cached per database so repeated lookups return the
  // same instance.
  const collections: Record<string, Collection> = (database as any)._collections;
  const cached = collections[name];
  if (cached) {
    return cached;
  }
  const created = new Collection(this, database, name);
  collections[name] = created;
  return created;
}
/**
 * Drops the given database, optionally with a write concern.
 */
async database_dropDatabase(database: Database, writeConcern?: WriteConcern): Promise<any> {
  const dbName = database._name;
  return await this.serviceProvider.dropDatabase(dbName, writeConcern);
}
}
|
MaBeuLux88/mongosh
|
packages/service-provider-core/src/document.ts
|
/**
 * A loosely-typed BSON document: an arbitrary string-keyed property bag.
 */
interface Document {
  [prop: string]: any
}
// Fixed: removed the stray empty statement (`;`) after the interface body.
export default Document;
|
MaBeuLux88/mongosh
|
packages/cli-repl/test/e2e.spec.ts
|
<reponame>MaBeuLux88/mongosh
import { expect } from 'chai';
import { MongoClient } from 'mongodb';
import { eventually, startShell, killOpenShells } from './helpers';
// End-to-end tests for the CLI shell against a locally started mongod.
describe('e2e', function() {
  before(require('mongodb-runner/mocha/before')({ port: 27018, timeout: 60000 }));
  after(require('mongodb-runner/mocha/after')({ port: 27018 }));
  afterEach(() => killOpenShells());
  describe('--version', () => {
    it('shows version', async() => {
      const shell = startShell('--version');
      await eventually(() => {
        expect(shell.stdio.stderr).to.be.empty;
        expect(shell.stdio.stdout).to.contain(
          require('../package.json').version
        );
      });
    });
  });
  describe('with connection string', () => {
    let db;
    let client;
    let shell;
    let dbName;
    beforeEach(async() => {
      dbName = `test-${Date.now()}`;
      const connectionString = `mongodb://localhost:27018/${dbName}`;
      shell = startShell(connectionString);
      client = await (MongoClient as any).connect(
        connectionString,
        { useNewUrlParser: true }
      );
      db = client.db(dbName);
    });
    afterEach(async() => {
      await db.dropDatabase();
      // Fixed: await the close so the connection is fully released before
      // the next test starts.
      await client.close();
    });
    it.skip('connects to the right database', async() => {
      shell.stdio.stdin.write('db\n');
      await eventually(() => {
        expect(shell.stdio.stderr).to.be.empty;
        expect(shell.stdio.stdout).to.contain(`> ${dbName}\n`);
      });
    });
    it('throws multiline input with a single line string', async() => {
      // this is an unterminated string constant and should throw, since it does
      // not pass: https://www.ecma-international.org/ecma-262/#sec-line-terminators
      shell.stdio.stdin.write('"this is a multi\nline string"\n');
      await eventually(() => {
        expect(shell.stdio.stderr).to.exist;
      });
    });
    it('throws when a syntax error is encountered', async() => {
      shell.stdio.stdin.write('<x>\n');
      await eventually(() => {
        expect(shell.stdio.stderr).to.exist;
      });
    });
    it('does not throw for a repl await function', async() => {
      shell.stdio.stdin.write('await Promise.resolve(\'Nori-cat\');');
      await eventually(() => {
        expect(shell.stdio.stderr).to.be.equal('');
      });
    });
    // Fixed: this title duplicated the test below; this case actually feeds
    // a complete multiline function definition.
    it('runs a complete multiline function', async() => {
      shell.stdio.stdin.write('function x () {\nconsole.log(\'y\')\n }\n');
      await eventually(() => {
        expect(shell.stdio.stderr).to.be.empty;
      });
    });
    it('runs an unterminated function', async() => {
      shell.stdio.stdin.write('function x () {\n');
      await eventually(() => {
        expect(shell.stdio.stderr).to.be.empty;
      });
    });
    it('runs help command', async() => {
      shell.stdio.stdin.write('help\n');
      await eventually(() => {
        expect(shell.stdio.stderr).to.be.empty;
        expect(shell.stdio.stdout).to.contain('Shell Help');
      });
    });
    it('allows to find documents', async() => {
      shell.stdio.stdin.write(`use ${dbName}\n`);
      await db.collection('test').insertMany([
        { doc: 1 },
        { doc: 2 },
        { doc: 3 }
      ]);
      shell.stdio.stdin.write('db.test.find()\n');
      await eventually(() => {
        expect(shell.stdio.stderr).to.be.empty;
        expect(shell.stdio.stdout).to.contain('doc: 1');
        expect(shell.stdio.stdout).to.contain('doc: 2');
        expect(shell.stdio.stdout).to.contain('doc: 3');
      });
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/cli-repl/src/cli-repl.ts
|
<gh_stars>0
/* eslint no-console: 0, no-sync: 0*/
import { CliServiceProvider, NodeOptions } from '@mongosh/service-provider-server';
import formatOutput, { formatError } from './format-output';
import ShellEvaluator from '@mongosh/shell-evaluator';
import isRecoverableError from 'is-recoverable-error';
import { MongoshWarning } from '@mongosh/errors';
import { changeHistory } from '@mongosh/history';
import { REPLServer, Recoverable } from 'repl';
import getConnectInfo from './connect-info';
import { TELEMETRY, MONGOSH_WIKI } from './constants';
import CliOptions from './cli-options';
import completer from './completer';
import i18n from '@mongosh/i18n';
import { ObjectId } from 'bson';
import repl from 'pretty-repl';
import Nanobus from 'nanobus';
import logger from './logger';
import mkdirp from 'mkdirp';
import clr from './clr';
import path from 'path';
import util from 'util';
import read from 'read';
import os from 'os';
import fs from 'fs';
import { redactPwd } from '.';
/**
* Connecting text key.
*/
const CONNECTING = 'cli-repl.cli-repl.connecting';
/**
* The REPL used from the terminal.
*/
class CliRepl {
private serviceProvider: CliServiceProvider;
private ShellEvaluator: ShellEvaluator;
private buildInfo: any;
private repl: REPLServer;
private bus: Nanobus;
private enableTelemetry: boolean;
private disableGreetingMessage: boolean;
private userId: ObjectId;
private options: CliOptions;
private mongoshDir: string;
/**
 * Instantiate the new CLI Repl.
 * Sets up the on-disk mongosh directory, logging bus, and telemetry config,
 * then either prompts for a password or connects and starts the REPL.
 */
constructor(driverUri: string, driverOptions: NodeOptions, options: CliOptions) {
  this.options = options;
  // All mongosh state (logs, history, config) lives under ~/.mongodb/mongosh/.
  this.mongoshDir = path.join(os.homedir(), '.mongodb/mongosh/');
  this.createMongoshDir();
  this.bus = new Nanobus('mongosh');
  // Wire the event bus to the on-disk logger before anything can emit.
  logger(this.bus, this.mongoshDir);
  this.generateOrReadTelemetryConfig();
  if (this.isPasswordMissing(driverOptions)) {
    // URI has a username but no password: prompt before connecting.
    this.requirePassword(driverUri, driverOptions);
  } else {
    // Constructors cannot be async, so connection errors are handled here.
    this.setupRepl(driverUri, driverOptions).catch((error) => {
      this.bus.emit('mongosh:error', error);
      console.log(formatError(error));
      return;
    });
  }
}
/**
 * setup CLI environment: serviceProvider, ShellEvaluator, log connection
 * information, and finally start the repl.
 *
 * @param {string} driverUri - The driver URI.
 * @param {NodeOptions} driverOptions - The driver options.
 */
async setupRepl(driverUri: string, driverOptions: NodeOptions): Promise<void> {
  this.serviceProvider = await this.connect(driverUri, driverOptions);
  this.ShellEvaluator = new ShellEvaluator(this.serviceProvider, this.bus, this);
  this.buildInfo = await this.serviceProvider.buildInfo();
  // Fixed: await this call so a rejection is handled by the constructor's
  // .catch() instead of escaping as an unhandled promise rejection.
  await this.logBuildInfo(driverUri);
  this.start();
}
/**
 * Connect to the cluster, printing the (password-redacted) URI first.
 *
 * @param {string} driverUri - The driver URI.
 * @param {NodeOptions} driverOptions - The driver options.
 */
async connect(driverUri: string, driverOptions: NodeOptions): Promise<any> {
  // Never echo credentials: redact the password before printing.
  const displayUri = clr(redactPwd(driverUri), ['bold', 'green']);
  console.log(i18n.__(CONNECTING), displayUri);
  return await CliServiceProvider.connect(driverUri, driverOptions);
}
/**
 * Start the REPL: greeting, prompt, custom evaluator, persistent history,
 * and exit handling.
 */
start(): void {
  this.greet();
  const version = this.buildInfo.version;
  this.repl = repl.start({
    prompt: '> ',
    writer: this.writer,
    completer: completer.bind(null, version),
  });
  this.repl.defineCommand('clear', {
    help: '',
    action: () => {
      this.repl.displayPrompt();
    }
  });
  const originalEval = util.promisify(this.repl.eval);
  // Route all input through the shell evaluator; recoverable syntax errors
  // (e.g. an open brace) keep the REPL in multiline mode.
  const customEval = async(input, context, filename, callback): Promise<any> => {
    let result;
    try {
      result = await this.ShellEvaluator.customEval(originalEval, input, context, filename);
    } catch (err) {
      if (isRecoverableError(input)) {
        return callback(new Recoverable(err));
      }
      result = err;
    }
    callback(null, result);
  };
  (this.repl as any).eval = customEval;
  const historyFile = path.join(this.mongoshDir, '.mongosh_repl_history');
  const redactInfo = this.options.redactInfo;
  // Fixed: this callback was a `function`, so `this.writer` was called with
  // the wrong `this`; an arrow function keeps `this` bound to CliRepl. The
  // warning is also only constructed when setup actually failed.
  // eslint thinks we are redefining this.repl here, we are not.
  // eslint-disable-next-line no-shadow
  this.repl.setupHistory(historyFile, (err, repl) => {
    if (err) {
      this.writer(new MongoshWarning('Unable to set up history file. History will not be persisting in this session'));
    }
    // repl.history is an array of previous commands. We need to hijack the
    // value we just typed, and shift it off the history array if the info is
    // sensitive.
    repl.on('flushHistory', function() {
      changeHistory((repl as any).history, redactInfo);
    });
  });
  this.repl.on('exit', () => {
    this.serviceProvider.close(true);
    process.exit();
  });
  this.ShellEvaluator.setCtx(this.repl.context);
}
/**
 * Log information about the current connection using buildInfo, topology,
 * current driverUri, and cmdLineOpts.
 *
 * @param {string} driverUri - The driver URI.
 */
async logBuildInfo(driverUri: string): Promise<void> {
  const cmdLineOpts = await this.getCmdLineOpts();
  const connectInfo = getConnectInfo(
    driverUri,
    this.buildInfo,
    cmdLineOpts,
    this.serviceProvider.getTopology()
  );
  this.bus.emit('mongosh:connect', connectInfo);
}
/**
 * run getCmdLineOpts() command to get cmdLineOpts necessary for logging.
 * Returns null (instead of throwing) on servers that refuse the command,
 * e.g. Atlas and Data Lake.
 */
async getCmdLineOpts(): Promise<any> {
  try {
    return await this.serviceProvider.getCmdLineOpts();
  } catch (e) {
    // error is thrown here for atlas and DataLake connections.
    // don't actually throw, as this is only used to log out non-genuine
    // mongodb connections
    this.bus.emit('mongosh:error', e);
    return null;
  }
}
/**
* Creates a directory to store all mongosh logs, history and config
*/
createMongoshDir(): void {
try {
mkdirp.sync(this.mongoshDir);
} catch (e) {
this.bus.emit('mongosh:error', e);
throw e;
}
}
  /**
   * Read the telemetry config file, creating it first if it does not exist.
   *
   * First run (file absent): generates a fresh userId, ENABLES telemetry,
   * keeps the greeting visible, and persists the config to disk.
   * Subsequent runs (file present): loads userId and enableTelemetry from
   * disk and suppresses the greeting message.
   */
  generateOrReadTelemetryConfig(): void {
    const configPath = path.join(this.mongoshDir, 'config');
    let fd;
    try {
      // 'wx' fails with EEXIST when the file already exists, which is how we
      // distinguish a first run from a subsequent run atomically.
      fd = fs.openSync(configPath, 'wx');
      // NOTE(review): ObjectId is handed Date.now() (milliseconds) — confirm
      // this matches the ObjectId constructor's expected argument type.
      this.userId = new ObjectId(Date.now());
      this.enableTelemetry = true;
      this.disableGreetingMessage = false;
      this.bus.emit('mongosh:new-user', this.userId, this.enableTelemetry);
      // Writes through a separate fs.writeFileSync; the fd above only served
      // as the existence check and is closed in the finally block.
      this.writeConfigFileSync(configPath);
    } catch (err) {
      if (err.code === 'EEXIST') {
        // Existing install: restore identity and telemetry choice from disk.
        const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
        this.userId = config.userId;
        this.disableGreetingMessage = true;
        this.enableTelemetry = config.enableTelemetry;
        this.bus.emit('mongosh:update-user', this.userId, this.enableTelemetry);
        return;
      }
      this.bus.emit('mongosh:error', err);
      throw err;
    } finally {
      if (fd !== undefined) fs.closeSync(fd);
    }
  }
/**
* sets CliRepl.enableTelemetry based on a bool, and writes the selection to
* config file.
*
* @param {boolean} enabled - enabled or disabled status
*
* @returns {string} Status of telemetry logging: disabled/enabled
*/
toggleTelemetry(enabled: boolean): string {
this.enableTelemetry = enabled;
this.disableGreetingMessage = true;
this.bus.emit('mongosh:update-user', this.userId, this.enableTelemetry);
const configPath = path.join(this.mongoshDir, 'config');
this.writeConfigFileSync(configPath);
if (enabled) {
return i18n.__('cli-repl.cli-repl.enabledTelemetry');
}
return i18n.__('cli-repl.cli-repl.disabledTelemetry');
}
/** write file sync given path and contents
*
* @param {string} filePath - path to file
*/
writeConfigFileSync(filePath: string): void {
const config = {
userId: this.userId,
enableTelemetry: this.enableTelemetry,
disableGreetingMessage: this.disableGreetingMessage
};
try {
fs.writeFileSync(filePath, JSON.stringify(config));
} catch (err) {
this.bus.emit('mongosh:error', err);
throw err;
}
}
/**
* Format the result to a string so it can be written to the output stream.
*/
writer = (result: any): string => {
// This checks for error instances.
// The writer gets called immediately by the internal `this.repl.eval`
// in case of errors.
if (result && result.message && typeof result.stack === 'string') {
this.bus.emit('mongosh:error', result);
this.ShellEvaluator.revertState();
return formatOutput({ type: 'Error', value: result });
}
return formatOutput(result);
};
/**
* The greeting for the shell.
*/
greet(): void {
console.log(`Using MongoDB: ${this.buildInfo.version}`);
console.log(`${MONGOSH_WIKI}`);
if (!this.disableGreetingMessage) console.log(TELEMETRY);
}
/**
* Is the password missing from the options?
*
* @param {NodeOptions} driverOptions - The driver options.
*
* @returns {boolean} If the password is missing.
*/
isPasswordMissing(driverOptions: NodeOptions): boolean {
return driverOptions.auth &&
driverOptions.auth.user &&
!driverOptions.auth.password;
}
/**
* Require the user to enter a password.
*
* @param {string} driverUrl - The driver URI.
* @param {NodeOptions} driverOptions - The driver options.
*/
requirePassword(driverUri: string, driverOptions: NodeOptions): void {
const readOptions = {
prompt: 'Enter password: ',
silent: true,
replace: '*'
};
read(readOptions, (error, password) => {
if (error) {
this.bus.emit('mongosh:error', error);
return console.log(formatError(error));
}
driverOptions.auth.password = password;
this.setupRepl(driverUri, driverOptions).catch((e) => {
this.bus.emit('mongosh:error', e);
console.log(formatError(e));
return;
});
});
}
}
export default CliRepl;
|
MaBeuLux88/mongosh
|
packages/shell-evaluator/src/shell-evaluator.spec.ts
|
<gh_stars>0
import chai from 'chai';
import sinon from 'sinon';
import sinonChai from 'sinon-chai';
chai.use(sinonChai);
const { expect } = chai;
import ShellEvaluator from './shell-evaluator';
import { EventEmitter } from 'events';
// Unit tests for ShellEvaluator: REPL input is either dispatched to the
// mapper (shell commands like `use`) or forwarded to the original evaluator,
// with shell state saved/reverted around each evaluation.
describe('ShellEvaluator', () => {
  let shellEvaluator: ShellEvaluator;
  let serviceProviderMock;
  let containerMock;
  let busMock;
  beforeEach(() => {
    // Fresh mocks for every test; the container only needs toggleTelemetry.
    serviceProviderMock = {} as any;
    busMock = new EventEmitter();
    containerMock = { toggleTelemetry: sinon.spy() };
    shellEvaluator = new ShellEvaluator(
      serviceProviderMock,
      busMock,
      containerMock
    );
  });
  describe('setCtx', () => {
    let ctx;
    beforeEach(() => {
      ctx = {};
      shellEvaluator.setCtx(ctx);
    });
    it('sets shell api globals', () => {
      expect(ctx).to.include.all.keys('it', 'help', 'show', 'use');
    });
    it('sets db', () => {
      expect(ctx.db.constructor.name).to.equal('Database');
    });
    it('sets the object as context for the mapper', () => {
      // mapper is private; the cast is a deliberate test-only reach-in.
      expect((shellEvaluator as any).mapper.context).to.equal(ctx);
    });
  });
  describe('customEval', () => {
    it('strips trailing spaces and ; before calling commands', async() => {
      const use = sinon.spy();
      (shellEvaluator as any).mapper.use = use;
      // originalEval is null: a recognized shell command never reaches it.
      await shellEvaluator.customEval(null, 'use somedb; ', {}, '');
      expect(use).to.have.been.calledWith('somedb');
    });
    it('calls original eval for plain javascript', async() => {
      const originalEval = sinon.spy();
      await shellEvaluator.customEval(originalEval, 'doSomething();', {}, '');
      expect(originalEval).to.have.been.calledWith(
        'doSomething();',
        {},
        ''
      );
    });
    it('reverts state if error thrown', async() => {
      const originalEval = (): any => { throw new Error(); };
      const revertSpy = sinon.spy();
      const saveSpy = sinon.spy();
      shellEvaluator.revertState = revertSpy;
      shellEvaluator.saveState = saveSpy;
      // The rethrown error is irrelevant here; only the spies matter.
      try {
        await shellEvaluator.customEval(originalEval, 'anything()', {}, '');
        // eslint-disable-next-line no-empty
      } catch (e) {
      }
      expect(revertSpy.calledOnce).to.be.true;
      expect(saveSpy.calledOnce).to.be.true;
    });
    it('does not revert state with no error', async() => {
      const originalEval = (): any => { return 1; };
      const revertSpy = sinon.spy();
      const saveSpy = sinon.spy();
      shellEvaluator.revertState = revertSpy;
      shellEvaluator.saveState = saveSpy;
      await shellEvaluator.customEval(originalEval, 'anything()', {}, '');
      expect(revertSpy.calledOnce).to.be.false;
      expect(saveSpy.calledOnce).to.be.true;
    });
  });
});
|
MaBeuLux88/mongosh
|
packages/shell-api/src/command-result.spec.ts
|
<gh_stars>0
import { CommandResult } from './shell-api';
import { expect } from 'chai';
describe('CommandResult', () => {
describe('#shellApiType', () => {
it('returns the type', () => {
const commandResult = new CommandResult('ResultType', 'value');
expect(commandResult.shellApiType()).to.equal('ResultType');
});
});
describe('#toReplString', () => {
it('returns the value', () => {
const commandResult = new CommandResult('ResultType', 'value');
expect(commandResult.toReplString()).to.equal('value');
});
});
});
|
MaBeuLux88/mongosh
|
packages/service-provider-browser/src/unsupported-cursor.ts
|
<gh_stars>0
import { Document, Cursor } from '@mongosh/service-provider-core';
/**
 * A Cursor implementation for operations the browser service provider does
 * not support. Every cursor method throws; toArray rejects with the message
 * supplied at construction time.
 */
class UnsupportedCursor implements Cursor {
  private readonly message: string;

  /**
   * Create the unsupported cursor with a rejection message.
   *
   * @param {String} message - The message.
   */
  constructor(message: string) {
    this.message = message;
  }

  // Single home for the boilerplate thrown by every unsupported method;
  // the `never` return type lets callers use it as any return value.
  private unsupported(): never {
    throw new Error("Method not implemented.");
  }

  addOption(option: number): Cursor {
    return this.unsupported();
  }
  allowPartialResults(): Cursor {
    return this.unsupported();
  }
  batchSize(size: number): Cursor {
    return this.unsupported();
  }
  close(options: Document): Promise<void> {
    return this.unsupported();
  }
  isClosed(): boolean {
    return this.unsupported();
  }
  collation(spec: Document): Cursor {
    return this.unsupported();
  }
  comment(cmt: string): Cursor {
    return this.unsupported();
  }
  count(): Promise<number> {
    return this.unsupported();
  }
  forEach(f: any): Promise<void> {
    return this.unsupported();
  }
  hasNext(): Promise<boolean> {
    return this.unsupported();
  }
  hint(index: string): Cursor {
    return this.unsupported();
  }
  isExhausted(): Promise<boolean> {
    return this.unsupported();
  }
  itcount(): Promise<number> {
    return this.unsupported();
  }
  limit(value: number): Cursor {
    return this.unsupported();
  }
  map(f: any): Cursor {
    return this.unsupported();
  }
  max(indexBounds: Document): Cursor {
    return this.unsupported();
  }
  maxTimeMS(value: number): Cursor {
    return this.unsupported();
  }
  min(indexBounds: Document): Cursor {
    return this.unsupported();
  }
  next(): Promise<any> {
    return this.unsupported();
  }
  noCursorTimeout(): Cursor {
    return this.unsupported();
  }
  oplogReplay(): Cursor {
    return this.unsupported();
  }
  projection(spec: Document): Cursor {
    return this.unsupported();
  }
  readPref(preference: string, tagSet?: Document[]): Cursor {
    return this.unsupported();
  }
  returnKey(enabled: boolean): Cursor {
    return this.unsupported();
  }
  size(): Promise<number> {
    return this.unsupported();
  }
  skip(value: number): Cursor {
    return this.unsupported();
  }
  sort(spec: Document): Cursor {
    return this.unsupported();
  }
  tailable(): Cursor {
    return this.unsupported();
  }
  explain(verbosity: string): Promise<any> {
    return this.unsupported();
  }

  /**
   * When the cursor is for an unsupported operation,
   * this method will reject.
   *
   * @returns {Promise} The rejected promise.
   */
  async toArray(): Promise<Document[]> {
    throw new Error(this.message);
  }
}
export default UnsupportedCursor;
|
MaBeuLux88/mongosh
|
packages/build/src/compile-exec.spec.ts
|
import path from 'path';
import os from 'os';
import fs from 'fs';
import { expect } from 'chai';
import compileExec, {
ExecName,
Target,
determineExecName,
determineTarget,
executablePath
} from './compile-exec';
import Platform from './platform';
// Tests for the compile-exec module: exec-name/target constants, the
// platform-mapping helpers, and an end-to-end compilation smoke test.
describe('compile module', () => {
  describe('ExecName', () => {
    describe('ExecName.Windows', () => {
      it('returns mongosh.exe', () => {
        expect(ExecName.Windows).to.equal('mongosh.exe');
      });
    });
    describe('ExecName.Posix', () => {
      it('returns mongosh', () => {
        expect(ExecName.Posix).to.equal('mongosh');
      });
    });
  });
  describe('Target', () => {
    describe('Target.Windows', () => {
      it('returns win', () => {
        expect(Target.Windows).to.equal('win');
      });
    });
    describe('Target.MacOs', () => {
      it('returns macos', () => {
        expect(Target.MacOs).to.equal('macos');
      });
    });
    describe('Target.Linux', () => {
      it('returns linux', () => {
        expect(Target.Linux).to.equal('linux');
      });
    });
  });
  describe('.determineExecName', () => {
    context('when the platform is windows', () => {
      it('returns mongosh.exe', () => {
        expect(determineExecName(Platform.Windows)).to.equal(ExecName.Windows);
      });
    });
    context('when the platform is not windows', () => {
      it('returns mongosh', () => {
        expect(determineExecName(Platform.Linux)).to.equal(ExecName.Posix);
      });
    });
  });
  describe('.determineTarget', () => {
    context('when the platform is windows', () => {
      it('returns win', () => {
        expect(determineTarget(Platform.Windows)).to.equal(Target.Windows);
      });
    });
    context('when the platform is macos', () => {
      it('returns macos', () => {
        expect(determineTarget(Platform.MacOs)).to.equal(Target.MacOs);
      });
    });
    context('when the platform is linux', () => {
      it('returns linux', () => {
        expect(determineTarget(Platform.Linux)).to.equal(Target.Linux);
      });
    });
  });
  describe('.executablePath', () => {
    context('when the platform is windows', () => {
      it('returns the path', () => {
        expect(executablePath('', Platform.Windows)).to.equal('mongosh.exe');
      });
    });
    context('when the platform is macos', () => {
      it('returns the path', () => {
        expect(executablePath('', Platform.MacOs)).to.equal('mongosh');
      });
    });
    context('when the platform is linux', () => {
      it('returns the path', () => {
        expect(executablePath('', Platform.Linux)).to.equal('mongosh');
      });
    });
  });
  // Integration-style: actually compiles examples/input.js for the host
  // platform, so this is slow relative to the unit tests above. The binary
  // is removed again in after().
  describe('.compileExec', () => {
    const platform = os.platform();
    // NOTE(review): os.platform() returns a NodeJS.Platform string —
    // presumably value-compatible with the Platform enum; confirm.
    const expectedExecutable = executablePath(__dirname, platform);
    const inputFile = path.join(__dirname, '..', 'examples', 'input.js');
    before(() => {
      return compileExec(inputFile, __dirname, platform);
    });
    after((done) => {
      fs.unlink(expectedExecutable, done);
    });
    it('builds the executable', (done) => {
      fs.stat(expectedExecutable, (error, stats) => {
        expect(error).to.equal(null);
        expect(stats.size).to.be.above(0);
        done();
      });
    });
  });
});
|
avgust13/Account-Manager
|
src/renderer/utils/toast.tsx
|
import React, {ReactNode} from 'react';
import {toast} from 'react-toastify';
import Toast, {ToastType} from '@renderer/components/Toast';
/**
 * Normalize an arbitrary thrown value into a string and show it as a toast.
 * Precedence: plain string, then an HTTP response body (axios-style error),
 * then a standard Error message, then a JSON dump of the whole value.
 */
export const displayErrorToast = (error: any): void => {
  let errorStr: string;
  if (typeof error === 'string') {
    errorStr = error;
  } else if (error?.response?.data) {
    errorStr = JSON.stringify(error.response.data);
  } else if (error?.message) {
    errorStr = error.message;
  } else {
    errorStr = JSON.stringify(error);
  }
  displayToast(errorStr);
};
/**
 * Show a toast with the given content; defaults to the 'warning' style.
 * An optional className is forwarded to the inner Toast component.
 */
export const displayToast = (message: ReactNode, type: ToastType = 'warning', className?: string): void => {
  const content = (
    <Toast className={className} type={type}>
      {message}
    </Toast>
  );
  toast(content);
};
|
avgust13/Account-Manager
|
src/renderer/components/Tiles/TileSigningKey/index.tsx
|
import React, {FC, useEffect} from 'react';
import clsx from 'clsx';
import {useBooleanState} from '@renderer/hooks';
import {getCustomClassNames} from '@renderer/utils/components';
import Tile from '../Tile';
import './TileSigningKey.scss';
interface ComponentProps {
  accountNumber: string;
  className?: string;
  loading: boolean;
  signingKey: string;
}

/**
 * Tile displaying an account's signing key, masked by default with a
 * Show/Hide toggle. The key is re-hidden whenever the account changes.
 */
const TileSigningKey: FC<ComponentProps> = ({accountNumber, className, loading, signingKey}) => {
  const [signingKeyVisible, toggleSigningKeyVisible, , setSigningKeyInvisible] = useBooleanState(false);

  // Never leave a previous account's key visible when switching accounts.
  useEffect(() => {
    setSigningKeyInvisible();
  }, [accountNumber, setSigningKeyInvisible]);

  const renderSigningKeyDisplay = () => {
    if (loading) return '-';
    return <div>{signingKeyVisible ? signingKey : '*'.repeat(64)}</div>;
  };

  return (
    <Tile className={clsx('TileSigningKey', className)}>
      <>
        {/* Fixed copy-paste: was 'TileAccountNumber__top' inside a TileSigningKey
            block — verify TileSigningKey.scss styles '__top' accordingly. */}
        <div className={clsx('TileSigningKey__top', {...getCustomClassNames(className, '__top', true)})}>
          <div className={clsx('TileSigningKey__title', {...getCustomClassNames(className, '__title', true)})}>
            My Signing Key
          </div>
          {/* Fixed copy-paste: custom-class suffix was '__title' on the toggle. */}
          <div
            className={clsx('TileSigningKey__toggle', {...getCustomClassNames(className, '__toggle', true)})}
            onClick={toggleSigningKeyVisible}
          >
            {signingKeyVisible ? 'Hide' : 'Show'}
          </div>
        </div>
        <div
          className={clsx('TileSigningKey__signing-key', {...getCustomClassNames(className, '__signing-key', true)})}
        >
          {renderSigningKeyDisplay()}
        </div>
      </>
    </Tile>
  );
};

export default TileSigningKey;
|
avgust13/Account-Manager
|
src/renderer/components/PageTabs/index.tsx
|
<gh_stars>100-1000
import React, {FC} from 'react';
import {NavLink} from 'react-router-dom';
import clsx from 'clsx';
import './PageTabs.scss';
interface Item {
  name: string;
  page: string;
}

interface ComponentProps {
  baseUrl: string;
  breakpoint?: 'small' | 'large';
  items: Item[];
}

/**
 * Horizontal tab bar whose tabs link to `${baseUrl}/${page}`; the tab for
 * the active route is highlighted via NavLink's activeClassName.
 */
const PageTabs: FC<ComponentProps> = ({baseUrl, breakpoint = 'small', items}) => {
  const containerClassName = clsx('PageTabs', {
    'PageTabs--large': breakpoint === 'large',
    'PageTabs--small': breakpoint === 'small',
  });

  const renderTab = ({name, page}: Item) => (
    <NavLink activeClassName="PageTabs__tab--active" className="PageTabs__tab" key={page} to={`${baseUrl}/${page}`}>
      <div className="PageTabs__tab-name">{name}</div>
      <div className="PageTabs__tab-indicator" />
    </NavLink>
  );

  return <div className={containerClassName}>{items.map(renderTab)}</div>;
};

export default PageTabs;
|
avgust13/Account-Manager
|
src/renderer/utils/accounts.ts
|
<filename>src/renderer/utils/accounts.ts
import {sign} from 'tweetnacl';
import {getKeyPairDetails} from '@renderer/utils/signing';
/**
 * Generate a brand-new account: create a signing key pair and return its
 * formatted details via getKeyPairDetails.
 */
export const generateAccount = () => getKeyPairDetails(sign.keyPair());
|
avgust13/Account-Manager
|
src/renderer/selectors/validators.ts
|
<reponame>avgust13/Account-Manager<filename>src/renderer/selectors/validators.ts
import {createCachedSelector} from 're-reselect';
import {RootState} from '@renderer/types';
import {formatAddressFromNode} from '@renderer/utils/address';
import {getActivePrimaryValidator} from './app';
import {getManagedValidators} from './state';
import {getNthArg} from './utils';
// Cached (per-address) selector: is `address` the currently active primary
// validator? Returns false when no primary validator is set.
export const getIsActivePrimaryValidator: (state: RootState, address: string) => boolean = createCachedSelector(
  [getActivePrimaryValidator, getNthArg(1)],
  (activePrimaryValidator, address: string) =>
    activePrimaryValidator ? formatAddressFromNode(activePrimaryValidator) === address : false,
)(getNthArg(1));
// Cached (per-address) selector: is `address` one of the user's managed
// validators?
export const getIsManagedValidator: (state: RootState, address: string) => boolean = createCachedSelector(
  [getManagedValidators, getNthArg(1)],
  (managedValidators, address: string) => !!managedValidators[address],
)(getNthArg(1));
|
avgust13/Account-Manager
|
src/renderer/types/declarations/yup.d.ts
|
<gh_stars>1-10
import {NumberSchema, NumberSchemaConstructor, Ref, StringSchema, StringSchemaConstructor} from 'yup';
// Module augmentation: extend yup's schema interfaces with custom validator
// methods that are registered at runtime elsewhere in the app.
declare module 'yup' {
  interface NumberSchema {
    // Validates the schema's value against the resolved value of `ref`
    // using `cb`; fails with `message`.
    // NOTE(review): `ref` is typed `any` — presumably a yup Ref; confirm.
    callbackWithRef(ref: any, cb: (thisValue: number, refValue: any) => boolean, message: string): NumberSchema;
  }
  interface StringSchema {
    // Passes when the value equals / does not equal the referenced field.
    equalTo(ref: Ref, message?: string): StringSchema;
    notEqualTo(ref: Ref, message?: string): StringSchema;
  }
}
// Re-exported constructors so consumers can import them from this module.
export const number: NumberSchemaConstructor;
export const string: StringSchemaConstructor;
|
avgust13/Account-Manager
|
src/renderer/store/app/index.ts
|
<gh_stars>1-10
import {combineReducers} from '@reduxjs/toolkit';
import managedAccounts, {
clearManagedAccounts,
setManagedAccount,
setManagedAccountBalance,
unsetManagedAccount,
} from './managedAccounts';
import managedBanks, {clearManagedBanks, setManagedBank, unsetActiveBank, unsetManagedBank} from './managedBanks';
import managedFriends, {clearManagedFriends, setManagedFriend, unsetManagedFriend} from './managedFriends';
import managedValidators, {
clearManagedValidators,
setManagedValidator,
unsetActivePrimaryValidator,
unsetManagedValidator,
} from './managedValidators';
// Re-export all managed-* slice actions so consumers can import them from
// '@renderer/store/app' instead of the individual slice files.
export {
  clearManagedAccounts,
  clearManagedBanks,
  clearManagedFriends,
  clearManagedValidators,
  setManagedAccount,
  setManagedAccountBalance,
  setManagedBank,
  setManagedFriend,
  setManagedValidator,
  unsetActiveBank,
  unsetActivePrimaryValidator,
  unsetManagedAccount,
  unsetManagedBank,
  unsetManagedFriend,
  unsetManagedValidator,
};
// Root reducer for the user-managed "app" domain (accounts, banks, friends,
// and validators).
const appReducers = combineReducers({
  managedAccounts: managedAccounts.reducer,
  managedBanks: managedBanks.reducer,
  managedFriends: managedFriends.reducer,
  managedValidators: managedValidators.reducer,
});
export default appReducers;
|
avgust13/Account-Manager
|
src/renderer/components/NodeLink/index.tsx
|
<reponame>avgust13/Account-Manager<filename>src/renderer/components/NodeLink/index.tsx<gh_stars>100-1000
import React, {FC, memo} from 'react';
import {NavLink} from 'react-router-dom';
import {AddressData} from '@renderer/types';
import {formatPathFromNode} from '@renderer/utils/address';
import './NodeLink.scss';
interface ComponentProps {
  node: AddressData;
  urlBase: 'bank' | 'validator';
}

/**
 * Link to a node's overview page, labelled with the node's IP address.
 * Memoized so it re-renders only when `node` or `urlBase` change.
 */
const NodeLink: FC<ComponentProps> = ({node, urlBase}) => {
  const overviewPath = `/${urlBase}/${formatPathFromNode(node)}/overview`;
  return (
    <NavLink className="NodeLink" to={overviewPath}>
      {node.ip_address}
    </NavLink>
  );
};

export default memo(NodeLink);
|
avgust13/Account-Manager
|
src/renderer/containers/Friend/AddFriendModal/index.tsx
|
<reponame>avgust13/Account-Manager
import React, {FC, useMemo} from 'react';
import {useDispatch, useSelector} from 'react-redux';
import {useHistory} from 'react-router-dom';
import {FormInput, FormTextArea} from '@renderer/components/FormComponents';
import Modal from '@renderer/components/Modal';
import {getManagedFriends} from '@renderer/selectors';
import {setManagedFriend} from '@renderer/store/app';
import {AppDispatch} from '@renderer/types';
import yup from '@renderer/utils/yup';
import './AddFriendModal.scss';
// Empty form state; FormValues below derives its shape from this object.
const initialValues = {
  accountNumber: '',
  nickname: '',
};
type FormValues = typeof initialValues;
interface ComponentProps {
  close(): void;
}
// Modal for adding a friend by account number (with an optional nickname).
// On submit the friend is stored and the app navigates to their overview.
const AddFriendModal: FC<ComponentProps> = ({close}) => {
  const dispatch = useDispatch<AppDispatch>();
  const history = useHistory();
  const managedFriends = useSelector(getManagedFriends);
  // Account numbers already in the friend list — used to reject duplicates.
  const managedAccountNumbers = useMemo(
    () =>
      Object.values(managedFriends)
        .filter(({account_number}) => !!account_number)
        .map(({account_number}) => account_number),
    [managedFriends],
  );
  // Nicknames already in use — used to enforce unique nicknames.
  const managedFriendNicknames = useMemo(
    () =>
      Object.values(managedFriends)
        .filter(({nickname}) => !!nickname)
        .map(({nickname}) => nickname),
    [managedFriends],
  );
  const handleSubmit = ({accountNumber, nickname}: FormValues): void => {
    dispatch(
      setManagedFriend({
        account_number: accountNumber,
        nickname,
      }),
    );
    // Jump straight to the newly added friend's page before closing.
    history.push(`/friend/${accountNumber}/overview`);
    close();
  };
  // Rebuilt only when the duplicate-check lists change.
  const validationSchema = useMemo(() => {
    return yup.object().shape({
      accountNumber: yup
        .string()
        .length(64, 'Account number must be 64 characters long')
        .required('This field is required')
        .notOneOf(managedAccountNumbers, 'This friend already exists'),
      nickname: yup.string().notOneOf(managedFriendNicknames, 'That nickname is already taken'),
    });
  }, [managedAccountNumbers, managedFriendNicknames]);
  return (
    <Modal
      className="AddFriendModal"
      close={close}
      header="Add Friend"
      ignoreDirty
      initialValues={initialValues}
      onSubmit={handleSubmit}
      submitButton="Add"
      validationSchema={validationSchema}
    >
      <FormInput focused label="Nickname" name="nickname" />
      <FormTextArea label="Account Number" name="accountNumber" required />
    </Modal>
  );
};
export default AddFriendModal;
|
avgust13/Account-Manager
|
src/renderer/containers/Bank/BankTransactions/index.tsx
|
<reponame>avgust13/Account-Manager
import React, {FC, useMemo} from 'react';
import AccountLink from '@renderer/components/AccountLink';
import PageTable, {PageTableData, PageTableItems} from '@renderer/components/PageTable';
import Pagination from '@renderer/components/Pagination';
import {BANK_BANK_TRANSACTIONS} from '@renderer/constants';
import {useAddress, usePaginatedNetworkDataFetcher} from '@renderer/hooks';
import {BankTransaction} from '@renderer/types';
// Column identifiers for the transactions table.
enum TableKeys {
  id,
  block,
  sender,
  recipient,
  amount,
}

/**
 * Paginated table of a bank's transactions. Sender and recipient render as
 * account links; data is fetched per-page for the bank at the current
 * address.
 */
const BankTransactions: FC = () => {
  const address = useAddress();
  const {
    count,
    currentPage,
    loading,
    results: bankBankTransactions,
    setPage,
    totalPages,
  } = usePaginatedNetworkDataFetcher<BankTransaction>(BANK_BANK_TRANSACTIONS, address);
  const bankBankTransactionsTableData = useMemo<PageTableData[]>(
    () =>
      // `.map` always yields an array, so the previous `|| []` fallback was
      // dead code and has been removed.
      bankBankTransactions.map((bankTransaction) => ({
        key: bankTransaction.id,
        [TableKeys.amount]: bankTransaction.amount,
        [TableKeys.block]: bankTransaction.block.id,
        [TableKeys.id]: bankTransaction.id,
        [TableKeys.recipient]: <AccountLink accountNumber={bankTransaction.recipient} />,
        [TableKeys.sender]: <AccountLink accountNumber={bankTransaction.block.sender} />,
      })),
    [bankBankTransactions],
  );
  const pageTableItems = useMemo<PageTableItems>(
    () => ({
      data: bankBankTransactionsTableData,
      headers: {
        [TableKeys.amount]: 'Amount',
        [TableKeys.block]: 'Block',
        [TableKeys.id]: 'ID',
        [TableKeys.recipient]: 'Recipient',
        [TableKeys.sender]: 'Sender',
      },
      orderedKeys: [TableKeys.id, TableKeys.block, TableKeys.sender, TableKeys.recipient, TableKeys.amount],
    }),
    [bankBankTransactionsTableData],
  );
  return (
    <div className="BankTransactions">
      <PageTable count={count} currentPage={currentPage} items={pageTableItems} loading={loading} />
      <Pagination currentPage={currentPage} setPage={setPage} totalPages={totalPages} />
    </div>
  );
};
export default BankTransactions;
|
avgust13/Account-Manager
|
src/renderer/containers/Account/CreateAccountModal/CreateAccountModalFields.tsx
|
import React, {FC, useEffect} from 'react';
import {FormInput, FormRadioGroup, FormTextArea} from '@renderer/components/FormComponents';
import {useFormContext} from '@renderer/hooks';
export const initialValues = {
nickname: '',
signingKey: '',
type: 'create',
};
export type FormValues = typeof initialValues;
interface ComponentProps {
setIsCreatingNewAccount(val: boolean): void;
}
const CreateAccountModalFields: FC<ComponentProps> = ({setIsCreatingNewAccount}) => {
const {
values: {type},
} = useFormContext<FormValues>();
const renderSigningKey = type === 'add';
useEffect(() => {
if (type === 'add') {
setIsCreatingNewAccount(false);
} else {
setIsCreatingNewAccount(true);
}
}, [setIsCreatingNewAccount, type]);
return (
<>
<FormRadioGroup
focused
options={[
{label: 'Create New Account', value: 'create'},
{label: 'Add Existing Account', value: 'add'},
]}
name="type"
/>
<FormInput label="Nickname" name="nickname" />
{renderSigningKey && <FormTextArea label="Signing Key" name="signingKey" required />}
</>
);
};
export default CreateAccountModalFields;
|
avgust13/Account-Manager
|
src/renderer/containers/Bank/BankConfirmationBlocks/index.tsx
|
import React, {FC, useMemo} from 'react';
import PageTable, {PageTableData, PageTableItems} from '@renderer/components/PageTable';
import Pagination from '@renderer/components/Pagination';
import {BANK_CONFIRMATION_BLOCKS} from '@renderer/constants';
import {useAddress, usePaginatedNetworkDataFetcher} from '@renderer/hooks';
import {BankConfirmationBlock} from '@renderer/types';
// Column identifiers for the confirmation-blocks table.
enum TableKeys {
  id,
  block,
  blockIdentifier,
  validator,
}

/**
 * Paginated table of a bank's confirmation blocks, fetched per-page for the
 * bank at the current address.
 */
const BankConfirmationBlocks: FC = () => {
  const address = useAddress();
  const {
    count,
    currentPage,
    loading,
    results: bankConfirmationBlocks,
    setPage,
    totalPages,
  } = usePaginatedNetworkDataFetcher<BankConfirmationBlock>(BANK_CONFIRMATION_BLOCKS, address);
  const bankConfirmationBlocksTableData = useMemo<PageTableData[]>(
    () =>
      // `.map` always yields an array, so the previous `|| []` fallback was
      // dead code and has been removed.
      bankConfirmationBlocks.map((confirmationBlock) => ({
        key: confirmationBlock.id,
        [TableKeys.blockIdentifier]: confirmationBlock.block_identifier,
        [TableKeys.block]: confirmationBlock.block,
        [TableKeys.id]: confirmationBlock.id,
        [TableKeys.validator]: confirmationBlock.validator,
      })),
    [bankConfirmationBlocks],
  );
  const pageTableItems = useMemo<PageTableItems>(
    () => ({
      data: bankConfirmationBlocksTableData,
      headers: {
        [TableKeys.blockIdentifier]: 'Block Identifier',
        [TableKeys.block]: 'Block',
        [TableKeys.id]: 'ID',
        [TableKeys.validator]: 'Validator',
      },
      orderedKeys: [TableKeys.id, TableKeys.block, TableKeys.blockIdentifier, TableKeys.validator],
    }),
    [bankConfirmationBlocksTableData],
  );
  return (
    <div className="BankConfirmationBlocks">
      <PageTable count={count} currentPage={currentPage} items={pageTableItems} loading={loading} />
      <Pagination currentPage={currentPage} setPage={setPage} totalPages={totalPages} />
    </div>
  );
};
export default BankConfirmationBlocks;
|
avgust13/Account-Manager
|
src/renderer/containers/Validator/ValidatorOverview/index.tsx
|
import React, {FC} from 'react';
import A from '@renderer/components/A';
import DetailPanel from '@renderer/components/DetailPanel';
import {Loader} from '@renderer/components/FormElements';
import {VALIDATOR_CONFIGS} from '@renderer/constants';
import {useNetworkConfigFetcher} from '@renderer/hooks';
import {ValidatorConfig} from '@renderer/types';
import './ValidatorOverview.scss';
// Read-only detail panel showing a validator's network configuration.
// Shows a loader until the config fetch for the current address resolves.
const ValidatorOverview: FC = () => {
  const {data: validatorConfig, loading} = useNetworkConfigFetcher<ValidatorConfig>(VALIDATOR_CONFIGS);
  return (
    <div className="ValidatorOverview">
      {loading || !validatorConfig ? (
        <Loader />
      ) : (
        <DetailPanel
          items={[
            {
              key: 'Account Number',
              value: validatorConfig.account_number,
            },
            {
              key: 'IP Address',
              value: validatorConfig.ip_address,
            },
            {
              key: 'Network ID',
              value: validatorConfig.node_identifier,
            },
            // Optional fields fall back to '-' for display.
            {
              key: 'Port',
              value: validatorConfig.port || '-',
            },
            {
              key: 'Protocol',
              value: validatorConfig.protocol,
            },
            {
              key: 'Version',
              value: validatorConfig.version,
            },
            {
              key: 'Tx Fee',
              value: validatorConfig.default_transaction_fee,
            },
            {
              key: 'Daily Rate',
              value: validatorConfig.daily_confirmation_rate || '-',
            },
            // Root account file renders as an external link.
            {
              key: 'Root Account File',
              value: (
                <A className="ValidatorOverview__link" href={validatorConfig.root_account_file}>
                  {validatorConfig.root_account_file}
                </A>
              ),
            },
            {
              key: 'Root Account File Hash',
              value: validatorConfig.root_account_file_hash,
            },
            {
              key: 'Seed Block Identifier',
              value: validatorConfig.seed_block_identifier || '-',
            },
            {
              key: 'Node Type',
              value: validatorConfig.node_type,
            },
          ]}
          title="Validator Information"
        />
      )}
    </div>
  );
};
export default ValidatorOverview;
|
avgust13/Account-Manager
|
src/renderer/store/validators/index.ts
|
<filename>src/renderer/store/validators/index.ts
import {combineReducers} from '@reduxjs/toolkit';
import {ACCOUNTS, BANKS, CONFIGS, VALIDATORS} from '@renderer/constants';
import validatorAccounts, {
setValidatorAccounts,
setValidatorAccountsError,
unsetValidatorAccounts,
} from './validatorAccounts';
import validatorBanks, {setValidatorBanks, setValidatorBanksError, unsetValidatorBanks} from './validatorBanks';
import validatorConfigs, {setValidatorConfig, setValidatorConfigError} from './validatorConfigs';
import validatorValidators, {
setValidatorValidators,
setValidatorValidatorsError,
unsetValidatorValidators,
} from './validatorValidators';
// Re-export all validator slice actions so consumers can import them from
// '@renderer/store/validators' instead of the individual slice files.
export {
  setValidatorAccounts,
  setValidatorAccountsError,
  setValidatorBanks,
  setValidatorBanksError,
  setValidatorConfig,
  setValidatorConfigError,
  setValidatorValidators,
  setValidatorValidatorsError,
  unsetValidatorAccounts,
  unsetValidatorBanks,
  unsetValidatorValidators,
};
// Root reducer for per-validator network data, keyed by data-type constant.
const validatorReducers = combineReducers({
  [ACCOUNTS]: validatorAccounts.reducer,
  [BANKS]: validatorBanks.reducer,
  [CONFIGS]: validatorConfigs.reducer,
  [VALIDATORS]: validatorValidators.reducer,
});
export default validatorReducers;
|
avgust13/Account-Manager
|
src/renderer/selectors/banks.ts
|
<filename>src/renderer/selectors/banks.ts
import {createCachedSelector} from 're-reselect';
import {getNthArg} from '@renderer/selectors/utils';
import {RootState} from '@renderer/types';
import {formatAddressFromNode} from '@renderer/utils/address';
import {getActiveBank} from './app';
import {getManagedBanks} from './state';
// Cached (per-address) selector: is `address` the currently active bank?
// Returns false when no bank is active.
export const getIsActiveBank: (state: RootState, address: string) => boolean = createCachedSelector(
  [getActiveBank, getNthArg(1)],
  (activeBank, address: string) => {
    if (!activeBank) return false;
    return formatAddressFromNode(activeBank) === address;
  },
)(getNthArg(1));
// Cached (per-address) selector: is `address` one of the user's managed
// banks?
export const getIsManagedBank: (state: RootState, address: string) => boolean = createCachedSelector(
  [getManagedBanks, getNthArg(1)],
  (managedBanks, address: string) => Boolean(managedBanks[address]),
)(getNthArg(1));
|
avgust13/Account-Manager
|
src/renderer/components/PageTable/index.tsx
|
<gh_stars>1-10
import React, {FC, ReactNode, useState} from 'react';
import clsx from 'clsx';
import ArrowToggle from '@renderer/components/ArrowToggle';
import Loader from '@renderer/components/FormElements/Loader';
import PaginationSummary from '@renderer/components/PaginationSummary';
import {getCustomClassNames} from '@renderer/utils/components';
import './PageTable.scss';
// Maps a table-key (enum value from the consuming page) to its header label.
interface Header {
  [tableKey: string]: string;
}
// One row of table data; `key` is the React key, other entries are cells.
export interface PageTableData {
  key: string;
  [tableKey: string]: ReactNode;
}
export interface PageTableItems {
  orderedKeys: number[];
  headers: Header;
  data: PageTableData[];
}
interface ComponentProps {
  className?: string;
  count: number;
  currentPage: number;
  items: PageTableItems;
  loading: boolean;
}
// Generic paginated data table with a per-row expand/collapse arrow.
// Renders a Loader while data is being fetched.
const PageTable: FC<ComponentProps> = ({className, count, currentPage, items, loading}) => {
  const {headers, data, orderedKeys} = items;
  // Indices (into `data`) of the rows currently expanded.
  const [expanded, setExpanded] = useState<number[]>([]);
  // Curried so each row gets its own zero-arg click handler.
  const toggleExpanded = (indexToToggle: number) => (): void => {
    setExpanded(
      expanded.includes(indexToToggle) ? expanded.filter((i) => i !== indexToToggle) : [...expanded, indexToToggle],
    );
  };
  const renderRows = (): ReactNode => {
    return data.map((item, dataIndex) => {
      const rowIsExpanded = expanded.includes(dataIndex);
      return (
        <tr
          className={clsx('PageTable__row', {
            'PageTable__row--expanded': rowIsExpanded,
            ...getCustomClassNames(className, '__row', true),
            ...getCustomClassNames(className, '__row--expanded', rowIsExpanded),
          })}
          key={item.key}
        >
          <td>
            <ArrowToggle
              className={clsx('PageTable__ArrowToggle', {...getCustomClassNames(className, '__ArrowToggle', true)})}
              expanded={rowIsExpanded}
              onClick={toggleExpanded(dataIndex)}
            />
          </td>
          {orderedKeys.map((key) => (
            <td key={key}>{item[key] || '-'}</td>
          ))}
        </tr>
      );
    });
  };
  // NOTE(review): the header row uses <td> inside <thead>; semantically these
  // would normally be <th> — confirm against PageTable.scss before changing.
  return loading ? (
    <Loader />
  ) : (
    <>
      <PaginationSummary className="PageTable__PaginationSummary" count={count} currentPage={currentPage} />
      <table className={clsx('PageTable', className)}>
        <thead className={clsx('PageTable__thead', {...getCustomClassNames(className, '__thead', true)})}>
          <tr>
            <th />
            {orderedKeys.map((key) => (
              <td key={key}>{headers[key]}</td>
            ))}
          </tr>
        </thead>
        <tbody>{renderRows()}</tbody>
      </table>
    </>
  );
};
export default PageTable;
|
avgust13/Account-Manager
|
src/renderer/components/AccountLink/index.tsx
|
<gh_stars>1-10
import React, {FC, memo} from 'react';
import {NavLink} from 'react-router-dom';
import './AccountLink.scss';
interface ComponentProps {
accountNumber: string;
}
const AccountLink: FC<ComponentProps> = ({accountNumber}) => {
return (
<NavLink className="AccountLink" to={`/account/${accountNumber}/overview`}>
{accountNumber}
</NavLink>
);
};
export default memo(AccountLink);
|
avgust13/Account-Manager
|
src/renderer/components/Tiles/Tile/index.tsx
|
import React, {FC, ReactNode} from 'react';
import clsx from 'clsx';
import './Tile.scss';
interface ComponentProps {
children: ReactNode;
className?: string;
}
const Tile: FC<ComponentProps> = ({children, className}) => {
return <div className={clsx('Tile', className)}>{children}</div>;
};
export default Tile;
|
avgust13/Account-Manager
|
src/renderer/containers/ChangeActiveBankModal/index.tsx
|
import React, {FC, useState} from 'react';
import {useDispatch} from 'react-redux';
import {useHistory} from 'react-router-dom';
import {connectAndStoreLocalData} from '@renderer/dispatchers/app';
import {FormInput, FormSelect} from '@renderer/components/FormComponents';
import Modal from '@renderer/components/Modal';
import {AppDispatch, InputOption, ProtocolType} from '@renderer/types';
import {formatPathFromNode} from '@renderer/utils/address';
import {displayErrorToast, displayToast} from '@renderer/utils/toast';
import yup from '@renderer/utils/yup';
// All fields start as strings because they back text inputs; `port` is
// converted to a number (or null) at submit time.
const initialValues = {
  ipAddress: '',
  nickname: '',
  port: '',
  protocol: 'http' as ProtocolType,
};
type FormValues = typeof initialValues;

const protocolOptions: InputOption[] = [{value: 'http'}, {value: 'https'}];

// Loose match for an IPv6 address (eight 1-4 digit hex groups) or a dotted
// IPv4 address. NOTE(review): not anchored and does not range-check octets,
// so some invalid addresses pass — confirm this looseness is intentional.
const genericIpAddressRegex = /([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4}|(\d{1,3}\.){3}\d{1,3}/;

const validationSchema = yup.object().shape({
  ipAddress: yup
    .string()
    .required('This field is required')
    .matches(genericIpAddressRegex, {excludeEmptyString: true, message: 'IPv4 or IPv6 addresses only'}),
  nickname: yup.string(),
  port: yup.number().integer(),
  protocol: yup.string().required(),
});

interface ComponentProps {
  close(): void;
}

/**
 * Modal for connecting to a different active bank. On submit it attempts a
 * connection and persists the bank locally; on success it navigates to the
 * new bank's overview page and closes the modal.
 */
const ChangeActiveBankModal: FC<ComponentProps> = ({close}) => {
  // Disables the submit button while a connection attempt is in flight.
  const [submitting, setSubmitting] = useState<boolean>(false);
  const dispatch = useDispatch<AppDispatch>();
  const history = useHistory();

  const handleSubmit = async ({ipAddress, nickname, port, protocol}: FormValues): Promise<void> => {
    try {
      setSubmitting(true);
      const bankAddressData = {
        ip_address: ipAddress,
        // Empty port string becomes null rather than NaN.
        port: port ? parseInt(port, 10) : null,
        protocol,
      };
      const response = await dispatch(connectAndStoreLocalData(bankAddressData, nickname));
      if (response?.error) {
        // Connection failed: surface the error and re-enable the form.
        displayErrorToast(response.error);
        setSubmitting(false);
        return;
      }
      if (response?.bankConfig) {
        history.push(`/bank/${formatPathFromNode(response.bankConfig)}/overview`);
      }
      // Success path: no setSubmitting(false) needed — the modal unmounts.
      close();
    } catch (error) {
      displayToast('An error occurred');
      setSubmitting(false);
    }
  };

  return (
    <Modal
      className="ChangeActiveBankModal"
      close={close}
      header="Change Active Bank"
      initialValues={initialValues}
      onSubmit={handleSubmit}
      submitButton="Connect"
      submitting={submitting}
      validationSchema={validationSchema}
    >
      <FormSelect focused label="Protocol" name="protocol" options={protocolOptions} required searchable={false} />
      <FormInput label="IP Address" name="ipAddress" required />
      <FormInput label="Port" name="port" type="number" />
      <FormInput label="Nickname" name="nickname" />
    </Modal>
  );
};

export default ChangeActiveBankModal;
|
avgust13/Account-Manager
|
src/renderer/containers/Validator/EditValidatorNicknameModal/index.tsx
|
<reponame>avgust13/Account-Manager<filename>src/renderer/containers/Validator/EditValidatorNicknameModal/index.tsx
import React, {FC} from 'react';
import {useDispatch} from 'react-redux';
import {FormInput} from '@renderer/components/FormComponents';
import Modal from '@renderer/components/Modal';
import {setManagedValidator} from '@renderer/store/app';
import {AppDispatch, ManagedNode} from '@renderer/types';
interface ComponentProps {
  close(): void;
  validator: ManagedNode;
}

/**
 * Modal for renaming a managed validator. Saving dispatches the validator
 * back to the store with the new nickname, then closes the modal.
 */
const EditValidatorNicknameModal: FC<ComponentProps> = ({close, validator}) => {
  const dispatch = useDispatch<AppDispatch>();

  const initialValues = {
    nickname: validator.nickname,
  };
  type FormValues = typeof initialValues;

  const handleSubmit = (values: FormValues): void => {
    const updatedValidator = {...validator, nickname: values.nickname};
    dispatch(setManagedValidator(updatedValidator));
    close();
  };

  return (
    <Modal
      className="EditValidatorNicknameModal"
      close={close}
      header="Edit Validator Nickname"
      initialValues={initialValues}
      onSubmit={handleSubmit}
      submitButton="Save"
    >
      <FormInput focused label="Validator Nickname" name="nickname" />
    </Modal>
  );
};

export default EditValidatorNicknameModal;
|
avgust13/Account-Manager
|
src/renderer/containers/Friend/DeleteFriendModal/index.tsx
|
import React, {FC} from 'react';
import {useDispatch} from 'react-redux';
import Modal from '@renderer/components/Modal';
import {unsetManagedFriend} from '@renderer/store/app';
import {AppDispatch, ManagedFriend} from '@renderer/types';
interface ComponentProps {
  close(): void;
  managedFriend: ManagedFriend;
}

/**
 * Confirmation modal for removing a managed friend. Confirming dispatches
 * the removal and closes the modal.
 */
const DeleteFriendModal: FC<ComponentProps> = ({close, managedFriend}) => {
  const dispatch = useDispatch<AppDispatch>();

  // FIX: was declared `async (): Promise<void>` despite containing no awaited
  // work; a plain synchronous handler matches the sibling modals' style.
  const handleSubmit = (): void => {
    dispatch(unsetManagedFriend(managedFriend));
    close();
  };

  return (
    <Modal cancelButton="Cancel" close={close} header="Remove Friend" onSubmit={handleSubmit} submitButton="Yes">
      Are you sure you want to remove your friend?
    </Modal>
  );
};

export default DeleteFriendModal;
|
avgust13/Account-Manager
|
src/renderer/types/notifications.ts
|
import {ValidatorConfirmationBlock} from './network';
/** A single in-app notification entry. */
export interface Notification {
  notificationTime: number; // when the notification was created (epoch time — TODO confirm units against producer)
  notificationType: string; // expected to be one of NotificationType's values
  payload: ValidatorConfirmationBlock; // confirmation block that triggered the notification
}

/** Known notification type discriminators. */
export enum NotificationType {
  confirmationBlockNotification = 'CONFIRMATION_BLOCK_NOTIFICATION',
}
|
avgust13/Account-Manager
|
src/renderer/store/app/managedAccounts.ts
|
<reponame>avgust13/Account-Manager
import {createSlice, PayloadAction} from '@reduxjs/toolkit';
import {MANAGED_ACCOUNTS} from '@renderer/constants';
import localStore from '@renderer/store/localStore';
import {AccountNumber, Balance, Dict, ManagedAccount} from '@renderer/types';
import {
clearLocalAndStateReducer,
getStateName,
setLocalAndAccountReducer,
unsetLocalAndAccountReducer,
} from '@renderer/utils/store';
// Redux slice for accounts managed by this app. State is seeded from the
// persistent localStore and mirrored back on every mutation so accounts
// survive app restarts.
const managedAccounts = createSlice({
  // Fall back to an empty dict when nothing has been persisted yet.
  initialState: (localStore.get(getStateName(MANAGED_ACCOUNTS)) || {}) as Dict<ManagedAccount>,
  name: MANAGED_ACCOUNTS,
  reducers: {
    clearManagedAccounts: clearLocalAndStateReducer(),
    setManagedAccount: setLocalAndAccountReducer<ManagedAccount>(MANAGED_ACCOUNTS),
    // Updates a single account's balance and re-persists the whole slice.
    setManagedAccountBalance: (state, {payload}: PayloadAction<AccountNumber & Balance>) => {
      const {account_number: accountNumber, balance} = payload;
      // NOTE(review): assumes the account already exists in state; an unknown
      // account_number would throw here — confirm callers guarantee this.
      state[accountNumber].balance = balance;
      localStore.set(getStateName(MANAGED_ACCOUNTS), state);
    },
    unsetManagedAccount: unsetLocalAndAccountReducer(MANAGED_ACCOUNTS),
  },
});

export const {
  clearManagedAccounts,
  setManagedAccount,
  setManagedAccountBalance,
  unsetManagedAccount,
} = managedAccounts.actions;

export default managedAccounts;
|
avgust13/Account-Manager
|
src/renderer/components/FormElements/index.tsx
|
<reponame>avgust13/Account-Manager<filename>src/renderer/components/FormElements/index.tsx
import Button, {BaseButtonProps} from './Button';
import Input, {BaseInputProps} from './Input';
import Loader from './Loader';
import Radio, {BaseRadioProps} from './Radio';
import Select, {BaseSelectProps} from './Select';
import SelectDetailed from './SelectDetailed';
import TextArea from './TextArea';

// FIX: type-only names were re-exported as values, which breaks under
// `isolatedModules` (single-file transpilers can't tell they are erased).
// `export type` keeps the barrel's public surface identical while making
// the type-only nature explicit.
export type {BaseButtonProps, BaseInputProps, BaseRadioProps, BaseSelectProps};

// Runtime component re-exports.
export {Button, Input, Loader, Radio, Select, SelectDetailed, TextArea};
|
avgust13/Account-Manager
|
src/renderer/store/app/managedValidators.ts
|
<gh_stars>1-10
import {createSlice} from '@reduxjs/toolkit';
import {MANAGED_VALIDATORS} from '@renderer/constants';
import localStore from '@renderer/store/localStore';
import {Dict, ManagedNode} from '@renderer/types';
import {
clearLocalAndStateReducer,
getStateName,
setLocalAndAddressReducer,
unsetActiveNodeReducer,
unsetLocalAndAddressReducer,
} from '@renderer/utils/store';
// Redux slice for validators managed by this app. State is seeded from the
// persistent localStore and mirrored back on every mutation so validators
// survive app restarts.
const managedValidators = createSlice({
  // Fall back to an empty dict when nothing has been persisted yet.
  initialState: (localStore.get(getStateName(MANAGED_VALIDATORS)) || {}) as Dict<ManagedNode>,
  name: MANAGED_VALIDATORS,
  reducers: {
    clearManagedValidators: clearLocalAndStateReducer(),
    setManagedValidator: setLocalAndAddressReducer<ManagedNode>(MANAGED_VALIDATORS),
    unsetActivePrimaryValidator: unsetActiveNodeReducer(),
    unsetManagedValidator: unsetLocalAndAddressReducer(MANAGED_VALIDATORS),
  },
});

export const {
  clearManagedValidators,
  setManagedValidator,
  unsetActivePrimaryValidator,
  unsetManagedValidator,
} = managedValidators.actions;

export default managedValidators;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.