text stringlengths 9 39.2M | dir stringlengths 26 295 | lang stringclasses 185
values | created_date timestamp[us] | updated_date timestamp[us] | repo_name stringlengths 1 97 | repo_full_name stringlengths 7 106 | star int64 1k 183k | len_tokens int64 1 13.8M |
|---|---|---|---|---|---|---|---|---|
```xml
import { ComponentType, PropsWithChildren, ReactNode } from 'react';
import clsx from 'clsx';
import { Icon } from '@@/Icon';
interface Props {
  // Optional leading icon: an already-rendered node or a component type for <Icon>.
  icon?: ReactNode | ComponentType<unknown>;
  // Heading text shown in the title bar.
  label: string;
  // Optional second row rendered under the title bar.
  description?: ReactNode;
  className?: string;
  id?: string;
}
export function TableTitle({
icon,
label,
children,
description,
className,
id,
}: PropsWithChildren<Props>) {
return (
<>
<div className={clsx('toolBar flex-col', className)} id={id}>
<div className="flex w-full items-center gap-1 p-0">
<h2 className="toolBarTitle m-0 text-base">
{icon && (
<div className="widget-icon">
<Icon icon={icon} className="space-right" />
</div>
)}
{label}
</h2>
{children}
</div>
</div>
{!!description && <div className="toolBar !pt-0">{description}</div>}
</>
);
}
``` | /content/code_sandbox/app/react/components/datatables/TableTitle.tsx | xml | 2016-05-19T20:15:28 | 2024-08-16T19:15:14 | portainer | portainer/portainer | 30,083 | 234 |
```xml
import type { PathnameNormalizer } from './pathname-normalizer'
import { denormalizePagePath } from '../../../shared/lib/page-path/denormalize-page-path'
import { PrefixPathnameNormalizer } from './prefix'
import { SuffixPathnameNormalizer } from './suffix'
/**
 * Normalizes `/_next/data/<buildID>/<page>.json` pathnames back to the
 * underlying page path.
 */
export class NextDataPathnameNormalizer implements PathnameNormalizer {
  private readonly prefix: PrefixPathnameNormalizer
  private readonly suffix = new SuffixPathnameNormalizer('.json')

  constructor(buildID: string) {
    if (!buildID) {
      throw new Error('Invariant: buildID is required')
    }
    this.prefix = new PrefixPathnameNormalizer(`/_next/data/${buildID}`)
  }

  public match(pathname: string) {
    // A data route must carry both the build-scoped prefix and the `.json` suffix.
    return this.prefix.match(pathname) && this.suffix.match(pathname)
  }

  public normalize(pathname: string, matched?: boolean): string {
    // Callers that already matched can pass `matched` to skip the re-check.
    if (!matched && !this.match(pathname)) return pathname

    const stripped = this.suffix.normalize(
      this.prefix.normalize(pathname, true),
      true
    )
    return denormalizePagePath(stripped)
  }
}
``` | /content/code_sandbox/packages/next/src/server/normalizers/request/next-data.ts | xml | 2016-10-05T23:32:51 | 2024-08-16T19:44:30 | next.js | vercel/next.js | 124,056 | 261 |
```xml
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     path_to_url
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
import { Component, Injector } from '@angular/core';
import {
Datasource,
legendPositions,
legendPositionTranslationMap,
WidgetSettings,
WidgetSettingsComponent
} from '@shared/models/widget.models';
import { UntypedFormBuilder, UntypedFormGroup } from '@angular/forms';
import { Store } from '@ngrx/store';
import { AppState } from '@core/core.state';
import { formatValue, mergeDeep } from '@core/utils';
import { DateFormatProcessor, DateFormatSettings } from '@shared/models/widget-settings.models';
import {
barChartWithLabelsDefaultSettings, BarChartWithLabelsWidgetSettings
} from '@home/components/widget/lib/chart/bar-chart-with-labels-widget.models';
@Component({
selector: 'tb-bar-chart-with-labels-widget-settings',
templateUrl: './bar-chart-with-labels-widget-settings.component.html',
styleUrls: ['./../widget-settings.scss']
})
/**
 * Settings component for the "bar chart with labels" widget: builds the
 * reactive settings form and enables/disables dependent controls whenever
 * one of the show* toggles changes.
 */
export class BarChartWithLabelsWidgetSettingsComponent extends WidgetSettingsComponent {

  // First configured datasource, or null when none is configured yet.
  // Presumably consumed by the component template — not referenced here.
  public get datasource(): Datasource {
    const datasources: Datasource[] = this.widgetConfig.config.datasources;
    if (datasources && datasources.length) {
      return datasources[0];
    } else {
      return null;
    }
  }

  legendPositions = legendPositions;
  legendPositionTranslationMap = legendPositionTranslationMap;

  barChartWidgetSettingsForm: UntypedFormGroup;

  // Bound up-front so the preview callbacks keep `this` when passed to children.
  tooltipValuePreviewFn = this._tooltipValuePreviewFn.bind(this);
  tooltipDatePreviewFn = this._tooltipDatePreviewFn.bind(this);

  constructor(protected store: Store<AppState>,
              private $injector: Injector,
              private fb: UntypedFormBuilder) {
    super(store);
  }

  protected settingsForm(): UntypedFormGroup {
    return this.barChartWidgetSettingsForm;
  }

  protected defaultSettings(): WidgetSettings {
    return mergeDeep<BarChartWithLabelsWidgetSettings>({} as BarChartWithLabelsWidgetSettings, barChartWithLabelsDefaultSettings);
  }

  protected onSettingsSet(settings: WidgetSettings) {
    // One control per persisted setting; no static validators — availability
    // is driven dynamically by updateValidators() below.
    this.barChartWidgetSettingsForm = this.fb.group({
      dataZoom: [settings.dataZoom, []],
      // bar labels / values / borders
      showBarLabel: [settings.showBarLabel, []],
      barLabelFont: [settings.barLabelFont, []],
      barLabelColor: [settings.barLabelColor, []],
      showBarValue: [settings.showBarValue, []],
      barValueFont: [settings.barValueFont, []],
      barValueColor: [settings.barValueColor, []],
      showBarBorder: [settings.showBarBorder, []],
      barBorderWidth: [settings.barBorderWidth, []],
      barBorderRadius: [settings.barBorderRadius, []],
      barBackgroundSettings: [settings.barBackgroundSettings, []],
      noAggregationBarWidthSettings: [settings.noAggregationBarWidthSettings, []],
      // chart layout
      grid: [settings.grid, []],
      yAxis: [settings.yAxis, []],
      xAxis: [settings.xAxis, []],
      thresholds: [settings.thresholds, []],
      animation: [settings.animation, []],
      // legend
      showLegend: [settings.showLegend, []],
      legendPosition: [settings.legendPosition, []],
      legendLabelFont: [settings.legendLabelFont, []],
      legendLabelColor: [settings.legendLabelColor, []],
      // tooltip
      showTooltip: [settings.showTooltip, []],
      tooltipLabelFont: [settings.tooltipLabelFont, []],
      tooltipLabelColor: [settings.tooltipLabelColor, []],
      tooltipValueFont: [settings.tooltipValueFont, []],
      tooltipValueColor: [settings.tooltipValueColor, []],
      tooltipShowDate: [settings.tooltipShowDate, []],
      tooltipDateFormat: [settings.tooltipDateFormat, []],
      tooltipDateFont: [settings.tooltipDateFont, []],
      tooltipDateColor: [settings.tooltipDateColor, []],
      tooltipDateInterval: [settings.tooltipDateInterval, []],
      tooltipBackgroundColor: [settings.tooltipBackgroundColor, []],
      tooltipBackgroundBlur: [settings.tooltipBackgroundBlur, []],
      // card appearance
      background: [settings.background, []],
      padding: [settings.padding, []]
    });
  }

  // Value changes on any of these controls re-run updateValidators().
  protected validatorTriggers(): string[] {
    return ['showBarLabel', 'showBarValue', 'showBarBorder', 'showLegend', 'showTooltip', 'tooltipShowDate'];
  }

  // Each show* flag gates its dependent controls by enabling/disabling them.
  // NOTE(review): the emitEvent parameter is not forwarded to the
  // enable()/disable() calls below — confirm this is intentional.
  protected updateValidators(emitEvent: boolean) {
    const showBarLabel: boolean = this.barChartWidgetSettingsForm.get('showBarLabel').value;
    const showBarValue: boolean = this.barChartWidgetSettingsForm.get('showBarValue').value;
    const showBarBorder: boolean = this.barChartWidgetSettingsForm.get('showBarBorder').value;
    const showLegend: boolean = this.barChartWidgetSettingsForm.get('showLegend').value;
    const showTooltip: boolean = this.barChartWidgetSettingsForm.get('showTooltip').value;
    const tooltipShowDate: boolean = this.barChartWidgetSettingsForm.get('tooltipShowDate').value;

    if (showBarLabel) {
      this.barChartWidgetSettingsForm.get('barLabelFont').enable();
      this.barChartWidgetSettingsForm.get('barLabelColor').enable();
    } else {
      this.barChartWidgetSettingsForm.get('barLabelFont').disable();
      this.barChartWidgetSettingsForm.get('barLabelColor').disable();
    }
    if (showBarValue) {
      this.barChartWidgetSettingsForm.get('barValueFont').enable();
      this.barChartWidgetSettingsForm.get('barValueColor').enable();
    } else {
      this.barChartWidgetSettingsForm.get('barValueFont').disable();
      this.barChartWidgetSettingsForm.get('barValueColor').disable();
    }
    if (showBarBorder) {
      this.barChartWidgetSettingsForm.get('barBorderWidth').enable();
    } else {
      this.barChartWidgetSettingsForm.get('barBorderWidth').disable();
    }
    if (showLegend) {
      this.barChartWidgetSettingsForm.get('legendPosition').enable();
      this.barChartWidgetSettingsForm.get('legendLabelFont').enable();
      this.barChartWidgetSettingsForm.get('legendLabelColor').enable();
    } else {
      this.barChartWidgetSettingsForm.get('legendPosition').disable();
      this.barChartWidgetSettingsForm.get('legendLabelFont').disable();
      this.barChartWidgetSettingsForm.get('legendLabelColor').disable();
    }
    if (showTooltip) {
      this.barChartWidgetSettingsForm.get('tooltipLabelFont').enable();
      this.barChartWidgetSettingsForm.get('tooltipLabelColor').enable();
      this.barChartWidgetSettingsForm.get('tooltipValueFont').enable();
      this.barChartWidgetSettingsForm.get('tooltipValueColor').enable();
      // emitEvent: false — tooltipShowDate is itself a validator trigger, so
      // enabling it silently avoids re-entering this method.
      this.barChartWidgetSettingsForm.get('tooltipShowDate').enable({emitEvent: false});
      this.barChartWidgetSettingsForm.get('tooltipBackgroundColor').enable();
      this.barChartWidgetSettingsForm.get('tooltipBackgroundBlur').enable();
      // Date sub-settings are only available while the tooltip itself is shown.
      if (tooltipShowDate) {
        this.barChartWidgetSettingsForm.get('tooltipDateFormat').enable();
        this.barChartWidgetSettingsForm.get('tooltipDateFont').enable();
        this.barChartWidgetSettingsForm.get('tooltipDateColor').enable();
        this.barChartWidgetSettingsForm.get('tooltipDateInterval').enable();
      } else {
        this.barChartWidgetSettingsForm.get('tooltipDateFormat').disable();
        this.barChartWidgetSettingsForm.get('tooltipDateFont').disable();
        this.barChartWidgetSettingsForm.get('tooltipDateColor').disable();
        this.barChartWidgetSettingsForm.get('tooltipDateInterval').disable();
      }
    } else {
      this.barChartWidgetSettingsForm.get('tooltipLabelFont').disable();
      this.barChartWidgetSettingsForm.get('tooltipLabelColor').disable();
      this.barChartWidgetSettingsForm.get('tooltipValueFont').disable();
      this.barChartWidgetSettingsForm.get('tooltipValueColor').disable();
      this.barChartWidgetSettingsForm.get('tooltipShowDate').disable({emitEvent: false});
      this.barChartWidgetSettingsForm.get('tooltipDateFormat').disable();
      this.barChartWidgetSettingsForm.get('tooltipDateFont').disable();
      this.barChartWidgetSettingsForm.get('tooltipDateColor').disable();
      this.barChartWidgetSettingsForm.get('tooltipDateInterval').disable();
      this.barChartWidgetSettingsForm.get('tooltipBackgroundColor').disable();
      this.barChartWidgetSettingsForm.get('tooltipBackgroundBlur').disable();
    }
  }

  // Sample tooltip value rendered with the widget's units/decimals config.
  private _tooltipValuePreviewFn(): string {
    const units: string = this.widgetConfig.config.units;
    const decimals: number = this.widgetConfig.config.decimals;
    return formatValue(22, decimals, units, false);
  }

  // Sample tooltip date rendered with the currently selected date format.
  private _tooltipDatePreviewFn(): string {
    const dateFormat: DateFormatSettings = this.barChartWidgetSettingsForm.get('tooltipDateFormat').value;
    const processor = DateFormatProcessor.fromSettings(this.$injector, dateFormat);
    processor.update(Date.now());
    return processor.formatted;
  }
}
``` | /content/code_sandbox/ui-ngx/src/app/modules/home/components/widget/lib/settings/chart/bar-chart-with-labels-widget-settings.component.ts | xml | 2016-12-01T09:33:30 | 2024-08-16T19:58:25 | thingsboard | thingsboard/thingsboard | 16,820 | 1,900 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Use of this source code is governed by a BSD-style license that can be
     found in the LICENSE file. -->
<!-- Legal information screen: three hyperlink rows (open-source licenses,
     terms of service, privacy notice), each opening the URL bound via app:url. -->
<PreferenceScreen
    xmlns:android="path_to_url"
    xmlns:app="path_to_url">
    <org.chromium.chrome.browser.preferences.HyperlinkPreference
        android:key="open_source_license"
        android:title="@string/open_source_license_title"
        app:url="@string/open_source_license_url" />
    <org.chromium.chrome.browser.preferences.HyperlinkPreference
        android:key="terms_of_service"
        android:title="@string/terms_of_service_title"
        app:url="@string/chrome_terms_of_service_url" />
    <org.chromium.chrome.browser.preferences.HyperlinkPreference
        android:key="privacy_notice"
        android:title="@string/privacy_notice_title"
        app:url="@string/chrome_privacy_notice_url" />
</PreferenceScreen>
``` | /content/code_sandbox/libraries_res/chrome_res/src/main/res/xml/legal_information_preferences.xml | xml | 2016-07-04T07:28:36 | 2024-08-15T05:20:42 | AndroidChromium | JackyAndroid/AndroidChromium | 3,090 | 195 |
```xml
import * as React from 'react';
import {
DrawerBody,
DrawerHeader,
DrawerHeaderTitle,
OverlayDrawer,
Button,
Field,
tokens,
makeStyles,
Input,
} from '@fluentui/react-components';
import { Dismiss24Regular } from '@fluentui/react-icons';
// Layout for the story: trigger button and size field stacked in a grid
// with themed vertical gaps.
const useStyles = makeStyles({
  main: {
    display: 'grid',
    justifyContent: 'flex-start',
    gridRowGap: tokens.spacingVerticalXXL,
  },
  field: {
    display: 'grid',
    gridRowGap: tokens.spacingVerticalS,
  },
});
/**
 * Story: an OverlayDrawer whose width is driven by user input, overriding
 * the default drawer size via the inline `width` style.
 */
export const CustomSize = () => {
  const styles = useStyles();
  const [open, setOpen] = React.useState(false);
  const [customSize, setCustomSize] = React.useState(600);

  return (
    <div>
      <OverlayDrawer
        open={open}
        position="end"
        onOpenChange={(_, state) => setOpen(state.open)}
        style={{ width: `${customSize}px` }}
      >
        <DrawerHeader>
          <DrawerHeaderTitle
            action={
              <Button
                appearance="subtle"
                aria-label="Close"
                icon={<Dismiss24Regular />}
                onClick={() => setOpen(false)}
              />
            }
          >
            Drawer with {customSize}px size
          </DrawerHeaderTitle>
        </DrawerHeader>
        <DrawerBody>
          <p>Drawer content</p>
        </DrawerBody>
      </OverlayDrawer>
      <div className={styles.main}>
        <Button appearance="primary" onClick={() => setOpen(true)}>
          Open Drawer
        </Button>
        <div className={styles.field}>
          <Field label="Size">
            <Input
              pattern="[0-9]*"
              value={customSize.toString()}
              onChange={(_, data) => {
                // `pattern` is advisory only and does not block non-numeric
                // typing, so parseInt can return NaN (e.g. for "abc"), which
                // previously leaked into state and rendered `width: NaNpx`.
                const parsed = parseInt(data.value, 10);
                setCustomSize(Number.isNaN(parsed) ? 0 : parsed);
              }}
            />
          </Field>
        </div>
      </div>
    </div>
  );
};
// Storybook docs metadata: description rendered beneath this story.
CustomSize.parameters = {
  docs: {
    description: {
      story: 'The Drawer can be sized to any custom width, by overriding the `width` style property.',
    },
  },
};
``` | /content/code_sandbox/packages/react-components/react-drawer/stories/src/Drawer/DrawerCustomSize.stories.tsx | xml | 2016-06-06T15:03:44 | 2024-08-16T18:49:29 | fluentui | microsoft/fluentui | 18,221 | 458 |
```xml
import useCalendarDate from '../useCalendarDate';
import format from 'date-fns/format';
import { act } from '@testing-library/react';
import { renderHook } from '@test/utils';
// Tests for useCalendarDate(value, defaultValue): the hook should prefer the
// controlled `value`, fall back to the default, follow prop updates, and
// support explicit set/reset.
describe('useCalendarDate', () => {
  it('Should return controlled date', () => {
    const { result } = renderHook(() =>
      useCalendarDate(new Date('07/01/2021'), new Date('08/01/2021'))
    );
    expect(format(result.current.calendarDate, 'yyyy-MM-dd')).to.equal('2021-07-01');
  });

  it('Should return default date', () => {
    const { result } = renderHook(() => useCalendarDate(undefined, new Date('08/01/2021')));
    expect(format(result.current.calendarDate, 'yyyy-MM-dd')).to.equal('2021-08-01');
  });

  it('Should update calendarDate when value is updated', () => {
    const { result, rerender } = renderHook(
      ({ initialValue }) => useCalendarDate(initialValue, new Date(2022, 10, 2)),
      {
        initialProps: { initialValue: new Date('07/01/2021') }
      }
    );
    expect(format(result.current.calendarDate, 'yyyy-MM-dd')).to.equal('2021-07-01');
    rerender({ initialValue: new Date('09/01/2021') });
    expect(format(result.current.calendarDate, 'yyyy-MM-dd')).to.equal('2021-09-01');
  });

  it('Should update calendarDate by `setCalendarDate`', () => {
    const { result } = renderHook(() =>
      useCalendarDate(new Date('07/01/2021'), new Date(2022, 10, 2))
    );
    expect(format(result.current.calendarDate, 'yyyy-MM-dd')).to.equal('2021-07-01');
    act(() => {
      result.current.setCalendarDate(new Date('09/01/2021'));
    });
    expect(format(result.current.calendarDate, 'yyyy-MM-dd')).to.equal('2021-09-01');
  });

  it('Should reset the datetime', () => {
    const { result } = renderHook(() =>
      useCalendarDate(undefined, new Date('08/04/2022 00:00:10'))
    );
    act(() => {
      result.current.setCalendarDate(new Date('09/01/2021'));
    });
    expect(format(result.current.calendarDate, 'yyyy-MM-dd')).to.equal('2021-09-01');
    act(() => {
      result.current.resetCalendarDate();
    });
    // Reset restores the default value including its time component.
    expect(format(result.current.calendarDate, 'yyyy-MM-dd HH:mm:ss')).to.equal(
      '2022-08-04 00:00:10'
    );
  });
});
``` | /content/code_sandbox/src/Calendar/test/useCalendarDateSpec.tsx | xml | 2016-06-06T02:27:46 | 2024-08-16T16:41:54 | rsuite | rsuite/rsuite | 8,263 | 591 |
```xml
<!-- 24dp clock icon (circle outline with hands), single path tinted #555. -->
<vector xmlns:android="path_to_url"
    android:height="24dp"
    android:width="24dp"
    android:viewportWidth="24"
    android:viewportHeight="24">
    <path android:fillColor="#555" android:pathData="M12,20A8,8 0 0,0 20,12A8,8 0 0,0 12,4A8,8 0 0,0 4,12A8,8 0 0,0 12,20M12,2A10,10 0 0,1 22,12A10,10 0 0,1 12,22C6.47,22 2,17.5 2,12A10,10 0 0,1 12,2M12.5,7V12.25L17,14.92L16.25,16.15L11,13V7H12.5Z" />
</vector>
``` | /content/code_sandbox/app/src/main/res/drawable/ic_clock.xml | xml | 2016-08-13T08:08:39 | 2024-08-06T13:58:48 | open-event-organizer-android | fossasia/open-event-organizer-android | 1,783 | 218 |
```xml
/*
 * @license Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    path_to_url
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// TypeScript Version: 4.1

/// <reference types="@stdlib/types"/>

import { Array3D } from '@stdlib/types/array';

/**
 * Returns an element from a three-dimensional nested array.
 *
 * ## Notes
 *
 * -   Negative indices are resolved relative to the end of the respective dimension (see the third example).
 * -   The return type is `T | void`, as out-of-bounds indices yield no value.
 *
 * @param x - input array
 * @param i0 - first dimension index
 * @param i1 - second dimension index
 * @param i2 - third dimension index
 * @returns nested array element
 *
 * @example
 * var x = [ [ [ 1, 2 ], [ 3, 4 ] ] ];
 *
 * var v = at3d( x, 0, 0, 1 );
 * // returns 2
 *
 * v = at3d( x, 0, 1, 0 );
 * // returns 3
 *
 * v = at3d( x, -1, -2, -2 );
 * // returns 1
 */
declare function at3d<T = unknown>( x: Array3D<T>, i0: number, i1: number, i2: number ): T | void;

// EXPORTS //

export = at3d;
``` | /content/code_sandbox/lib/node_modules/@stdlib/array/base/at3d/docs/types/index.d.ts | xml | 2016-03-24T04:19:52 | 2024-08-16T09:03:19 | stdlib | stdlib-js/stdlib | 4,266 | 286 |
```xml
import "reflect-metadata"
import { DataSource } from "../../../src/data-source/DataSource"
import {
closeTestingConnections,
createTestingConnections,
reloadTestingDatabases,
} from "../../utils/test-utils"
import { Bar } from "./entity/Bar"
import { Foo } from "./entity/Foo"
// TODO: this test was broken after removing primary: true from relation decorators
// due to complexity of cascades, it was skipped for now
describe.skip("github issues > #7002 cascade save fails if the child entity has CreateDateColumn and PK as JoinColumn", () => {
    let connections: DataSource[]
    before(
        async () =>
            (connections = await createTestingConnections({
                entities: [__dirname + "/entity/*{.js,.ts}"],
                schemaCreate: true,
                dropSchema: true,
                // Only the drivers where the original issue reproduced.
                enabledDrivers: ["mysql", "postgres"],
            })),
    )
    beforeEach(() => reloadTestingDatabases(connections))
    after(() => closeTestingConnections(connections))

    it("save an entity having a child entity with shared PK and CreatedDateColumn by cascade", () =>
        Promise.all(
            connections.map(async (connection) => {
                const foo = new Foo()
                foo.text = "This is a feature post"
                // Saving Bar should cascade-save the not-yet-persisted Foo child.
                await connection.manager.save(
                    connection.getRepository(Bar).create({
                        title: "Feature Post",
                        foo,
                    }),
                )
            }),
        ))
})
``` | /content/code_sandbox/test/github-issues/7002/issue-7002.ts | xml | 2016-02-29T07:41:14 | 2024-08-16T18:28:52 | typeorm | typeorm/typeorm | 33,875 | 297 |
```xml
import { Path } from 'slate'
// Fixture: `another` ([0]) is an ancestor of `path` ([0, 1, 2]), so `path`
// does not end before it — expected result is false.
export const input = {
  path: [0, 1, 2],
  another: [0],
}

export const test = ({ path, another }) => Path.endsBefore(path, another)

export const output = false
``` | /content/code_sandbox/packages/slate/test/interfaces/Path/endsBefore/above.tsx | xml | 2016-06-18T01:52:42 | 2024-08-16T18:43:42 | slate | ianstormtaylor/slate | 29,492 | 61 |
```xml
import {Controller} from "@tsed/di";
import {EndpointMetadata, Get, Groups, Property, Returns} from "@tsed/schema";
import {serialize} from "../../src/utils/serialize.js";
// Model under test: prop1/prop2 are gated behind the "summary"/"details"
// groups, and sensitiveProp behind "admin".
class MyModel {
  @Property()
  id: string;

  @Property()
  description: string;

  @Groups("summary")
  prop1: string; // not displayed by default

  @Groups("details")
  prop2: string; // not displayed by default

  @Groups("admin")
  sensitiveProp: string; // not displayed because it's a sensitive prop
}
@Controller("/controllers")
class MyController {
  // Only "summary" and "details" may be requested via `includes`;
  // "admin" is not in the allow-list.
  @Get("/:id")
  @Returns(200, MyModel).AllowedGroups("summary", "details")
  get() {
    return {
      id: "id",
      description: "description",
      prop1: "prop1",
      prop2: "prop2",
      sensitiveProp: "sensitiveProp"
    };
  }

  // Same payload, but the "admin" group is additionally excluded outright
  // via Groups("!admin").
  @Get("/:id")
  @Returns(200, MyModel).Groups("!admin").AllowedGroups("summary", "details")
  get2() {
    return {
      id: "id",
      description: "description",
      prop1: "prop1",
      prop2: "prop2",
      sensitiveProp: "sensitiveProp"
    };
  }
}
/**
 * Serializes a fully-populated MyModel through the given controller method's
 * response options, simulating a request with the `includes` groups.
 */
function getSpecFixture(method: string, includes: undefined | string[]) {
  const data = new MyModel();
  data.id = "id";
  data.description = "description";
  data.prop1 = "prop1";
  data.prop2 = "prop2";
  data.sensitiveProp = "sensitiveProp";

  const endpoint = EndpointMetadata.get(MyController, method);

  return serialize(data, {
    useAlias: true,
    // additionalProperties: this.additionalProperties === "accept",
    ...endpoint.getResponseOptions(200, {includes}),
    endpoint: true
  });
}
// Serialization behavior of @AllowedGroups, with and without a combined
// @Groups("!admin") exclusion. Note the asymmetry: with `includes`
// undefined everything is emitted, while an empty array emits none of
// the grouped props.
describe("@AllowedGroups", () => {
  describe("without @Groups", () => {
    it("should serialize the model (no included groups - undefined)", () => {
      const spec = getSpecFixture("get", undefined);
      expect(spec).toEqual({
        description: "description",
        id: "id",
        prop1: "prop1",
        prop2: "prop2",
        sensitiveProp: "sensitiveProp"
      });
    });
    it("should serialize the model (no included groups - [])", () => {
      const spec = getSpecFixture("get", []);
      expect(spec).toEqual({
        description: "description",
        id: "id"
      });
    });
    it("should serialize the model (included groups)", () => {
      const spec = getSpecFixture("get", ["summary"]);
      expect(spec).toEqual({
        description: "description",
        id: "id",
        prop1: "prop1"
      });
    });
    it("should serialize the model (included groups + unexpected groups)", () => {
      // "admin" is not in the AllowedGroups list, so it is ignored.
      const spec = getSpecFixture("get", ["summary", "admin"]);
      expect(spec).toEqual({
        description: "description",
        id: "id",
        prop1: "prop1"
      });
    });
  });
  describe("with @Groups", () => {
    it("should serialize the model (no included groups - undefined)", () => {
      // Groups("!admin") strips sensitiveProp even with no includes given.
      const spec = getSpecFixture("get2", undefined);
      expect(spec).toEqual({
        description: "description",
        id: "id",
        prop1: "prop1",
        prop2: "prop2"
      });
    });
    it("should serialize the model (no included groups - [])", () => {
      const spec = getSpecFixture("get2", []);
      expect(spec).toEqual({
        description: "description",
        id: "id",
        prop1: "prop1",
        prop2: "prop2"
      });
    });
    it("should serialize the model (included groups)", () => {
      const spec = getSpecFixture("get2", ["summary"]);
      expect(spec).toEqual({
        description: "description",
        id: "id",
        prop1: "prop1"
      });
    });
    it("should serialize the model (included groups + unexpected groups)", () => {
      const spec = getSpecFixture("get2", ["summary", "admin"]);
      expect(spec).toEqual({
        description: "description",
        id: "id",
        prop1: "prop1"
      });
    });
  });
});
``` | /content/code_sandbox/packages/specs/json-mapper/test/integration/allowed-groups.integration.spec.ts | xml | 2016-02-21T18:38:47 | 2024-08-14T21:19:48 | tsed | tsedio/tsed | 2,817 | 937 |
```xml
import * as compose from 'lodash.flowright';
import PluginDetails from '../components/detail/PluginDetails';
import React from 'react';
import Spinner from 'modules/common/components/Spinner';
import { gql } from '@apollo/client';
import { graphql } from '@apollo/client/react/hoc';
import { mutations } from '@erxes/ui-settings/src/general/graphql';
import { queries } from '../graphql';
import { withProps } from 'modules/common/utils';
import { Plugin } from '../types';
// Props supplied by the route/parent: the marketplace plugin id.
type Props = {
  id: string;
};

// Props after HOC composition adds the GraphQL query/mutation handles.
type FinalProps = {
  manageInstall;
  enabledServicesQuery;
} & Props;

type State = {
  plugin: Plugin;
  plugins: Plugin[];
};

/**
 * Container that fetches one marketplace plugin (plus the full plugin list)
 * over HTTP on mount and renders PluginDetails once the plugin has loaded.
 */
class PluginDetailsContainer extends React.Component<FinalProps, State> {
  constructor(props) {
    super(props);
    this.state = {
      plugin: {} as Plugin,
      plugins: []
    };
  }

  async componentDidMount() {
    // NOTE(review): the URL literals below appear truncated/sanitized in this
    // snapshot ("path_to_url") — confirm against the original source.
    const url =
      process.env.NODE_ENV === 'production'
        ? `path_to_url{this.props.id}`
        : `path_to_url{this.props.id}`;
    // Fetch the single plugin; failures are only logged, so the spinner
    // below stays up indefinitely on error.
    fetch(url)
      .then(async response => {
        const plugin = await response.json();
        this.setState({ plugin });
      })
      .catch(e => {
        console.log(e);
      });
    const pluginsUrl =
      process.env.NODE_ENV === 'production'
        ? 'path_to_url
        : 'path_to_url
    fetch(pluginsUrl)
      .then(async response => {
        const plugins = await response.json();
        this.setState({ plugins });
      })
      .catch(e => {
        console.log(e, 'error');
      });
  }

  render() {
    const { plugin, plugins } = this.state;
    // Show a spinner until the single-plugin payload has arrived.
    if (!plugin || Object.keys(plugin).length === 0) {
      return <Spinner objective={true} />;
    }
    return (
      <PluginDetails {...this.props} plugin={plugin || {}} plugins={plugins} />
    );
  }
}

// Wire up the enabled-services query and the install mutation.
export default withProps<{}>(
  compose(
    graphql<{}, {}, {}>(gql(queries.enabledServices), {
      name: 'enabledServicesQuery'
    }),
    graphql<{}>(gql(mutations.managePluginInstall), {
      name: 'manageInstall'
    })
  )(PluginDetailsContainer)
);
``` | /content/code_sandbox/packages/core-ui/src/modules/settings/marketplace/containers/PluginDetails.tsx | xml | 2016-11-11T06:54:50 | 2024-08-16T10:26:06 | erxes | erxes/erxes | 3,479 | 474 |
```xml
/* eslint-disable no-bitwise */
import { codeBlock, stripIndent } from "common-tags";
import fs from "fs";
import path from "path";
import ts from "typescript";
/** One exported event gathered from the events source file. */
interface DocEntry {
  name: string;
  docs?: string;
  type: "ViewerNetworkEvent" | "ViewerEvent";
  // Printed payload type literal; absent when the payload is empty ("{}").
  text?: string;
}

/**
 * We use this regexp to find a previous block that we
 * are going to update in the readme file.
 *
 * NOTE(review): the `g` flag makes `.test()` stateful (lastIndex advances).
 * Safe today because emitDocs calls `.test()` once per run — confirm before
 * reusing this regexp elsewhere.
 */
const BLOCK_REGEXP =
  /<!-- START docs:events -->(.|\n)*<!-- END docs:events -->/gm;

/** Build flags that affects AST generation */
const buildFlags =
  // Do not truncate output.
  ts.NodeBuilderFlags.NoTruncation |
  // Use multiline object literals format.
  ts.NodeBuilderFlags.MultilineObjectLiterals;
/** Generate documentation for all classes in a set of .ts files */
function gatherEntries(
  fileNames: string[],
  options: ts.CompilerOptions
): DocEntry[] {
  // Build a program using the set of root file names in fileNames
  const program = ts.createProgram(fileNames, options);
  const printer = ts.createPrinter({
    noEmitHelpers: true,
    omitTrailingSemicolon: true,
    removeComments: false,
  });

  // Get the checker, we will use it to find more about classes
  const checker = program.getTypeChecker();
  const data: DocEntry[] = [];

  /** Hold a pointer to the sourcefile we are currently processing. */
  let currentSourceFile: ts.SourceFile;

  // Visit every sourceFile in the program
  for (const sourceFile of program.getSourceFiles()) {
    if (!sourceFile.isDeclarationFile) {
      currentSourceFile = sourceFile;
      // Walk the tree to search for classes
      ts.forEachChild(sourceFile, visit);
    }
  }

  // Stable alphabetical order keeps the generated markdown diff-friendly.
  const sorted = data.sort((a, b) => {
    if (a.name > b.name) {
      return 1;
    }
    if (b.name > a.name) {
      return -1;
    }
    return 0;
  });
  return sorted;

  /** visit nodes finding exported events */
  function visit(node: ts.Node) {
    // Only consider exported nodes
    if (!isNodeExported(node)) {
      return;
    }
    if (ts.isVariableStatement(node)) {
      // Only variable statements produced by the event factory helpers.
      if (
        !node.getFullText().includes("createViewerNetworkEvent") &&
        !node.getFullText().includes("createViewerEvent")
      ) {
        return;
      }
      const firstChild = node.declarationList.declarations[0];
      if (ts.isVariableDeclaration(firstChild)) {
        const symbol = checker.getSymbolAtLocation(firstChild.name);
        if (symbol) {
          serializeEventSymbol(symbol);
        }
      }
    }
  }

  /** Record name, TSDoc text, event kind and printed payload type for one event symbol. */
  function serializeEventSymbol(symbol: ts.Symbol) {
    const type = checker.getTypeOfSymbolAtLocation(
      symbol,
      symbol.valueDeclaration
    );
    const typeNode = checker.typeToTypeNode(type, undefined, buildFlags)!;
    const typeName = symbol.getName();
    const entry: DocEntry = {
      name: typeName,
      docs: ts.displayPartsToString(symbol.getDocumentationComment(checker)),
      type: type.getSymbol()!.getName() as DocEntry["type"],
    };
    typeNode.forEachChild((ch) => {
      if (ts.isTypeLiteralNode(ch)) {
        const text = printer.printNode(
          ts.EmitHint.Unspecified,
          ch,
          currentSourceFile
        );
        // "{}" means the event carries no payload; omit text entirely.
        if (text !== "{}") {
          entry.text = text;
        }
        /*
        Go through each parameter.
        ch.members.forEach(m => {
        if (ts.isPropertySignature(m)) {
        if (ts.isIdentifier(m.name)) {
        data.parameters[m.name.text] = printer.printNode(
        ts.EmitHint.Unspecified,
        m.type!,
        currentSourceFile
        );
        }
        }
        });
        */
      }
    });
    data.push(entry);
  }

  /** True if this is visible outside this file, false otherwise */
  function isNodeExported(node: ts.Node): boolean {
    return (
      // eslint-disable-next-line no-bitwise, @typescript-eslint/no-unnecessary-type-assertion
      (ts.getCombinedModifierFlags(node as ts.Declaration) &
        ts.ModifierFlags.Export) !==
        0 ||
      (!!node.parent && node.parent.kind === ts.SyntaxKind.SourceFile)
    );
  }
}
/** Prepends `prefix` to every line of `text` after the first. */
function prefixLines(text: string, prefix: string) {
  return text.replace(/\n/g, `\n${prefix}`);
}
/** "LoginPromptEvent" -> "loginPrompt": lower-case the first character and strip the trailing "Event". */
function getEventName(typeName: string) {
  const head = typeName[0].toLocaleLowerCase();
  const stem = typeName.slice(1, typeName.length - "Event".length);
  return head + stem;
}
/**
 * Removes "%future added value" from text. This is a placeholder type
 * added by Relay to help with future proofness.
 *
 * Uses split/join so that EVERY occurrence is removed: the printed type
 * text may contain the placeholder in several union members, and
 * `String.prototype.replace` with a string pattern only replaces the
 * first occurrence.
 */
function removeFutureAddedValue(text: string) {
  return text
    .split(': "%future added value" | ')
    .join(": ")
    .split(' | "%future added value"')
    .join("");
}
/**
 * Append or update previous documention in markdownFile.
 * @param markdownFile The markdown file we want to inject the docs too.
 * @param entries data as returned by gatherEntries.
 * @param verify when true, only check freshness and exit(1) if the file is stale (CI mode).
 */
function emitDocs(markdownFile: string, entries: DocEntry[], verify = false) {
const previousContent = fs.existsSync(markdownFile)
? fs.readFileSync(markdownFile).toString()
: "";
// Anchor index: one in-page link per event.
const summary = stripIndent`
- ${entries
.map(
(e) => `<a href="#${getEventName(e.name)}">${getEventName(e.name)}</a>`
)
.join("\n - ")}
`;
// Detailed list; network events expose .success/.error variants.
const list = entries
.map(
(e) =>
codeBlock`
- ${
e.type === "ViewerEvent"
? `<a id="${getEventName(e.name)}">**${getEventName(e.name)}**</a>`
: `<a id="${getEventName(e.name)}">**${getEventName(
e.name
)}.success**, **${getEventName(e.name)}.error**</a>`
}: ${e.docs ? e.docs.replace("\n", " ") : ""}
${
e.text
? codeBlock`
\`\`\`ts
${removeFutureAddedValue(e.text)}
\`\`\`
`
: ""
}
`
)
.join("\n");
const output = stripIndent`
<!-- START docs:events -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN npm run docs:events -->
### Index
${prefixLines(summary, " ")}
### Events
${prefixLines(list, " ")}
<!-- END docs:events -->
`;
let newContent;
// Find previous block.
if (BLOCK_REGEXP.test(previousContent)) {
newContent = previousContent.replace(BLOCK_REGEXP, output);
} else {
newContent = previousContent + "\n" + output;
}
// No-op when the file already contains the current docs.
if (previousContent === newContent) {
// eslint-disable-next-line no-console
console.log(`${markdownFile} is up to date`);
return;
}
if (verify) {
// eslint-disable-next-line no-console
console.error(
`${markdownFile} is outdated, please run \`npm run docs:events\``
);
process.exit(1);
return;
}
fs.writeFileSync(markdownFile, newContent);
// eslint-disable-next-line no-console
console.log(`Successfully injected documentation into ${markdownFile}`);
}
/**
 * CLI entry point: `<eventFile> <markdownFile> [--verify]`.
 * Loads the nearest tsconfig, gathers event entries from the events file and
 * injects (or verifies) the generated docs in the markdown file.
 */
function main() {
  if (process.argv.length < 4) {
    throw new Error("Must provide path to events and a markdown file.");
  }
  const [, , eventFile, markdownFile, verifyFlag] = process.argv;

  // Locate the tsconfig governing the events file.
  const configFile = ts.findConfigFile(eventFile, fs.existsSync);
  if (!configFile) {
    throw new Error("tsconfig file not found");
  }

  const result = ts.parseConfigFileTextToJson(
    configFile,
    fs.readFileSync(configFile).toString()
  );
  if (result.error) {
    throw result.error;
  }

  // Parse the JSON raw data into actual consumable compiler options.
  const config = ts.parseJsonConfigFileContent(
    result.config,
    ts.sys,
    path.dirname(configFile)
  );

  emitDocs(
    markdownFile,
    gatherEntries([eventFile], config.options),
    verifyFlag === "--verify"
  );
}
main();
``` | /content/code_sandbox/client/scripts/generateEventDocs.ts | xml | 2016-10-31T16:14:05 | 2024-08-06T16:15:57 | talk | coralproject/talk | 1,881 | 1,828 |
```xml
import AbstractChromeStorageService from "./abstractions/abstract-chrome-storage-api.service";
/**
 * In-memory storage for the browser extension, delegating to
 * `chrome.storage.session` (cleared when the browser session ends).
 */
export default class BrowserMemoryStorageService extends AbstractChromeStorageService {
  constructor() {
    super(chrome.storage.session);
  }
}
``` | /content/code_sandbox/apps/browser/src/platform/services/browser-memory-storage.service.ts | xml | 2016-03-09T23:14:01 | 2024-08-16T15:07:51 | clients | bitwarden/clients | 8,877 | 45 |
```xml
import {
BoldIcon,
CodeIcon,
Heading1Icon,
Heading2Icon,
BlockQuoteIcon,
LinkIcon,
StrikethroughIcon,
OrderedListIcon,
BulletedListIcon,
TodoListIcon,
InputIcon,
HighlightIcon,
CommentIcon,
ItalicIcon,
OutdentIcon,
IndentIcon,
CopyIcon,
Heading3Icon,
} from "outline-icons";
import { EditorState } from "prosemirror-state";
import * as React from "react";
import Highlight from "@shared/editor/marks/Highlight";
import { getMarksBetween } from "@shared/editor/queries/getMarksBetween";
import { isInCode } from "@shared/editor/queries/isInCode";
import { isInList } from "@shared/editor/queries/isInList";
import { isMarkActive } from "@shared/editor/queries/isMarkActive";
import { isNodeActive } from "@shared/editor/queries/isNodeActive";
import { MenuItem } from "@shared/editor/types";
import CircleIcon from "~/components/Icons/CircleIcon";
import { Dictionary } from "~/hooks/useDictionary";
export default function formattingMenuItems(
state: EditorState,
isTemplate: boolean,
isMobile: boolean,
dictionary: Dictionary
): MenuItem[] {
const { schema } = state;
const isCode = isInCode(state);
const isCodeBlock = isInCode(state, { onlyBlock: true });
const isEmpty = state.selection.empty;
const highlight = getMarksBetween(
state.selection.from,
state.selection.to,
state
).find(({ mark }) => mark.type.name === "highlight");
return [
{
name: "placeholder",
tooltip: dictionary.placeholder,
icon: <InputIcon />,
active: isMarkActive(schema.marks.placeholder),
visible: isTemplate && (!isMobile || !isEmpty),
},
{
name: "separator",
visible: isTemplate && (!isMobile || !isEmpty),
},
{
name: "strong",
tooltip: dictionary.strong,
icon: <BoldIcon />,
active: isMarkActive(schema.marks.strong),
visible: !isCode && (!isMobile || !isEmpty),
},
{
name: "em",
tooltip: dictionary.em,
icon: <ItalicIcon />,
active: isMarkActive(schema.marks.em),
visible: !isCode && (!isMobile || !isEmpty),
},
{
name: "strikethrough",
tooltip: dictionary.strikethrough,
icon: <StrikethroughIcon />,
active: isMarkActive(schema.marks.strikethrough),
visible: !isCode && (!isMobile || !isEmpty),
},
{
tooltip: dictionary.mark,
icon: highlight ? (
<CircleIcon color={highlight.mark.attrs.color} />
) : (
<HighlightIcon />
),
active: () => !!highlight,
visible: !isCode && (!isMobile || !isEmpty),
children: Highlight.colors.map((color, index) => ({
name: "highlight",
label: Highlight.colorNames[index],
icon: <CircleIcon retainColor color={color} />,
active: isMarkActive(schema.marks.highlight, { color }),
attrs: { color },
})),
},
{
name: "code_inline",
tooltip: dictionary.codeInline,
icon: <CodeIcon />,
active: isMarkActive(schema.marks.code_inline),
visible: !isCodeBlock && (!isMobile || !isEmpty),
},
{
name: "separator",
visible: !isCodeBlock,
},
{
name: "heading",
tooltip: dictionary.heading,
icon: <Heading1Icon />,
active: isNodeActive(schema.nodes.heading, { level: 1 }),
attrs: { level: 1 },
visible: !isCodeBlock && (!isMobile || isEmpty),
},
{
name: "heading",
tooltip: dictionary.subheading,
icon: <Heading2Icon />,
active: isNodeActive(schema.nodes.heading, { level: 2 }),
attrs: { level: 2 },
visible: !isCodeBlock && (!isMobile || isEmpty),
},
{
name: "heading",
tooltip: dictionary.subheading,
icon: <Heading3Icon />,
active: isNodeActive(schema.nodes.heading, { level: 3 }),
attrs: { level: 3 },
visible: !isCodeBlock && (!isMobile || isEmpty),
},
{
name: "blockquote",
tooltip: dictionary.quote,
icon: <BlockQuoteIcon />,
active: isNodeActive(schema.nodes.blockquote),
attrs: { level: 2 },
visible: !isCodeBlock && (!isMobile || isEmpty),
},
{
name: "separator",
visible: !isCodeBlock,
},
{
name: "checkbox_list",
tooltip: dictionary.checkboxList,
icon: <TodoListIcon />,
keywords: "checklist checkbox task",
active: isNodeActive(schema.nodes.checkbox_list),
visible: !isCodeBlock && (!isMobile || isEmpty),
},
{
name: "bullet_list",
tooltip: dictionary.bulletList,
icon: <BulletedListIcon />,
active: isNodeActive(schema.nodes.bullet_list),
visible: !isCodeBlock && (!isMobile || isEmpty),
},
{
name: "ordered_list",
tooltip: dictionary.orderedList,
icon: <OrderedListIcon />,
active: isNodeActive(schema.nodes.ordered_list),
visible: !isCodeBlock && (!isMobile || isEmpty),
},
{
name: "outdentList",
tooltip: dictionary.outdent,
icon: <OutdentIcon />,
visible:
isMobile && isInList(state, { types: ["ordered_list", "bullet_list"] }),
},
{
name: "indentList",
tooltip: dictionary.indent,
icon: <IndentIcon />,
visible:
isMobile && isInList(state, { types: ["ordered_list", "bullet_list"] }),
},
{
name: "outdentCheckboxList",
tooltip: dictionary.outdent,
icon: <OutdentIcon />,
visible: isMobile && isInList(state, { types: ["checkbox_list"] }),
},
{
name: "indentCheckboxList",
tooltip: dictionary.indent,
icon: <IndentIcon />,
visible: isMobile && isInList(state, { types: ["checkbox_list"] }),
},
{
name: "separator",
visible: !isCodeBlock,
},
{
name: "link",
tooltip: dictionary.createLink,
icon: <LinkIcon />,
active: isMarkActive(schema.marks.link),
attrs: { href: "" },
visible: !isCodeBlock && (!isMobile || !isEmpty),
},
{
name: "comment",
tooltip: dictionary.comment,
icon: <CommentIcon />,
label: isCodeBlock ? dictionary.comment : undefined,
active: isMarkActive(schema.marks.comment, { resolved: false }),
visible: !isMobile || !isEmpty,
},
{
name: "separator",
visible: isCode && !isCodeBlock && (!isMobile || !isEmpty),
},
{
name: "copyToClipboard",
icon: <CopyIcon />,
tooltip: dictionary.copy,
visible: isCode && !isCodeBlock && (!isMobile || !isEmpty),
},
];
}
``` | /content/code_sandbox/app/editor/menus/formatting.tsx | xml | 2016-05-22T21:31:47 | 2024-08-16T19:57:22 | outline | outline/outline | 26,751 | 1,602 |
```xml
import useActivityKeyerContext from './private/useContext';
export default function useActivityKeys(): readonly [readonly string[]] {
return useActivityKeyerContext().activityKeysState;
}
``` | /content/code_sandbox/packages/api/src/providers/ActivityKeyer/useActivityKeys.ts | xml | 2016-07-07T23:16:57 | 2024-08-16T00:12:37 | BotFramework-WebChat | microsoft/BotFramework-WebChat | 1,567 | 40 |
```xml
import { gql } from "@apollo/client";
import * as compose from "lodash.flowright";
import Bulk from "@erxes/ui/src/components/Bulk";
import { Alert, withProps } from "@erxes/ui/src/utils";
import { generatePaginationParams } from "@erxes/ui/src/utils/router";
import React from "react";
import { graphql } from "@apollo/client/react/hoc";
import List from "../../components/flow/FlowList";
import { mutations, queries } from "../../graphql";
import {
flowsRemoveMutationResponse,
flowTotalCountQueryResponse,
FlowsQueryResponse,
FlowsAddMutationResponse,
IFlowDocument,
} from "../../types";
import { useNavigate } from "react-router-dom";
type Props = {
queryParams: any;
type?: string;
};
type FinalProps = {
flowsQuery: FlowsQueryResponse;
flowTotalCountQuery: flowTotalCountQueryResponse;
} & Props &
flowsRemoveMutationResponse &
FlowsAddMutationResponse;
const ProductListContainer = (props: FinalProps) => {
const navigate = useNavigate();
const {
queryParams,
flowsQuery,
flowTotalCountQuery,
flowsRemove,
flowsAdd,
} = props;
const addFlow = (isSub?: boolean) => {
flowsAdd({
variables: {
name: "Your flow title",
status: "draft",
isSub,
},
})
.then((data) => {
navigate({
pathname: `/processes/flows/details/${data.data.flowsAdd._id}`,
search: "?isCreate=true",
});
})
.catch((error) => {
Alert.error(error.message);
});
};
if (flowsQuery.loading) {
return false;
}
// remove action
const remove = ({ flowIds }, emptyBulk) => {
flowsRemove({
variables: { flowIds },
})
.then((removeStatus) => {
emptyBulk();
const status = removeStatus.data.flowsRemove;
getRefetchQueries();
status === "deleted"
? Alert.success("You successfully deleted a flow")
: Alert.warning("Flow status deleted");
})
.catch((e) => {
Alert.error(e.message);
});
};
const updatedProps = {
...props,
queryParams,
flows: flowsQuery.flows || [],
remove,
addFlow,
loading: flowsQuery.loading,
searchValue: queryParams.searchValue || "",
flowsTotalCount: flowTotalCountQuery.flowTotalCount || 0,
};
const flowList = (props) => {
return <List {...updatedProps} {...props} />;
};
const refetch = () => {
flowsQuery.refetch();
};
return <Bulk content={flowList} refetch={refetch} />;
};
const getRefetchQueries = () => {
return ["flows", "flowsMain", "flowCategories", "flowTotalCount"];
};
const options = () => ({
refetchQueries: getRefetchQueries(),
});
const generateFilter = (qp) => {
return {
categoryId: qp.categoryId,
searchValue: qp.searchValue,
branchId: qp.branchId,
departmentId: qp.departmentId,
status: qp.status,
validation: qp.validation,
};
};
export default withProps<Props>(
compose(
graphql<Props, FlowsQueryResponse, { page: number; perPage: number }>(
gql(queries.flowsMain),
{
name: "flowsQuery",
options: ({ queryParams }) => ({
variables: {
...generateFilter(queryParams),
...generatePaginationParams(queryParams),
},
fetchPolicy: "network-only",
}),
}
),
graphql<Props, flowTotalCountQueryResponse>(gql(queries.flowTotalCount), {
name: "flowTotalCountQuery",
options: ({ queryParams }) => ({
variables: {
...generateFilter(queryParams),
},
fetchPolicy: "network-only",
}),
}),
graphql<Props, flowsRemoveMutationResponse, { flowsIds: string[] }>(
gql(mutations.flowsRemove),
{
name: "flowsRemove",
options,
}
),
graphql<{}, FlowsAddMutationResponse, IFlowDocument>(
gql(mutations.flowsAdd),
{
name: "flowsAdd",
options: () => ({
refetchQueries: ["flows", "flowDetail"],
}),
}
)
)(ProductListContainer)
);
``` | /content/code_sandbox/packages/plugin-processes-ui/src/flow/containers/flow/FlowList.tsx | xml | 2016-11-11T06:54:50 | 2024-08-16T10:26:06 | erxes | erxes/erxes | 3,479 | 956 |
```xml
import { waitForLocalState } from "coral-framework/lib/relay";
import { waitTillAuthPopupIsClosedLocal } from "coral-stream/__generated__/waitTillAuthPopupIsClosedLocal.graphql";
import { Environment, graphql } from "react-relay";
async function waitTillAuthPopupIsClosed(environment: Environment) {
// Wait for auth popup to close.
await waitForLocalState<waitTillAuthPopupIsClosedLocal>(
environment,
graphql`
fragment waitTillAuthPopupIsClosedLocal on Local {
authPopup {
open
}
}
`,
(data) => data.authPopup.open === false
);
}
export default waitTillAuthPopupIsClosed;
``` | /content/code_sandbox/client/src/core/client/stream/common/AuthPopup/waitTillAuthPopupIsClosed.ts | xml | 2016-10-31T16:14:05 | 2024-08-06T16:15:57 | talk | coralproject/talk | 1,881 | 153 |
```xml
import { Locator, type Page } from '@playwright/test';
import EditorPage from '../pages/editor-page';
import { Device } from '../types/types';
import EditorSelectors from '../selectors/editor-selectors';
export default class {
readonly page: Page;
constructor( page: Page ) {
this.page = page;
// TODO: throw exception if experiment Breakpoints is deactivated.
}
static getDeviceLocator( page: Page, device: Device ): Locator {
// TODO: use the new data-testid attribute
const baseLocator = page.locator( '[aria-label="Switch Device"]' );
const locators = {
mobile: baseLocator.locator( 'button[aria-label="Mobile Portrait (up to 767px)"]' ),
mobile_extra: baseLocator.locator( 'button[aria-label="Mobile Landscape (up to 880px)"]' ),
tablet: baseLocator.locator( 'button[aria-label="Tablet Portrait (up to 1024px)"]' ),
tablet_extra: baseLocator.locator( 'button[aria-label="Tablet Landscape (up to 1200px)"]' ),
laptop: baseLocator.locator( 'button[aria-label="Laptop (up to 1366px)"]' ),
desktop: baseLocator.locator( 'button[aria-label="Desktop"]' ),
widescreen: baseLocator.locator( 'button[aria-label="Widescreen (2400px and up)"]' ),
};
return locators[ device ];
}
static getAll() {
return [ 'mobile', 'mobile_extra', 'tablet', 'tablet_extra', 'laptop', 'desktop', 'widescreen' ];
}
static getBasic() {
return [ 'mobile', 'tablet', 'desktop' ];
}
async saveOrUpdate( editor: EditorPage, toReload = false ) {
const hasTopBar: boolean = await editor.hasTopBar();
if ( hasTopBar ) {
await editor.saveSiteSettingsWithTopBar( toReload );
} else {
await editor.saveSiteSettingsNoTopBar();
}
}
async addAllBreakpoints( editor: EditorPage, experimentPostId?: string ) {
await editor.openSiteSettings( 'layout' );
await editor.openSection( 'section_breakpoints' );
await this.page.waitForSelector( 'text=Active Breakpoints' );
const devices = [ 'Mobile Landscape', 'Tablet Landscape', 'Laptop', 'Widescreen' ];
for ( const device of devices ) {
if ( await this.page.$( '.select2-selection__e-plus-button' ) ) {
await this.page.click( '.select2-selection__e-plus-button' );
await this.page.click( `li:has-text("${ device }")` );
}
}
await this.saveOrUpdate( editor, true );
if ( experimentPostId ) {
await this.page.goto( `/wp-admin/post.php?post=${ experimentPostId }&action=elementor` );
} else {
await this.page.reload();
if ( await this.page.$( '#elementor-panel-header-kit-close' ) ) {
await this.page.locator( '#elementor-panel-header-kit-close' ).click( { timeout: 30000 } );
}
}
await this.page.waitForSelector( '#elementor-editor-wrapper' );
}
async resetBreakpoints( editor: EditorPage ) {
await editor.openSiteSettings( 'layout' );
await editor.openSection( 'section_breakpoints' );
await this.page.waitForSelector( 'text=Active Breakpoints' );
const removeBreakpointButton = EditorSelectors.panels.siteSettings.layout.breakpoints.removeBreakpointButton;
while ( await this.page.locator( removeBreakpointButton ).count() > 0 ) {
await this.page.click( removeBreakpointButton );
}
await this.saveOrUpdate( editor, true );
}
}
``` | /content/code_sandbox/tests/playwright/assets/breakpoints.ts | xml | 2016-05-30T13:05:46 | 2024-08-16T13:13:10 | elementor | elementor/elementor | 6,507 | 846 |
```xml
<vector xmlns:android="path_to_url"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24"
android:viewportHeight="24">
<path
android:pathData="M6,6.663C6,4.29 8.626,2.856 10.622,4.14L18.924,9.476C20.761,10.657 20.761,13.343 18.924,14.524L10.622,19.86C8.626,21.144 6,19.71 6,17.337V6.663ZM9.541,5.822C8.875,5.394 8,5.872 8,6.663V17.337C8,18.128 8.875,18.606 9.541,18.178L17.842,12.841C18.455,12.448 18.455,11.552 17.842,11.159L9.541,5.822Z"
android:fillColor="#303233"
android:fillType="evenOdd"/>
</vector>
``` | /content/code_sandbox/icon-pack/src/main/res/drawable/ic_play_medium_regular_outline.xml | xml | 2016-05-04T11:46:20 | 2024-08-15T16:29:10 | android | meganz/android | 1,537 | 260 |
```xml
import { NO_ERRORS_SCHEMA, ChangeDetectionStrategy } from '@angular/core';
import * as compiler from '@angular/compiler';
import { Config, DirectiveDeclaration } from '../config';
import { SemVerDSL } from '../../util/ngVersion';
let refId = 0;
const dummyMetadataFactory = (declaration: DirectiveDeclaration) => {
if (refId > 1e10) {
refId = 0;
}
return {
inputs: declaration.inputs || [],
outputs: declaration.outputs || [],
hostListeners: declaration.hostListeners || [],
hostProperties: declaration.hostProperties || [],
hostAttributes: declaration.hostAttributes || [],
isSummary: true,
type: {
diDeps: [],
lifecycleHooks: [],
isHost: false,
reference: ++refId + '-ref',
},
isComponent: false,
selector: declaration.selector,
exportAs: declaration.exportAs,
providers: [],
viewProviders: [],
queries: [],
entryComponents: [],
changeDetection: 0,
template: {
isSummary: true,
animations: [],
ngContentSelectors: [],
encapsulation: 0,
},
};
};
class Console {
log(message: string) {}
warn(message: string) {}
}
let defaultDirectives: DirectiveDeclaration[] = [];
export const parseTemplate = (template: string, directives: DirectiveDeclaration[] = []) => {
defaultDirectives = directives.map((d) => dummyMetadataFactory(d));
const TemplateParser = compiler.TemplateParser as any;
const expressionParser = new compiler.Parser(new compiler.Lexer());
const elementSchemaRegistry = new compiler.DomElementSchemaRegistry();
const ngConsole = new Console();
const htmlParser = new compiler.HtmlParser();
let tmplParser: any;
SemVerDSL.gte('4.0.0-beta.8', () => {
const config = new compiler.CompilerConfig({});
tmplParser = new TemplateParser(config, expressionParser, elementSchemaRegistry, htmlParser, ngConsole, []);
})
.elseIf.lt('4.1.0', () => {
tmplParser = new TemplateParser(expressionParser, elementSchemaRegistry, htmlParser, ngConsole, []);
})
.elseIf.lt('5.0.0-rc.0', () => {
const config = new compiler.CompilerConfig({});
tmplParser = new TemplateParser(
config,
new (compiler as any).JitReflector(),
expressionParser,
elementSchemaRegistry,
htmlParser,
ngConsole,
[]
);
})
.else(() => {
const JitReflector = require('./jitReflector').JitReflector;
const config = new compiler.CompilerConfig({});
tmplParser = new compiler.TemplateParser(
config,
new JitReflector(),
expressionParser,
elementSchemaRegistry,
htmlParser as any,
ngConsole,
[]
);
});
const { interpolation } = Config;
// Make sure it works with 2.2.x & 2.3.x
const summaryKind = ((compiler as any).CompileSummaryKind || {}).Template;
let templateMetadata: any = {
encapsulation: 0,
template: template,
templateUrl: '',
styles: [],
isInline: true,
styleUrls: [],
ngContentSelectors: [],
animations: [],
externalStylesheets: [],
interpolation,
toSummary() {
return {
isSummary: true,
animations: this.animations.map((anim) => anim.name),
ngContentSelectors: this.ngContentSelectors,
encapsulation: this.encapsulation,
summaryKind: summaryKind,
};
},
};
// Make sure it works with 2.2.x & 2.3.x
const type = {
diDeps: [],
lifecycleHooks: [],
reference: null,
// Used by Angular 2.2.x
isHost: false,
name: '',
prefix: '',
moduleUrl: '',
value: '',
identifier: null,
};
let result;
try {
SemVerDSL.lt('4.1.0', () => {
result = tmplParser.tryParse(
(compiler.CompileDirectiveMetadata as any).create({
type,
template: templateMetadata,
}),
template,
defaultDirectives,
[],
[NO_ERRORS_SCHEMA],
''
).templateAst;
})
.elseIf.lt('4.1.3', () => {
result = tmplParser.tryParse(
compiler.CompileDirectiveMetadata.create({
type,
template: templateMetadata,
isHost: true,
isComponent: true,
selector: '',
exportAs: '',
changeDetection: ChangeDetectionStrategy.Default,
inputs: [],
outputs: [],
host: {},
providers: [],
viewProviders: [],
queries: [],
viewQueries: [],
entryComponents: [],
guards: [],
componentViewType: null,
rendererType: null,
componentFactory: null,
}),
template,
defaultDirectives,
[],
[NO_ERRORS_SCHEMA],
''
).templateAst;
})
.elseIf.lt('5.0.0-rc.0', () => {
result = tmplParser.tryParse(
compiler.CompileDirectiveMetadata.create({
type,
template: templateMetadata,
isHost: true,
isComponent: true,
selector: '',
exportAs: '',
changeDetection: ChangeDetectionStrategy.Default,
inputs: [],
outputs: [],
host: {},
providers: [],
viewProviders: [],
queries: [],
viewQueries: [],
entryComponents: [],
guards: [],
componentViewType: null,
rendererType: null,
componentFactory: null,
}),
template,
defaultDirectives,
[],
[NO_ERRORS_SCHEMA],
''
).templateAst;
})
.elseIf.lt('5.2.0', () => {
result = tmplParser.tryParse(
compiler.CompileDirectiveMetadata.create({
type,
template: templateMetadata,
isHost: true,
isComponent: true,
selector: '',
exportAs: '',
changeDetection: ChangeDetectionStrategy.Default,
inputs: [],
outputs: [],
host: {},
providers: [],
viewProviders: [],
queries: [],
viewQueries: [],
entryComponents: [],
guards: [],
componentViewType: null,
rendererType: null,
componentFactory: null,
}),
template,
defaultDirectives,
[],
[NO_ERRORS_SCHEMA],
'',
true
).templateAst;
})
.else(() => {
result = tmplParser.tryParse(
compiler.CompileDirectiveMetadata.create({
type,
template: templateMetadata,
isHost: true,
isComponent: true,
selector: '',
exportAs: '',
changeDetection: ChangeDetectionStrategy.Default,
inputs: [],
outputs: [],
host: {},
providers: [],
viewProviders: [],
queries: [],
viewQueries: [],
entryComponents: [],
componentViewType: null,
rendererType: null,
componentFactory: null,
guards: {},
}),
template,
defaultDirectives,
[],
[NO_ERRORS_SCHEMA],
'',
true
).templateAst;
});
} catch (e) {
console.error(e);
}
return result;
};
``` | /content/code_sandbox/src/angular/templates/templateParser.ts | xml | 2016-02-10T17:22:40 | 2024-08-14T16:41:28 | codelyzer | mgechev/codelyzer | 2,446 | 1,570 |
```xml
import { promises as fs, readdir, realpath } from 'fs';
import { homedir, hostname, tmpdir, userInfo } from 'os';
import path, { resolve as resolvePath } from 'path';
import { env as process_env } from 'process';
import type { CancellationToken, Event, TextDocument, WorkspaceFolder } from 'vscode';
import { Disposable, env, EventEmitter, extensions, FileType, Range, Uri, window, workspace } from 'vscode';
import { md5 } from '@env/crypto';
import { fetch, getProxyAgent } from '@env/fetch';
import { hrtime } from '@env/hrtime';
import { isLinux, isWindows } from '@env/platform';
import type { GitExtension, API as ScmGitApi } from '../../../@types/vscode.git';
import { getCachedAvatarUri } from '../../../avatars';
import type { GitConfigKeys } from '../../../constants';
import { GlyphChars, Schemes } from '../../../constants';
import type { Container } from '../../../container';
import { emojify } from '../../../emojis';
import { CancellationError } from '../../../errors';
import { Features } from '../../../features';
import { GitErrorHandling } from '../../../git/commandOptions';
import {
ApplyPatchCommitError,
ApplyPatchCommitErrorReason,
BlameIgnoreRevsFileBadRevisionError,
BlameIgnoreRevsFileError,
CherryPickError,
CherryPickErrorReason,
FetchError,
GitSearchError,
PullError,
PushError,
PushErrorReason,
StashApplyError,
StashApplyErrorReason,
StashPushError,
WorktreeCreateError,
WorktreeCreateErrorReason,
WorktreeDeleteError,
WorktreeDeleteErrorReason,
} from '../../../git/errors';
import type {
GitCaches,
GitDir,
GitProvider,
GitProviderDescriptor,
LeftRightCommitCountResult,
NextComparisonUrisResult,
PagedResult,
PagingOptions,
PreviousComparisonUrisResult,
PreviousLineComparisonUrisResult,
RepositoryCloseEvent,
RepositoryInitWatcher,
RepositoryOpenEvent,
RepositoryVisibility,
RevisionUriData,
ScmRepository,
} from '../../../git/gitProvider';
import { encodeGitLensRevisionUriAuthority, GitUri, isGitUri } from '../../../git/gitUri';
import type { GitBlame, GitBlameAuthor, GitBlameLine, GitBlameLines } from '../../../git/models/blame';
import type { BranchSortOptions } from '../../../git/models/branch';
import {
getBranchId,
getBranchNameAndRemote,
getBranchNameWithoutRemote,
getRemoteNameFromBranchName,
GitBranch,
isDetachedHead,
sortBranches,
} from '../../../git/models/branch';
import type { GitStashCommit } from '../../../git/models/commit';
import { GitCommit, GitCommitIdentity } from '../../../git/models/commit';
import { deletedOrMissing, uncommitted, uncommittedStaged } from '../../../git/models/constants';
import { GitContributor } from '../../../git/models/contributor';
import type {
GitDiff,
GitDiffFile,
GitDiffFiles,
GitDiffFilter,
GitDiffLine,
GitDiffShortStat,
} from '../../../git/models/diff';
import type { GitFile, GitFileStatus } from '../../../git/models/file';
import { GitFileChange } from '../../../git/models/file';
import type {
GitGraph,
GitGraphRow,
GitGraphRowContexts,
GitGraphRowHead,
GitGraphRowRemoteHead,
GitGraphRowsStats,
GitGraphRowStats,
GitGraphRowTag,
} from '../../../git/models/graph';
import type { GitLog } from '../../../git/models/log';
import type { GitMergeStatus } from '../../../git/models/merge';
import type { GitRebaseStatus } from '../../../git/models/rebase';
import type {
GitBranchReference,
GitReference,
GitRevisionRange,
GitTagReference,
} from '../../../git/models/reference';
import {
createReference,
getBranchTrackingWithoutRemote,
getReferenceFromBranch,
isBranchReference,
isRevisionRange,
isSha,
isShaLike,
isUncommitted,
isUncommittedStaged,
shortenRevision,
} from '../../../git/models/reference';
import type { GitReflog } from '../../../git/models/reflog';
import type { GitRemote } from '../../../git/models/remote';
import { getRemoteIconUri, getVisibilityCacheKey, sortRemotes } from '../../../git/models/remote';
import { RemoteResourceType } from '../../../git/models/remoteResource';
import type { RepositoryChangeEvent } from '../../../git/models/repository';
import { Repository, RepositoryChange, RepositoryChangeComparisonMode } from '../../../git/models/repository';
import type { GitStash } from '../../../git/models/stash';
import type { GitStatusFile } from '../../../git/models/status';
import { GitStatus } from '../../../git/models/status';
import type { GitTag, TagSortOptions } from '../../../git/models/tag';
import { getTagId, sortTags } from '../../../git/models/tag';
import type { GitTreeEntry } from '../../../git/models/tree';
import type { GitUser } from '../../../git/models/user';
import { isUserMatch } from '../../../git/models/user';
import type { GitWorktree } from '../../../git/models/worktree';
import { getWorktreesByBranch } from '../../../git/models/worktree';
import { parseGitBlame } from '../../../git/parsers/blameParser';
import { parseGitBranches } from '../../../git/parsers/branchParser';
import {
parseGitApplyFiles,
parseGitDiffNameStatusFiles,
parseGitDiffShortStat,
parseGitFileDiff,
} from '../../../git/parsers/diffParser';
import {
createLogParserSingle,
createLogParserWithFiles,
getContributorsParser,
getGraphParser,
getGraphStatsParser,
getRefAndDateParser,
getRefParser,
LogType,
parseGitLog,
parseGitLogAllFormat,
parseGitLogDefaultFormat,
parseGitLogSimple,
parseGitLogSimpleFormat,
parseGitLogSimpleRenamed,
} from '../../../git/parsers/logParser';
import { parseGitRefLog, parseGitRefLogDefaultFormat } from '../../../git/parsers/reflogParser';
import { parseGitRemotes } from '../../../git/parsers/remoteParser';
import { parseGitStatus } from '../../../git/parsers/statusParser';
import { parseGitTags } from '../../../git/parsers/tagParser';
import { parseGitLsFiles, parseGitTree } from '../../../git/parsers/treeParser';
import { parseGitWorktrees } from '../../../git/parsers/worktreeParser';
import { getRemoteProviderMatcher, loadRemoteProviders } from '../../../git/remotes/remoteProviders';
import type { GitSearch, GitSearchResultData, GitSearchResults, SearchQuery } from '../../../git/search';
import { getGitArgsFromSearchQuery, getSearchQueryComparisonKey } from '../../../git/search';
import {
showBlameInvalidIgnoreRevsFileWarningMessage,
showGenericErrorMessage,
showGitDisabledErrorMessage,
showGitInvalidConfigErrorMessage,
showGitMissingErrorMessage,
showGitVersionUnsupportedErrorMessage,
} from '../../../messages';
import type {
GraphBranchContextValue,
GraphItemContext,
GraphItemRefContext,
GraphItemRefGroupContext,
GraphTagContextValue,
} from '../../../plus/webviews/graph/protocol';
import { countStringLength, filterMap } from '../../../system/array';
import { TimedCancellationSource } from '../../../system/cancellation';
import { configuration } from '../../../system/configuration';
import { gate } from '../../../system/decorators/gate';
import { debug, log } from '../../../system/decorators/log';
import { debounce } from '../../../system/function';
import { filterMap as filterMapIterable, find, first, join, last, map, some } from '../../../system/iterable';
import { Logger } from '../../../system/logger';
import type { LogScope } from '../../../system/logger.scope';
import { getLogScope, setLogScopeExit } from '../../../system/logger.scope';
import {
commonBaseIndex,
dirname,
getBestPath,
isAbsolute,
isFolderGlob,
joinPaths,
maybeUri,
normalizePath,
pathEquals,
relative,
splitPath,
} from '../../../system/path';
import type { PromiseOrValue } from '../../../system/promise';
import { any, asSettled, getSettledValue } from '../../../system/promise';
import { equalsIgnoreCase, getDurationMilliseconds, interpolate, splitSingle } from '../../../system/string';
import { PathTrie } from '../../../system/trie';
import { compare, fromString } from '../../../system/version';
import { serializeWebviewItemContext } from '../../../system/webview';
import type { CachedBlame, CachedDiff, CachedLog, TrackedGitDocument } from '../../../trackers/trackedDocument';
import { GitDocumentState } from '../../../trackers/trackedDocument';
import { registerCommitMessageProvider } from './commitMessageProvider';
import type { Git, PushForceOptions } from './git';
import {
getShaInLogRegex,
GitErrors,
gitLogDefaultConfigs,
gitLogDefaultConfigsWithFiles,
maxGitCliLength,
} from './git';
import type { GitLocation } from './locator';
import { findGitPath, InvalidGitConfigError, UnableToFindGitError } from './locator';
import { CancelledRunError, fsExists, RunError } from './shell';
const emptyArray = Object.freeze([]) as unknown as any[];
const emptyPromise: Promise<GitBlame | GitDiffFile | GitLog | undefined> = Promise.resolve(undefined);
const emptyPagedResult: PagedResult<any> = Object.freeze({ values: [] });
const slash = 47;
const RepoSearchWarnings = {
doesNotExist: /no such file or directory/i,
};
const driveLetterRegex = /(?<=^\/?)([a-zA-Z])(?=:\/)/;
const userConfigRegex = /^user\.(name|email) (.*)$/gm;
const mappedAuthorRegex = /(.+)\s<(.+)>/;
const stashSummaryRegex =
// eslint-disable-next-line no-control-regex
/(?:(?:(?<wip>WIP) on|On) (?<onref>[^/](?!.*\/\.)(?!.*\.\.)(?!.*\/\/)(?!.*@\{)[^\x00-\x1F\x7F ~^:?*[\\]+[^./]):\s*)?(?<summary>.*)$/s;
const reflogCommands = ['merge', 'pull'];
interface RepositoryInfo {
gitDir?: GitDir;
user?: GitUser | null;
}
export class LocalGitProvider implements GitProvider, Disposable {
readonly descriptor: GitProviderDescriptor = { id: 'git', name: 'Git', virtual: false };
readonly supportedSchemes = new Set<string>([
Schemes.File,
Schemes.Git,
Schemes.GitLens,
Schemes.PRs,
// DocumentSchemes.Vsls,
]);
private _onDidChange = new EventEmitter<void>();
get onDidChange(): Event<void> {
return this._onDidChange.event;
}
private _onDidChangeRepository = new EventEmitter<RepositoryChangeEvent>();
get onDidChangeRepository(): Event<RepositoryChangeEvent> {
return this._onDidChangeRepository.event;
}
private _onDidCloseRepository = new EventEmitter<RepositoryCloseEvent>();
get onDidCloseRepository(): Event<RepositoryCloseEvent> {
return this._onDidCloseRepository.event;
}
private _onDidOpenRepository = new EventEmitter<RepositoryOpenEvent>();
get onDidOpenRepository(): Event<RepositoryOpenEvent> {
return this._onDidOpenRepository.event;
}
private readonly _branchesCache = new Map<string, Promise<PagedResult<GitBranch>>>();
private readonly _contributorsCache = new Map<string, Map<string, Promise<GitContributor[]>>>();
private readonly _mergeStatusCache = new Map<string, Promise<GitMergeStatus | undefined>>();
private readonly _rebaseStatusCache = new Map<string, Promise<GitRebaseStatus | undefined>>();
private readonly _remotesCache = new Map<string, Promise<GitRemote[]>>();
private readonly _repoInfoCache = new Map<string, RepositoryInfo>();
private readonly _stashesCache = new Map<string, GitStash | null>();
private readonly _tagsCache = new Map<string, Promise<PagedResult<GitTag>>>();
private readonly _trackedPaths = new PathTrie<PromiseOrValue<[string, string] | undefined>>();
private readonly _worktreesCache = new Map<string, Promise<GitWorktree[]>>();
private _disposables: Disposable[] = [];
constructor(
protected readonly container: Container,
protected readonly git: Git,
) {
this.git.setLocator(this.ensureGit.bind(this));
this._disposables.push(
configuration.onDidChange(e => {
if (configuration.changed(e, 'remotes')) {
this.resetCaches(undefined, 'remotes');
}
}, this),
this.container.events.on('git:cache:reset', e =>
this.resetCaches(e.data.repoPath, ...(e.data.caches ?? emptyArray)),
),
);
}
dispose() {
Disposable.from(...this._disposables).dispose();
}
private get useCaching() {
return configuration.get('advanced.caching.enabled');
}
private onRepositoryChanged(repo: Repository, e: RepositoryChangeEvent) {
if (e.changed(RepositoryChange.Config, RepositoryChangeComparisonMode.Any)) {
this._repoInfoCache.delete(repo.path);
}
if (e.changed(RepositoryChange.Heads, RepositoryChange.Remotes, RepositoryChangeComparisonMode.Any)) {
this._branchesCache.delete(repo.path);
this._contributorsCache.delete(repo.path);
}
if (e.changed(RepositoryChange.Remotes, RepositoryChange.RemoteProviders, RepositoryChangeComparisonMode.Any)) {
this._remotesCache.delete(repo.path);
}
if (e.changed(RepositoryChange.Index, RepositoryChange.Unknown, RepositoryChangeComparisonMode.Any)) {
this._trackedPaths.clear();
}
if (e.changed(RepositoryChange.Merge, RepositoryChangeComparisonMode.Any)) {
this._mergeStatusCache.delete(repo.path);
}
if (e.changed(RepositoryChange.Rebase, RepositoryChangeComparisonMode.Any)) {
this._rebaseStatusCache.delete(repo.path);
}
if (e.changed(RepositoryChange.Stash, RepositoryChangeComparisonMode.Any)) {
this._stashesCache.delete(repo.path);
}
if (e.changed(RepositoryChange.Tags, RepositoryChangeComparisonMode.Any)) {
this._tagsCache.delete(repo.path);
}
if (e.changed(RepositoryChange.Worktrees, RepositoryChangeComparisonMode.Any)) {
this._worktreesCache.delete(repo.path);
}
this._onDidChangeRepository.fire(e);
}
// Memoized git-executable discovery (populated lazily by ensureGit)
private _gitLocator: Promise<GitLocation> | undefined;
private async ensureGit(): Promise<GitLocation> {
if (this._gitLocator == null) {
this._gitLocator = this.findGit();
}
return this._gitLocator;
}
// Locates the git executable: honors VS Code's git.enabled/git.path settings, hooks
// into the built-in vscode.git extension (env + repo open/close events), and races
// our own discovery against the built-in extension's configured git path
@log()
private async findGit(): Promise<GitLocation> {
	const scope = getLogScope();

	if (!configuration.getCore('git.enabled', null, true)) {
		Logger.log(scope, 'Built-in Git is disabled ("git.enabled": false)');
		void showGitDisabledErrorMessage();

		throw new UnableToFindGitError();
	}

	const scmGitPromise = this.getScmGitApi();

	// Fire-and-forget: wires listeners on the built-in vscode.git extension once available
	async function subscribeToScmOpenCloseRepository(this: LocalGitProvider) {
		const scmGit = await scmGitPromise;
		if (scmGit == null) return;

		registerCommitMessageProvider(this.container, scmGit);

		// Find env to pass to Git
		for (const v of Object.values(scmGit.git)) {
			if (v != null && typeof v === 'object' && 'git' in v) {
				for (const vv of Object.values(v.git)) {
					if (vv != null && typeof vv === 'object' && 'GIT_ASKPASS' in vv) {
						Logger.debug(scope, 'Found built-in Git env');

						this.git.setEnv(vv);
						break;
					}
				}
			}
		}

		const closing = new Set<Uri>();
		const fireRepositoryClosed = debounce(() => {
			if (this.container.deactivating) return;

			for (const uri of closing) {
				this._onDidCloseRepository.fire({ uri: uri });
			}
			closing.clear();
		}, 1000);

		this._disposables.push(
			// Since we will get "close" events for repos when vscode is shutting down, debounce the event so ensure we aren't shutting down
			scmGit.onDidCloseRepository(e => {
				if (this.container.deactivating) return;

				closing.add(e.rootUri);
				fireRepositoryClosed();
			}),
			scmGit.onDidOpenRepository(e => this._onDidOpenRepository.fire({ uri: e.rootUri })),
		);

		for (const scmRepository of scmGit.repositories) {
			this._onDidOpenRepository.fire({ uri: scmRepository.rootUri });
		}
	}
	void subscribeToScmOpenCloseRepository.call(this);

	const potentialGitPaths = configuration.getCore('git.path') ?? this.container.storage.getWorkspace('gitPath');

	const start = hrtime();

	const findGitPromise = findGitPath(potentialGitPaths);
	// Try to use the same git as the built-in vscode git extension, but don't wait for it if we find something faster
	const findGitFromSCMPromise = scmGitPromise.then(gitApi => {
		const path = gitApi?.git.path;
		if (!path) return findGitPromise;

		// If the SCM's git path is already among the configured candidates, reuse the in-flight search
		if (potentialGitPaths != null) {
			if (typeof potentialGitPaths === 'string') {
				if (path === potentialGitPaths) return findGitPromise;
			} else if (potentialGitPaths.includes(path)) {
				return findGitPromise;
			}
		}

		return findGitPath(path, false);
	});

	// First successful locator wins
	const location = await any<GitLocation>(findGitPromise, findGitFromSCMPromise);
	// Save the found git path, but let things settle first to not impact startup performance
	setTimeout(() => void this.container.storage.storeWorkspace('gitPath', location.path), 1000);

	if (scope != null) {
		setLogScopeExit(
			scope,
			` ${GlyphChars.Dot} Git (${location.version}) found in ${
				location.path === 'git' ? 'PATH' : location.path
			}`,
		);
	} else {
		Logger.log(
			scope,
			`Git (${location.version}) found in ${
				location.path === 'git' ? 'PATH' : location.path
			} [${getDurationMilliseconds(start)}ms]`,
		);
	}

	// Warn if git is less than v2.7.2
	if (compare(fromString(location.version), fromString('2.7.2')) === -1) {
		Logger.log(scope, `Git version (${location.version}) is outdated`);
		void showGitVersionUnsupportedErrorMessage(location.version, '2.7.2');
	}

	return location;
}
// Searches a file-scheme uri/workspace folder for git repositories, optionally
// registering them with the built-in scm and clearing tracked-path state
@debug({ exit: true })
async discoverRepositories(
	uri: Uri,
	options?: { cancellation?: CancellationToken; depth?: number; silent?: boolean },
): Promise<Repository[]> {
	if (uri.scheme !== Schemes.File) return [];

	try {
		const autoRepositoryDetection = configuration.getCore('git.autoRepositoryDetection') ?? true;

		const folder = workspace.getWorkspaceFolder(uri);
		if (folder == null && !options?.silent) return [];

		void (await this.ensureGit());
		if (options?.cancellation?.isCancellationRequested) return [];

		// Root-only search (depth 0) when auto-detection is off or limited to open editors
		const repositories = await this.repositorySearch(
			folder ?? uri,
			options?.depth ??
				(autoRepositoryDetection === false || autoRepositoryDetection === 'openEditors' ? 0 : undefined),
			options?.cancellation,
			options?.silent,
		);

		if (!options?.silent && (autoRepositoryDetection === true || autoRepositoryDetection === 'subFolders')) {
			for (const repository of repositories) {
				void this.getOrOpenScmRepository(repository.uri);
			}
		}

		// Newly discovered repos can change which paths are considered tracked
		if (!options?.silent && repositories.length > 0) {
			this._trackedPaths.clear();
		}

		return repositories;
	} catch (ex) {
		if (ex instanceof InvalidGitConfigError) {
			void showGitInvalidConfigErrorMessage();
		} else if (ex instanceof UnableToFindGitError) {
			void showGitMissingErrorMessage();
		} else {
			const msg: string = ex?.message ?? '';
			if (msg && !options?.silent) {
				void window.showErrorMessage(`Unable to initialize Git; ${msg}`);
			}
		}

		throw ex;
	}
}
// Creates the Repository model(s) for `uri`; when the uri has a canonical
// counterpart (symlink case) that isn't already open, a hidden companion is added
@debug({ exit: true })
openRepository(
	folder: WorkspaceFolder | undefined,
	uri: Uri,
	root: boolean,
	suspended?: boolean,
	closed?: boolean,
): Repository[] {
	if (!closed) {
		void this.getOrOpenScmRepository(uri);
	}

	const opened = [
		new Repository(
			this.container,
			this.onRepositoryChanged.bind(this),
			this.descriptor,
			folder ?? workspace.getWorkspaceFolder(uri),
			uri,
			root,
			// Default to suspended when the window is unfocused
			suspended ?? !window.state.focused,
			closed,
		),
	];

	// Add a closed (hidden) repository for the canonical version if not already opened
	const canonicalUri = this.toCanonicalMap.get(getBestPath(uri));
	if (canonicalUri != null && this.container.git.getRepository(canonicalUri) == null) {
		opened.push(
			new Repository(
				this.container,
				this.onRepositoryChanged.bind(this),
				this.descriptor,
				folder ?? workspace.getWorkspaceFolder(canonicalUri),
				canonicalUri,
				root,
				suspended ?? !window.state.focused,
				true,
			),
		);
	}

	return opened;
}
@debug({ singleLine: true })
openRepositoryInitWatcher(): RepositoryInitWatcher {
	// Only creation matters here, so change/delete events are ignored (args 3 & 4)
	const watcher = workspace.createFileSystemWatcher('**/.git', false, true, true);

	const initWatcher: RepositoryInitWatcher = {
		onDidCreate: watcher.onDidCreate,
		dispose: () => void watcher.dispose(),
	};
	return initWatcher;
}
// Memoizes git-version-dependent feature checks (see supports())
private _supportedFeatures = new Map<Features, boolean>();
async supports(feature: Features): Promise<boolean> {
let supported = this._supportedFeatures.get(feature);
if (supported != null) return supported;
switch (feature) {
case Features.Worktrees:
supported = await this.git.isAtLeastVersion('2.17.0');
this._supportedFeatures.set(feature, supported);
return supported;
case Features.StashOnlyStaged:
supported = await this.git.isAtLeastVersion('2.35.0');
this._supportedFeatures.set(feature, supported);
return supported;
case Features.ForceIfIncludes:
supported = await this.git.isAtLeastVersion('2.30.0');
this._supportedFeatures.set(feature, supported);
return supported;
default:
return true;
}
}
// Derives repository visibility from its remotes: 'public' if any remote is
// publicly reachable, 'local' if there are no non-local remotes, else 'private'
@debug<LocalGitProvider['visibility']>({ exit: r => `returned ${r[0]}` })
async visibility(repoPath: string): Promise<[visibility: RepositoryVisibility, cacheKey: string | undefined]> {
	const remotes = await this.getRemotes(repoPath, { sort: true });
	if (remotes.length === 0) return ['local', undefined];

	let local = true;
	for await (const result of asSettled(remotes.map(r => this.getRemoteVisibility(r)))) {
		if (result.status !== 'fulfilled') continue;

		// First public remote wins immediately
		if (result.value[0] === 'public') {
			return ['public', getVisibilityCacheKey(result.value[1])];
		}
		if (result.value[0] !== 'local') {
			local = false;
		}
	}

	return local ? ['local', undefined] : ['private', getVisibilityCacheKey(remotes)];
}
// De-duplicates concurrent HEAD probes of the same remote url (see getRemoteVisibility)
private _pendingRemoteVisibility = new Map<string, ReturnType<typeof fetch>>();
// Probes a single remote's visibility by issuing an HTTP HEAD request against its
// web url: a 2xx response means 'public'; anything else (or an unreachable/ssh-only
// url) is treated as 'private', and non-url remotes as 'local'
@debug<LocalGitProvider['getRemoteVisibility']>({ args: { 0: r => r.url }, exit: r => `returned ${r[0]}` })
private async getRemoteVisibility(
	remote: GitRemote,
): Promise<[visibility: RepositoryVisibility, remote: GitRemote]> {
	const scope = getLogScope();

	let url;
	switch (remote.provider?.id) {
		case 'github':
		case 'gitlab':
		case 'bitbucket':
		case 'azure-devops':
		case 'gitea':
		case 'gerrit':
		case 'google-source':
			// Known providers expose a web url for the repo that we can probe
			url = remote.provider.url({ type: RemoteResourceType.Repo });
			if (url == null) return ['private', remote];

			break;
		default: {
			url = remote.url;
			if (!url.includes('git@')) {
				// Web-style urls are assumed private; anything else is treated as local
				return maybeUri(url) ? ['private', remote] : ['local', remote];
			}

			// Convert an scp-style ssh url (git@host:owner/repo) into an https url we can probe.
			// BUG FIX: previously built the literal string `path_to_url{host}/…` (missing `$` and
			// the https scheme), so the HEAD request below could never succeed
			const [host, repo] = url.split('@')[1].split(':');
			if (!host || !repo) return ['private', remote];

			url = `https://${host}/${repo}`;
		}
	}

	// Check if the url returns a 200 status code
	let promise = this._pendingRemoteVisibility.get(url);
	if (promise == null) {
		// Abort the probe after 30s so a hung host can't block visibility detection
		const aborter = new AbortController();
		const timer = setTimeout(() => aborter.abort(), 30000);

		promise = fetch(url, { method: 'HEAD', agent: getProxyAgent(), signal: aborter.signal });
		void promise.finally(() => clearTimeout(timer));

		this._pendingRemoteVisibility.set(url, promise);
	}

	try {
		const rsp = await promise;
		if (rsp.ok) return ['public', remote];

		Logger.debug(scope, `Response=${rsp.status}`);
	} catch (ex) {
		debugger;
		Logger.error(ex, scope);
	} finally {
		this._pendingRemoteVisibility.delete(url);
	}
	return ['private', remote];
}
// Scans a folder (up to `depth` levels) for .git directories, honoring the user's
// files/search exclude settings, and opens every repository found
@log<LocalGitProvider['repositorySearch']>({
	args: false,
	singleLine: true,
	prefix: (context, folder) => `${context.prefix}(${(folder instanceof Uri ? folder : folder.uri).fsPath})`,
	exit: r => `returned ${r.length} repositories ${r.length !== 0 ? Logger.toLoggable(r) : ''}`,
})
private async repositorySearch(
	folderOrUri: Uri | WorkspaceFolder,
	depth?: number,
	cancellation?: CancellationToken,
	silent?: boolean | undefined,
): Promise<Repository[]> {
	const scope = getLogScope();

	let folder;
	let rootUri;
	if (folderOrUri instanceof Uri) {
		rootUri = folderOrUri;
		folder = workspace.getWorkspaceFolder(rootUri);
	} else {
		rootUri = folderOrUri.uri;
	}

	depth =
		depth ??
		configuration.get('advanced.repositorySearchDepth', rootUri) ??
		configuration.getCore('git.repositoryScanMaxDepth', rootUri, 1);

	Logger.log(scope, `searching (depth=${depth})...`);

	const repositories: Repository[] = [];

	let rootPath;
	let canonicalRootPath;

	// First check whether the root itself is (inside) a repository
	const uri = await this.findRepositoryUri(rootUri, true);
	if (uri != null) {
		rootPath = normalizePath(uri.fsPath);

		const canonicalUri = this.toCanonicalMap.get(getBestPath(uri));
		if (canonicalUri != null) {
			canonicalRootPath = normalizePath(canonicalUri.fsPath);
		}

		Logger.log(scope, `found root repository in '${uri.fsPath}'`);
		repositories.push(...this.openRepository(folder, uri, true, undefined, silent));
	}

	if (depth <= 0 || cancellation?.isCancellationRequested) return repositories;

	// Get any specified excludes -- this is a total hack, but works for some simple cases and something is better than nothing :)
	const excludes = new Set<string>(configuration.getCore('git.repositoryScanIgnoredFolders', rootUri, []));
	for (let [key, value] of Object.entries({
		...configuration.getCore('files.exclude', rootUri, {}),
		...configuration.getCore('search.exclude', rootUri, {}),
	})) {
		if (!value) continue;
		if (key.includes('*.')) continue;

		if (key.startsWith('**/')) {
			key = key.substring(3);
		}
		excludes.add(key);
	}

	let repoPaths;
	try {
		repoPaths = await this.repositorySearchCore(rootUri.fsPath, depth, excludes, cancellation);
	} catch (ex) {
		const msg: string = ex?.toString() ?? '';
		if (RepoSearchWarnings.doesNotExist.test(msg)) {
			Logger.log(scope, `FAILED${msg ? ` Error: ${msg}` : ''}`);
		} else {
			Logger.error(ex, scope, 'FAILED');
		}

		return repositories;
	}

	// Each found path points at a `.git` entry; open its parent directory
	for (let p of repoPaths) {
		p = dirname(p);
		const normalized = normalizePath(p);

		// If we are the same as the root, skip it
		if (
			(isLinux &&
				(normalized === rootPath || (canonicalRootPath != null && normalized === canonicalRootPath))) ||
			equalsIgnoreCase(normalized, rootPath) ||
			(canonicalRootPath != null && equalsIgnoreCase(normalized, canonicalRootPath))
		) {
			continue;
		}

		Logger.log(scope, `searching in '${p}'...`);
		Logger.debug(
			scope,
			`normalizedRepoPath=${normalized}, rootPath=${rootPath}, canonicalRootPath=${canonicalRootPath}`,
		);

		const rp = await this.findRepositoryUri(Uri.file(p), true);
		if (rp == null) continue;

		Logger.log(scope, `found repository in '${rp.fsPath}'`);
		repositories.push(...this.openRepository(folder, rp, false, undefined, silent));
	}

	return repositories;
}
// Recursively walks `root` (callback-style readdir), collecting the paths of any
// `.git` entries into the shared `repositories` accumulator; `depth` limits
// recursion and `excludes` skips folder names the user has excluded
@debug<LocalGitProvider['repositorySearchCore']>({ args: { 2: false, 3: false }, exit: true })
private repositorySearchCore(
	root: string,
	depth: number,
	excludes: Set<string>,
	cancellation?: CancellationToken,
	repositories: string[] = [],
): Promise<string[]> {
	const scope = getLogScope();

	if (cancellation?.isCancellationRequested) return Promise.resolve(repositories);

	return new Promise<string[]>((resolve, reject) => {
		readdir(root, { withFileTypes: true }, async (err, files) => {
			if (err != null) {
				reject(err);
				return;
			}

			if (files.length === 0) {
				resolve(repositories);
				return;
			}

			depth--;

			let f;
			for (f of files) {
				if (cancellation?.isCancellationRequested) break;

				if (f.name === '.git') {
					repositories.push(resolvePath(root, f.name));
				} else if (depth >= 0 && f.isDirectory() && !excludes.has(f.name)) {
					try {
						// Recurse into the subfolder, accumulating into the shared array
						await this.repositorySearchCore(
							resolvePath(root, f.name),
							depth,
							excludes,
							cancellation,
							repositories,
						);
					} catch (ex) {
						// A failure in one subtree shouldn't abort the whole search
						Logger.error(ex, scope, 'FAILED');
					}
				}
			}

			resolve(repositories);
		});
	});
}
// Returns the best path form of `pathOrUri` when this provider handles `scheme`, otherwise undefined
canHandlePathOrUri(scheme: string, pathOrUri: string | Uri): string | undefined {
	if (this.supportedSchemes.has(scheme)) return getBestPath(pathOrUri);
	return undefined;
}
// Resolves `pathOrUri` against `base`, returning an absolute Uri; `base` must be
// an absolute path or a parseable Uri (throws otherwise)
getAbsoluteUri(pathOrUri: string | Uri, base: string | Uri): Uri {
	// Convert the base to a Uri if it isn't one
	if (typeof base === 'string') {
		// If it looks like a Uri parse it
		if (maybeUri(base)) {
			base = Uri.parse(base, true);
		} else {
			if (!isAbsolute(base)) {
				debugger;
				void window.showErrorMessage(
					`Unable to get absolute uri between ${
						typeof pathOrUri === 'string' ? pathOrUri : pathOrUri.toString(true)
					} and ${base}; Base path '${base}' must be an absolute path`,
				);
				throw new Error(`Base path '${base}' must be an absolute path`);
			}

			base = Uri.file(base);
		}
	}

	// Short-circuit if the path is relative
	if (typeof pathOrUri === 'string') {
		const normalized = normalizePath(pathOrUri);
		if (!isAbsolute(normalized)) return Uri.joinPath(base, normalized);
	}

	// Otherwise re-root the absolute path under base via its relative form
	const relativePath = this.getRelativePath(pathOrUri, base);
	return Uri.joinPath(base, relativePath);
}
// Returns the best Uri for `path` at `ref`: the working-tree file for uncommitted
// refs (if tracked or untracked in the repo), the scm git uri for staged changes,
// and a GitLens revision uri otherwise
@log({ exit: true })
async getBestRevisionUri(repoPath: string, path: string, ref: string | undefined): Promise<Uri | undefined> {
	if (ref === deletedOrMissing) return undefined;

	// TODO@eamodio Align this with isTrackedCore?
	if (!ref || (isUncommitted(ref) && !isUncommittedStaged(ref))) {
		// Make sure the file exists in the repo
		let data = await this.git.ls_files(repoPath, path);
		if (data != null) return this.getAbsoluteUri(path, repoPath);

		// Check if the file exists untracked
		data = await this.git.ls_files(repoPath, path, { untracked: true });
		if (data != null) return this.getAbsoluteUri(path, repoPath);

		return undefined;
	}

	// If the ref is the index, then try to create a Uri using the Git extension, but if we can't find a repo for it, then generate our own Uri
	if (isUncommittedStaged(ref)) {
		let scmRepo = await this.getScmRepository(repoPath);
		if (scmRepo == null) {
			// If the repoPath is a canonical path, then we need to remap it to the real path, because the vscode.git extension always uses the real path
			const realUri = this.fromCanonicalMap.get(repoPath);
			if (realUri != null) {
				scmRepo = await this.getScmRepository(realUri.fsPath);
			}
		}

		if (scmRepo != null) {
			return this.getScmGitUri(path, repoPath);
		}
	}

	return this.getRevisionUri(repoPath, path, ref);
}
// Computes the normalized path of `pathOrUri` relative to `base`; `base` must be
// an absolute path or a parseable Uri (throws otherwise). Mirrors getAbsoluteUri.
getRelativePath(pathOrUri: string | Uri, base: string | Uri): string {
	// Convert the base to a Uri if it isn't one
	if (typeof base === 'string') {
		// If it looks like a Uri parse it
		if (maybeUri(base)) {
			base = Uri.parse(base, true);
		} else {
			if (!isAbsolute(base)) {
				debugger;
				void window.showErrorMessage(
					`Unable to get relative path between ${
						typeof pathOrUri === 'string' ? pathOrUri : pathOrUri.toString(true)
					} and ${base}; Base path '${base}' must be an absolute path`,
				);
				throw new Error(`Base path '${base}' must be an absolute path`);
			}

			base = Uri.file(base);
		}
	}

	// Convert the path to a Uri if it isn't one
	if (typeof pathOrUri === 'string') {
		if (maybeUri(pathOrUri)) {
			pathOrUri = Uri.parse(pathOrUri, true);
		} else {
			// An already-relative string path needs no base at all
			if (!isAbsolute(pathOrUri)) return normalizePath(pathOrUri);

			pathOrUri = Uri.file(pathOrUri);
		}
	}

	const relativePath = relative(base.fsPath, pathOrUri.fsPath);
	return normalizePath(relativePath);
}
// Builds a gitlens-scheme Uri encoding the repoPath/ref (and any UNC prefix) in
// its authority, so the content provider can materialize the file at that revision
getRevisionUri(repoPath: string, path: string, ref: string): Uri {
	// Working-tree (non-staged) refs just resolve to the file on disk
	if (isUncommitted(ref) && !isUncommittedStaged(ref)) return this.getAbsoluteUri(path, repoPath);

	let uncPath;

	path = normalizePath(this.getAbsoluteUri(path, repoPath).fsPath);
	if (path.startsWith('//')) {
		// save the UNC part of the path so we can re-add it later
		const index = path.indexOf('/', 2);
		uncPath = path.substring(0, index);
		path = path.substring(index);
	}

	// Uri paths must be rooted
	if (path.charCodeAt(0) !== slash) {
		path = `/${path}`;
	}

	const metadata: RevisionUriData = {
		ref: ref,
		repoPath: normalizePath(repoPath),
		uncPath: uncPath,
	};

	const uri = Uri.from({
		scheme: Schemes.GitLens,
		authority: encodeGitLensRevisionUriAuthority(metadata),
		path: path,
		// Replace `/` with `\u2009\u2215\u2009` so that it doesn't get treated as part of the path of the file
		query: ref
			? JSON.stringify({ ref: shortenRevision(ref).replaceAll('/', '\u2009\u2215\u2009') })
			: undefined,
	});
	return uri;
}
// Finds the current working-tree uri for a possibly renamed/deleted file by
// walking rename history until the path exists in the index; returns undefined
// if the file was deleted or no longer exists on disk
@log({ exit: true })
async getWorkingUri(repoPath: string, uri: Uri) {
	let relativePath = this.getRelativePath(uri, repoPath);

	let data;
	let ref;
	do {
		// If the path is tracked under its current name, we're done
		data = await this.git.ls_files(repoPath, relativePath);
		if (data != null) {
			relativePath = splitSingle(data, '\n')[0];
			break;
		}

		// TODO: Add caching
		const cfg = configuration.get('advanced');

		// Get the most recent commit for this file name
		ref = await this.git.log__file_recent(repoPath, relativePath, {
			ordering: cfg.commitOrdering,
			similarityThreshold: cfg.similarityThreshold,
		});
		if (ref == null) return undefined;

		// Now check if that commit had any renames
		data = await this.git.log__file(repoPath, '.', ref, {
			argsOrFormat: parseGitLogSimpleFormat,
			fileMode: 'simple',
			filters: ['R', 'C', 'D'],
			limit: 1,
			ordering: cfg.commitOrdering,
		});
		if (data == null || data.length === 0) break;

		const [foundRef, foundFile, foundStatus] = parseGitLogSimpleRenamed(data, relativePath);
		if (foundStatus === 'D' && foundFile != null) return undefined;
		if (foundRef == null || foundFile == null) break;

		// Follow the rename and try again
		relativePath = foundFile;
	} while (true);

	uri = this.getAbsoluteUri(relativePath, repoPath);
	return (await fsExists(uri.fsPath)) ? uri : undefined;
}
@log()
async addRemote(repoPath: string, name: string, url: string, options?: { fetch?: boolean }): Promise<void> {
	// Register the new remote, then invalidate this repo's cached remotes
	await this.git.remote__add(repoPath, name, url, options);
	this.container.events.fire('git:cache:reset', { caches: ['remotes'], repoPath: repoPath });
}
@log()
async pruneRemote(repoPath: string, name: string): Promise<void> {
	// Prune stale remote-tracking branches, then invalidate this repo's cached remotes
	await this.git.remote__prune(repoPath, name);
	this.container.events.fire('git:cache:reset', { caches: ['remotes'], repoPath: repoPath });
}
@log()
async removeRemote(repoPath: string, name: string): Promise<void> {
	// Remove the remote, then invalidate this repo's cached remotes
	await this.git.remote__remove(repoPath, name);
	this.container.events.fire('git:cache:reset', { caches: ['remotes'], repoPath: repoPath });
}
// Applies the diff between two refs of `uri` to the working tree; if the patch
// doesn't apply cleanly, offers to retry allowing conflicts
@log()
async applyChangesToWorkingFile(uri: GitUri, ref1?: string, ref2?: string) {
	const scope = getLogScope();

	// Default to the uri's sha; with a single ref, diff against its parent (ref^)
	ref1 = ref1 ?? uri.sha;
	if (ref1 == null || uri.repoPath == null) return;

	if (ref2 == null) {
		ref2 = ref1;
		ref1 = `${ref1}^`;
	}

	const [relativePath, root] = splitPath(uri, uri.repoPath);

	let patch;
	try {
		patch = await this.git.diff(root, relativePath, ref1, ref2);
		void (await this.git.apply(root, patch));
	} catch (ex) {
		const msg: string = ex?.toString() ?? '';
		if (patch && /patch does not apply/i.test(msg)) {
			const result = await window.showWarningMessage(
				'Unable to apply changes cleanly. Retry and allow conflicts?',
				{ title: 'Yes' },
				{ title: 'No', isCloseAffordance: true },
			);
			// Bail unless the user explicitly chose to retry
			// (the redundant nested `if (result.title === 'Yes')` check was removed — the
			// guard above already guarantees it)
			if (result == null || result.title !== 'Yes') return;

			try {
				void (await this.git.apply(root, patch, { allowConflicts: true }));
				return;
			} catch (e) {
				// eslint-disable-next-line no-ex-assign
				ex = e;
			}
		}

		Logger.error(ex, scope);
		void showGenericErrorMessage('Unable to apply changes');
	}
}
// Applies an unreachable (dangling) commit `ref` as a patch: optionally stashes
// working changes first, optionally creates a branch and/or worktree to target,
// then cherry-picks the commit without committing. Throws ApplyPatchCommitError
// with a specific reason on each failure mode.
@log()
async applyUnreachableCommitForPatch(
	repoPath: string,
	ref: string,
	options?: {
		branchName?: string;
		createBranchIfNeeded?: boolean;
		createWorktreePath?: string;
		stash?: boolean | 'prompt';
	},
): Promise<void> {
	const scope = getLogScope();

	if (options?.stash) {
		// Stash any changes first
		const status = await this.getStatusForRepo(repoPath);
		if (status?.files?.length) {
			if (options.stash === 'prompt') {
				const confirm = { title: 'Stash Changes' };
				const cancel = { title: 'Cancel', isCloseAffordance: true };
				const result = await window.showWarningMessage(
					'You have changes in your working tree.\nDo you want to stash them before applying the patch?',
					{ modal: true },
					confirm,
					cancel,
				);
				if (result !== confirm) throw new CancellationError();
			}

			try {
				await this.git.stash__push(repoPath, undefined, { includeUntracked: true });
			} catch (ex) {
				Logger.error(ex, scope);
				// Note: fixed the duplicated word ("changes changes") in this user-facing message
				throw new ApplyPatchCommitError(
					ApplyPatchCommitErrorReason.StashFailed,
					`Unable to apply patch; failed stashing working changes${
						ex instanceof StashPushError ? `: ${ex.message}` : ''
					}`,
					ex,
				);
			}
		}
	}

	let targetPath = repoPath;
	const currentBranch = await this.getBranch(repoPath);
	const branchExists =
		options?.branchName == null ||
		currentBranch?.name === options.branchName ||
		(await this.getBranches(repoPath, { filter: b => b.name === options.branchName }))?.values?.length > 0;
	const shouldCreate = options?.branchName != null && !branchExists && options.createBranchIfNeeded;

	// TODO: Worktree creation should ideally be handled before calling this, and then
	// applyPatchCommit should be pointing to the worktree path. If done here, the newly created
	// worktree cannot be opened and we cannot handle issues elegantly.
	if (options?.createWorktreePath != null) {
		// Use loose equality so both null AND undefined branch names are rejected
		// (was `=== null`, which let an undefined branchName slip through this guard)
		if (options?.branchName == null || options.branchName === currentBranch?.name) {
			throw new ApplyPatchCommitError(
				ApplyPatchCommitErrorReason.CreateWorktreeFailed,
				'Unable to apply patch; failed creating worktree',
			);
		}

		try {
			await this.createWorktree(repoPath, options.createWorktreePath, {
				commitish: options?.branchName != null && branchExists ? options.branchName : currentBranch?.name,
				createBranch: shouldCreate ? options.branchName : undefined,
			});
		} catch (ex) {
			Logger.error(ex, scope);
			throw new ApplyPatchCommitError(
				ApplyPatchCommitErrorReason.CreateWorktreeFailed,
				`Unable to apply patch; failed creating worktree${
					ex instanceof WorktreeCreateError ? `: ${ex.message}` : ''
				}`,
				ex,
			);
		}

		const worktree = await this.container.git.getWorktree(
			repoPath,
			w => normalizePath(w.uri.fsPath) === normalizePath(options.createWorktreePath!),
		);
		if (worktree == null) {
			throw new ApplyPatchCommitError(
				ApplyPatchCommitErrorReason.CreateWorktreeFailed,
				'Unable to apply patch; failed creating worktree',
			);
		}

		// All subsequent operations target the new worktree
		targetPath = worktree.uri.fsPath;
	}

	if (options?.branchName != null && currentBranch?.name !== options.branchName) {
		const checkoutRef = shouldCreate ? currentBranch?.ref ?? 'HEAD' : options.branchName;
		await this.checkout(targetPath, checkoutRef, {
			createBranch: shouldCreate ? options.branchName : undefined,
		});
	}

	// Apply the patch using a cherry pick without committing
	try {
		await this.git.cherrypick(targetPath, ref, { noCommit: true, errors: GitErrorHandling.Throw });
	} catch (ex) {
		Logger.error(ex, scope);
		if (ex instanceof CherryPickError) {
			if (ex.reason === CherryPickErrorReason.Conflicts) {
				throw new ApplyPatchCommitError(
					ApplyPatchCommitErrorReason.AppliedWithConflicts,
					`Patch applied with conflicts`,
					ex,
				);
			}

			if (ex.reason === CherryPickErrorReason.AbortedWouldOverwrite) {
				throw new ApplyPatchCommitError(
					ApplyPatchCommitErrorReason.ApplyAbortedWouldOverwrite,
					`Unable to apply patch as some local changes would be overwritten`,
					ex,
				);
			}
		}

		throw new ApplyPatchCommitError(
			ApplyPatchCommitErrorReason.ApplyFailed,
			`Unable to apply patch${ex instanceof CherryPickError ? `: ${ex.message}` : ''}`,
			ex,
		);
	}
}
// Checks out `ref` (optionally creating a branch or limiting to a path) and
// resets branch/status caches; surfaces friendly errors instead of throwing
@log()
async checkout(
	repoPath: string,
	ref: string,
	options?: { createBranch?: string } | { path?: string },
): Promise<void> {
	const scope = getLogScope();

	try {
		await this.git.checkout(repoPath, ref, options);
		this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['branches', 'status'] });
		return;
	} catch (ex) {
		const message = ex?.toString() ?? '';
		const wouldOverwrite = /overwritten by checkout/i.test(message);
		if (wouldOverwrite) {
			void showGenericErrorMessage(
				`Unable to checkout '${ref}'. Please commit or stash your changes before switching branches`,
			);
			return;
		}

		Logger.error(ex, scope);
		void showGenericErrorMessage(`Unable to checkout '${ref}'`);
	}
}
// Clones `url` under `parentPath`, returning the cloned repo path or undefined on failure
@log()
async clone(url: string, parentPath: string): Promise<string | undefined> {
	const scope = getLogScope();

	try {
		// Must `return await` here: returning the bare promise would let a rejected
		// clone escape this try/catch, so the error would never be logged or shown
		// and the rejection would propagate to the caller instead of yielding undefined
		return await this.git.clone(url, parentPath);
	} catch (ex) {
		Logger.error(ex, scope);
		void showGenericErrorMessage(`Unable to clone '${url}'`);
	}

	return undefined;
}
// Applies `contents` (a patch) on top of `baseRef` entirely inside a temporary
// index file — never touching the working tree or any branch — producing a
// dangling commit whose GitCommit model is returned
@log({ args: { 1: '<contents>', 3: '<message>' } })
async createUnreachableCommitForPatch(
	repoPath: string,
	contents: string,
	baseRef: string,
	message: string,
): Promise<GitCommit | undefined> {
	const scope = getLogScope();

	// git apply requires a trailing newline
	if (!contents.endsWith('\n')) {
		contents += '\n';
	}

	// Create a temporary index file
	const tempDir = await fs.mkdtemp(path.join(tmpdir(), 'gl-'));
	const tempIndex = joinPaths(tempDir, 'index');
	try {
		// Tell Git to use our soon to be created index file
		const env = { GIT_INDEX_FILE: tempIndex };

		// Create the temp index file from a base ref/sha

		// Get the tree of the base
		const newIndex = await this.git.git<string>(
			{
				cwd: repoPath,
				env: env,
			},
			'ls-tree',
			'-z',
			'-r',
			'--full-name',
			baseRef,
		);

		// Write the tree to our temp index
		await this.git.git<string>(
			{
				cwd: repoPath,
				env: env,
				stdin: newIndex,
			},
			'update-index',
			'-z',
			'--index-info',
		);

		// Apply the patch to our temp index, without touching the working directory
		await this.git.apply2(repoPath, { env: env, stdin: contents }, '--cached');

		// Create a new tree from our patched index
		const tree = (
			await this.git.git<string>(
				{
					cwd: repoPath,
					env: env,
				},
				'write-tree',
			)
		)?.trim();

		// Create new commit from the tree
		const sha = (
			await this.git.git<string>(
				{
					cwd: repoPath,
					env: env,
				},
				'commit-tree',
				tree,
				'-p',
				baseRef,
				'-m',
				message,
			)
		)?.trim();

		return this.getCommit(repoPath, sha);
	} catch (ex) {
		Logger.error(ex, scope);
		debugger;
		throw ex;
	} finally {
		// Delete the temporary index file
		try {
			await fs.rm(tempDir, { recursive: true });
		} catch (ex) {
			// best-effort cleanup; a leftover temp dir is harmless
			debugger;
		}
	}
}
// Clears the requested caches (or everything when no cache names are given),
// either for a single repo or globally when repoPath is undefined
@log({ singleLine: true })
private resetCaches(repoPath: string | undefined, ...caches: GitCaches[]) {
	// An empty `caches` list means "reset everything"
	const affects = (cache: GitCaches) => !caches.length || caches.includes(cache);

	const cachesToClear = [];
	if (affects('branches')) cachesToClear.push(this._branchesCache);
	if (affects('contributors')) cachesToClear.push(this._contributorsCache);
	if (affects('remotes')) cachesToClear.push(this._remotesCache);
	if (affects('stashes')) cachesToClear.push(this._stashesCache);
	if (affects('status')) cachesToClear.push(this._mergeStatusCache, this._rebaseStatusCache);
	if (affects('tags')) cachesToClear.push(this._tagsCache);
	if (affects('worktrees')) cachesToClear.push(this._worktreesCache);
	// Tracked-path and repo-info state are only reset on a full reset
	if (!caches.length) {
		cachesToClear.push(this._trackedPaths, this._repoInfoCache);
	}

	for (const cache of cachesToClear) {
		if (repoPath == null) {
			cache.clear();
		} else {
			cache.delete(repoPath);
		}
	}
}
// Filters `uris` down to those NOT ignored by git (via `git check-ignore`)
@log<LocalGitProvider['excludeIgnoredUris']>({ args: { 1: uris => uris.length } })
async excludeIgnoredUris(repoPath: string, uris: Uri[]): Promise<Uri[]> {
	// Map each normalized fs path back to its original uri
	const pathsToUris = new Map<string, Uri>(uris.map(u => [normalizePath(u.fsPath), u]));

	const data = await this.git.check_ignore(repoPath, ...pathsToUris.keys());
	if (data == null) return uris;

	// check-ignore emits NUL-delimited entries; drop any empty residue
	const ignored = data.split('\0').filter(<T>(i?: T): i is T => Boolean(i));
	if (!ignored.length) return uris;

	for (const ignoredPath of ignored) {
		pathsToUris.delete(ignoredPath);
	}

	return [...pathsToUris.values()];
}
// Fetches the repo; when a branch reference is given, fetches just that branch
// from its remote (optionally as a pull). Known fetch failures are shown to the
// user; anything else is rethrown.
@gate()
@log()
async fetch(
	repoPath: string,
	options?: { all?: boolean; branch?: GitBranchReference; prune?: boolean; pull?: boolean; remote?: string },
): Promise<void> {
	const scope = getLogScope();

	const { branch, ...opts } = options ?? {};
	try {
		if (isBranchReference(branch)) {
			const [branchName, remoteName] = getBranchNameAndRemote(branch);
			if (remoteName == null) return undefined;

			await this.git.fetch(repoPath, {
				branch: branchName,
				remote: remoteName,
				// NOTE(review): the non-null assertion assumes a tracking branch exists here — confirm
				upstream: getBranchTrackingWithoutRemote(branch)!,
				pull: options?.pull,
			});
		} else {
			await this.git.fetch(repoPath, opts);
		}

		this.container.events.fire('git:cache:reset', { repoPath: repoPath });
	} catch (ex) {
		Logger.error(ex, scope);
		if (!FetchError.is(ex)) throw ex;

		void window.showErrorMessage(ex.message);
	}
}
// Pushes a branch/commit: handles publishing new branches, force-push options
// (with-lease / if-includes), and post-push upstream setup when publishing a
// specific commit. Known push failures are shown to the user; others rethrow.
@gate()
@log()
async push(
	repoPath: string,
	options?: { reference?: GitReference; force?: boolean; publish?: { remote: string } },
): Promise<void> {
	const scope = getLogScope();

	let branchName: string;
	let remoteName: string | undefined;
	let upstreamName: string | undefined;
	let setUpstream:
		| {
				branch: string;
				remote: string;
				remoteBranch: string;
		  }
		| undefined;

	if (isBranchReference(options?.reference)) {
		if (options.publish != null) {
			branchName = options.reference.name;
			remoteName = options.publish.remote;
		} else {
			[branchName, remoteName] = getBranchNameAndRemote(options.reference);
		}
		upstreamName = getBranchTrackingWithoutRemote(options.reference);
	} else {
		// No branch reference: push the current branch (optionally a specific commit onto it)
		const branch = await this.getBranch(repoPath);
		if (branch == null) return;

		branchName =
			options?.reference != null
				? `${options.reference.ref}:${
						options?.publish != null ? 'refs/heads/' : ''
				  }${branch.getNameWithoutRemote()}`
				: branch.name;
		remoteName = branch.getRemoteName() ?? options?.publish?.remote;
		upstreamName = options?.reference == null && options?.publish != null ? branch.name : undefined;

		// Git can't setup remote tracking when publishing a new branch to a specific commit, so we'll need to do it after the push
		if (options?.publish?.remote != null && options?.reference != null) {
			setUpstream = {
				branch: branch.getNameWithoutRemote(),
				remote: remoteName!,
				remoteBranch: branch.getNameWithoutRemote(),
			};
		}
	}

	if (options?.publish == null && remoteName == null && upstreamName == null) {
		debugger;
		throw new PushError(PushErrorReason.Other);
	}

	let forceOpts: PushForceOptions | undefined;
	if (options?.force) {
		// Honor the user's force-with-lease / if-includes preferences
		const withLease = configuration.getCore('git.useForcePushWithLease') ?? true;
		if (withLease) {
			forceOpts = {
				withLease: withLease,
				ifIncludes: configuration.getCore('git.useForcePushIfIncludes') ?? true,
			};
		} else {
			forceOpts = {
				withLease: withLease,
			};
		}
	}

	try {
		await this.git.push(repoPath, {
			branch: branchName,
			remote: remoteName,
			upstream: upstreamName,
			force: forceOpts,
			publish: options?.publish != null,
		});

		// Since Git can't setup remote tracking when publishing a new branch to a specific commit, do it now
		if (setUpstream != null) {
			await this.git.branch__set_upstream(
				repoPath,
				setUpstream.branch,
				setUpstream.remote,
				setUpstream.remoteBranch,
			);
		}

		this.container.events.fire('git:cache:reset', { repoPath: repoPath });
	} catch (ex) {
		Logger.error(ex, scope);
		if (!PushError.is(ex)) throw ex;

		void window.showErrorMessage(ex.message);
	}
}
// Pulls the repo (optionally rebasing and/or fetching tags) and resets caches;
// known pull failures are shown to the user, anything else is rethrown
@gate()
@log()
async pull(repoPath: string, options?: { rebase?: boolean; tags?: boolean }): Promise<void> {
	const scope = getLogScope();

	try {
		const { rebase, tags } = options ?? {};
		await this.git.pull(repoPath, { rebase: rebase, tags: tags });

		this.container.events.fire('git:cache:reset', { repoPath: repoPath });
	} catch (ex) {
		Logger.error(ex, scope);
		if (!PullError.is(ex)) throw ex;

		void window.showErrorMessage(ex.message);
	}
}
// repo path -> its symlink (canonical-facing) uri; populated by findRepositoryUri's symlink handling
private readonly toCanonicalMap = new Map<string, Uri>();
// canonical (symlink) repo path -> real uri; used to remap paths for the vscode.git extension, which uses real paths
private readonly fromCanonicalMap = new Map<string, Uri>();
// fsPaths git reported as not "safe" via rev-parse — presumably dubious-ownership paths; TODO confirm
protected readonly unsafePaths = new Set<string>();
	/**
	 * Resolves the root uri of the repository containing `uri`, or `undefined` when none is found.
	 *
	 * Uses `git rev-parse --show-toplevel`, tracks "unsafe" repository results, normalizes
	 * Windows mapped-drive UNC paths back to their drive letters, and preserves symlinked
	 * paths (VS Code provides document uris using the symlink, not its target).
	 *
	 * @param uri Uri of a file or folder to search from
	 * @param isDirectory Whether `uri` is a directory; stat'ed when not provided
	 */
	@gate()
	@debug({ exit: true })
	async findRepositoryUri(uri: Uri, isDirectory?: boolean): Promise<Uri | undefined> {
		const scope = getLogScope();

		let repoPath: string | undefined;
		try {
			if (isDirectory == null) {
				const stats = await workspace.fs.stat(uri);
				isDirectory = (stats.type & FileType.Directory) === FileType.Directory;
			}

			// If the uri isn't a directory, go up one level
			if (!isDirectory) {
				uri = Uri.joinPath(uri, '..');
			}

			// `safe` is tri-state: true, false (git flagged the repo unsafe), or undefined
			// (no determination); only definitive answers update `unsafePaths`
			let safe;
			[safe, repoPath] = await this.git.rev_parse__show_toplevel(uri.fsPath);
			if (safe) {
				this.unsafePaths.delete(uri.fsPath);
			} else if (safe === false) {
				this.unsafePaths.add(uri.fsPath);
			}
			if (!repoPath) return undefined;

			const repoUri = Uri.file(repoPath);

			// On Git 2.25+ if you call `rev-parse --show-toplevel` on a mapped drive, instead of getting the mapped drive path back, you get the UNC path for the mapped drive.
			// So try to normalize it back to the mapped drive path, if possible
			if (isWindows && repoUri.authority.length !== 0 && uri.authority.length === 0) {
				const match = driveLetterRegex.exec(uri.path);
				if (match != null) {
					const [, letter] = match;

					try {
						// Resolve the drive letter to its UNC network path (callback API wrapped in a promise)
						const networkPath = await new Promise<string | undefined>(resolve =>
							realpath.native(`${letter}:\\`, { encoding: 'utf8' }, (err, resolvedPath) =>
								resolve(err != null ? undefined : resolvedPath),
							),
						);
						if (networkPath != null) {
							// If the repository is at the root of the mapped drive then we
							// have to append `\` (ex: D:\) otherwise the path is not valid.
							const isDriveRoot = pathEquals(repoUri.fsPath, networkPath);

							repoPath = normalizePath(
								repoUri.fsPath.replace(
									networkPath,
									`${letter.toLowerCase()}:${isDriveRoot || networkPath.endsWith('\\') ? '\\' : ''}`,
								),
							);
							return Uri.file(repoPath);
						}
					} catch {}
				}

				return Uri.file(normalizePath(uri.fsPath));
			}

			// Check if we are a symlink and if so, use the symlink path (not its resolved path)
			// This is because VS Code will provide document Uris using the symlinked path
			const canonicalUri = this.toCanonicalMap.get(repoPath);
			if (canonicalUri == null) {
				let symlink;
				[repoPath, symlink] = await new Promise<[string, string | undefined]>(resolve => {
					realpath(uri.fsPath, { encoding: 'utf8' }, (err, resolvedPath) => {
						if (err != null) {
							Logger.debug(scope, `fs.realpath failed; repoPath=${repoPath}`);
							resolve([repoPath!, undefined]);
							return;
						}

						if (pathEquals(uri.fsPath, resolvedPath)) {
							Logger.debug(scope, `No symlink detected; repoPath=${repoPath}`);
							resolve([repoPath!, undefined]);
							return;
						}

						// Derive the symlinked repo path by trimming the resolved suffix off the original uri
						let linkPath = normalizePath(resolvedPath);
						const index = commonBaseIndex(`${repoPath}/`, `${linkPath}/`, '/');
						const uriPath = normalizePath(uri.fsPath);
						if (index < linkPath.length - 1) {
							linkPath = uriPath.substring(0, uriPath.length - (linkPath.length - index));
						} else {
							linkPath = uriPath;
						}

						Logger.debug(
							scope,
							`Symlink detected; repoPath=${repoPath}, path=${uri.fsPath}, resolvedPath=${resolvedPath}`,
						);
						resolve([repoPath!, linkPath]);
					});
				});

				// If we found a symlink, keep track of the mappings
				if (symlink != null) {
					this.toCanonicalMap.set(repoPath, Uri.file(symlink));
					this.fromCanonicalMap.set(symlink, Uri.file(repoPath));
				}
			}

			return repoPath ? Uri.file(repoPath) : undefined;
		} catch (ex) {
			Logger.error(ex, scope);
			return undefined;
		}
	}
@log()
getLeftRightCommitCount(
repoPath: string,
range: GitRevisionRange,
options?: { authors?: GitUser[] | undefined; excludeMerges?: boolean },
): Promise<LeftRightCommitCountResult | undefined> {
return this.git.rev_list__left_right(repoPath, range, options?.authors, options?.excludeMerges);
}
	/**
	 * Returns the full-file blame for `uri`, using the per-document cache when enabled.
	 * Dirty documents are redirected to `getBlameContents` so the in-memory text is blamed.
	 */
	@gate<LocalGitProvider['getBlame']>((u, d) => `${u.toString()}|${d?.isDirty}`)
	@log<LocalGitProvider['getBlame']>({ args: { 1: d => d?.isDirty } })
	async getBlame(uri: GitUri, document?: TextDocument | undefined): Promise<GitBlame | undefined> {
		const scope = getLogScope();

		if (document?.isDirty) return this.getBlameContents(uri, document.getText());

		// Cache key includes the ref (if any) so blames of different revisions don't collide
		let key = 'blame';
		if (uri.sha != null) {
			key += `:${uri.sha}`;
		}

		const doc = await this.container.documentTracker.getOrAdd(document ?? uri);
		if (this.useCaching) {
			if (doc.state != null) {
				const cachedBlame = doc.state.getBlame(key);
				if (cachedBlame != null) {
					Logger.debug(scope, `Cache hit: '${key}'`);
					return cachedBlame.item;
				}
			}

			Logger.debug(scope, `Cache miss: '${key}'`);

			doc.state ??= new GitDocumentState();
		}

		// Cache the promise (not the result) so concurrent callers share one in-flight blame
		const promise = this.getBlameCore(uri, doc, key, scope);

		if (doc.state != null) {
			Logger.debug(scope, `Cache add: '${key}'`);

			const value: CachedBlame = {
				item: promise as Promise<GitBlame>,
			};
			doc.state.setBlame(key, value);
		}

		return promise;
	}
	/**
	 * Runs the actual `git blame` and parses the result.
	 * Expected failures are cached as empty results on the document state so they aren't retried.
	 */
	private async getBlameCore(
		uri: GitUri,
		document: TrackedGitDocument,
		key: string,
		scope: LogScope | undefined,
	): Promise<GitBlame | undefined> {
		const paths = await this.isTrackedWithDetails(uri);
		if (paths == null) {
			Logger.log(scope, `Skipping blame; '${uri.fsPath}' is not tracked`);
			return emptyPromise as Promise<GitBlame>;
		}

		const [relativePath, root] = paths;

		try {
			// Run the blame, current-user lookup, and file stat concurrently
			const [dataResult, userResult, statResult] = await Promise.allSettled([
				this.git.blame(root, relativePath, {
					ref: uri.sha,
					args: configuration.get('advanced.blame.customArguments'),
					ignoreWhitespace: configuration.get('blame.ignoreWhitespace'),
				}),
				this.getCurrentUser(root),
				workspace.fs.stat(uri),
			]);

			const blame = parseGitBlame(
				this.container,
				root,
				getSettledValue(dataResult),
				getSettledValue(userResult),
				getSettledValue(statResult)?.mtime,
			);
			return blame;
		} catch (ex) {
			Logger.error(ex, scope);

			// Trap and cache expected blame errors
			if (document.state != null) {
				const msg = ex?.toString() ?? '';
				Logger.debug(scope, `Cache replace (with empty promise): '${key}'; reason=${msg}`);

				const value: CachedBlame = {
					item: emptyPromise as Promise<GitBlame>,
					errorMessage: msg,
				};
				document.state.setBlame(key, value);

				document.setBlameFailure(ex);

				// Bad .git-blame-ignore-revs files warrant a user-facing warning
				if (ex instanceof BlameIgnoreRevsFileError || ex instanceof BlameIgnoreRevsFileBadRevisionError) {
					void showBlameInvalidIgnoreRevsFileWarningMessage(ex);
				}

				return emptyPromise as Promise<GitBlame>;
			}

			return undefined;
		}
	}
	/**
	 * Returns the blame for `uri` computed against the provided (possibly unsaved) contents.
	 * Cached per document, keyed by an md5 of the contents.
	 */
	@log<LocalGitProvider['getBlameContents']>({ args: { 1: '<contents>' } })
	async getBlameContents(uri: GitUri, contents: string): Promise<GitBlame | undefined> {
		const scope = getLogScope();

		const key = `blame:${md5(contents)}`;

		const doc = await this.container.documentTracker.getOrAdd(uri);
		if (this.useCaching) {
			if (doc.state != null) {
				const cachedBlame = doc.state.getBlame(key);
				if (cachedBlame != null) {
					Logger.debug(scope, `Cache hit: ${key}`);
					return cachedBlame.item;
				}
			}

			Logger.debug(scope, `Cache miss: ${key}`);

			doc.state ??= new GitDocumentState();
		}

		// Cache the promise (not the result) so concurrent callers share one in-flight blame
		const promise = this.getBlameContentsCore(uri, contents, doc, key, scope);

		if (doc.state != null) {
			Logger.debug(scope, `Cache add: '${key}'`);

			const value: CachedBlame = {
				item: promise as Promise<GitBlame>,
			};
			doc.state.setBlame(key, value);
		}

		return promise;
	}
	/**
	 * Runs `git blame` against the provided contents (rather than the file on disk) and parses
	 * the result. Expected failures are cached as empty results on the document state.
	 */
	private async getBlameContentsCore(
		uri: GitUri,
		contents: string,
		document: TrackedGitDocument,
		key: string,
		scope: LogScope | undefined,
	): Promise<GitBlame | undefined> {
		const paths = await this.isTrackedWithDetails(uri);
		if (paths == null) {
			Logger.log(scope, `Skipping blame; '${uri.fsPath}' is not tracked`);
			return emptyPromise as Promise<GitBlame>;
		}

		const [relativePath, root] = paths;

		try {
			// Run the blame, current-user lookup, and file stat concurrently
			const [dataResult, userResult, statResult] = await Promise.allSettled([
				this.git.blame(root, relativePath, {
					contents: contents,
					args: configuration.get('advanced.blame.customArguments'),
					correlationKey: `:${key}`,
					ignoreWhitespace: configuration.get('blame.ignoreWhitespace'),
				}),
				this.getCurrentUser(root),
				workspace.fs.stat(uri),
			]);

			const blame = parseGitBlame(
				this.container,
				root,
				getSettledValue(dataResult),
				getSettledValue(userResult),
				getSettledValue(statResult)?.mtime,
			);
			return blame;
		} catch (ex) {
			Logger.error(ex, scope);

			// Trap and cache expected blame errors
			if (document.state != null) {
				const msg = ex?.toString() ?? '';
				Logger.debug(scope, `Cache replace (with empty promise): '${key}'; reason=${msg}`);

				const value: CachedBlame = {
					item: emptyPromise as Promise<GitBlame>,
					errorMessage: msg,
				};
				document.state.setBlame(key, value);

				document.setBlameFailure(ex);

				// Bad .git-blame-ignore-revs files warrant a user-facing warning
				if (ex instanceof BlameIgnoreRevsFileError || ex instanceof BlameIgnoreRevsFileBadRevisionError) {
					void showBlameInvalidIgnoreRevsFileWarningMessage(ex);
				}

				return emptyPromise as Promise<GitBlame>;
			}

			return undefined;
		}
	}
	/**
	 * Returns the blame for a single editor line of `uri`.
	 * Prefers the cached whole-file blame; only blames the single line when caching is off or
	 * `forceSingleLine` is set. Dirty documents are redirected to `getBlameForLineContents`.
	 */
	@gate<LocalGitProvider['getBlameForLine']>(
		(u, l, d, o) => `${u.toString()}|${l}|${d?.isDirty}|${o?.forceSingleLine}`,
	)
	@log<LocalGitProvider['getBlameForLine']>({ args: { 2: d => d?.isDirty } })
	async getBlameForLine(
		uri: GitUri,
		editorLine: number, // 0-based, Git is 1-based
		document?: TextDocument | undefined,
		options?: { forceSingleLine?: boolean },
	): Promise<GitBlameLine | undefined> {
		if (document?.isDirty) return this.getBlameForLineContents(uri, editorLine, document.getText(), options);

		const scope = getLogScope();

		if (!options?.forceSingleLine && this.useCaching) {
			const blame = await this.getBlame(uri, document);
			if (blame == null) return undefined;

			let blameLine = blame.lines[editorLine];
			if (blameLine == null) {
				// If the requested line is exactly one past the end, fall back to the last line
				if (blame.lines.length !== editorLine) return undefined;
				blameLine = blame.lines[editorLine - 1];
			}

			const commit = blame.commits.get(blameLine.sha);
			if (commit == null) return undefined;

			const author = blame.authors.get(commit.author.name)!;
			return {
				author: { ...author, lineCount: commit.lines.length },
				commit: commit,
				line: blameLine,
			};
		}

		// Single-line path: blame just the one (1-based) line
		const lineToBlame = editorLine + 1;
		const [relativePath, root] = splitPath(uri, uri.repoPath);

		try {
			const [dataResult, userResult, statResult] = await Promise.allSettled([
				this.git.blame(root, relativePath, {
					ref: uri.sha,
					args: configuration.get('advanced.blame.customArguments'),
					ignoreWhitespace: configuration.get('blame.ignoreWhitespace'),
					startLine: lineToBlame,
					endLine: lineToBlame,
				}),
				this.getCurrentUser(root),
				workspace.fs.stat(uri),
			]);

			const blame = parseGitBlame(
				this.container,
				root,
				getSettledValue(dataResult),
				getSettledValue(userResult),
				getSettledValue(statResult)?.mtime,
			);
			if (blame == null) return undefined;

			// Only one line was blamed, so there is exactly one author/commit
			return {
				author: first(blame.authors.values())!,
				commit: first(blame.commits.values())!,
				line: blame.lines[editorLine],
			};
		} catch (ex) {
			Logger.error(ex, scope);
			if (ex instanceof BlameIgnoreRevsFileError || ex instanceof BlameIgnoreRevsFileBadRevisionError) {
				void showBlameInvalidIgnoreRevsFileWarningMessage(ex);
			}
			return undefined;
		}
	}
	/**
	 * Returns the blame for a single editor line, blaming the provided (possibly unsaved)
	 * contents instead of the file on disk. Mirrors `getBlameForLine`.
	 */
	@log<LocalGitProvider['getBlameForLineContents']>({ args: { 2: '<contents>' } })
	async getBlameForLineContents(
		uri: GitUri,
		editorLine: number, // 0-based, Git is 1-based
		contents: string,
		options?: { forceSingleLine?: boolean },
	): Promise<GitBlameLine | undefined> {
		if (!options?.forceSingleLine && this.useCaching) {
			// Prefer the (cached) whole-file blame of the contents
			const blame = await this.getBlameContents(uri, contents);
			if (blame == null) return undefined;

			let blameLine = blame.lines[editorLine];
			if (blameLine == null) {
				// If the requested line is exactly one past the end, fall back to the last line
				if (blame.lines.length !== editorLine) return undefined;
				blameLine = blame.lines[editorLine - 1];
			}

			const commit = blame.commits.get(blameLine.sha);
			if (commit == null) return undefined;

			const author = blame.authors.get(commit.author.name)!;
			return {
				author: { ...author, lineCount: commit.lines.length },
				commit: commit,
				line: blameLine,
			};
		}

		// Single-line path: blame just the one (1-based) line
		const lineToBlame = editorLine + 1;
		const [relativePath, root] = splitPath(uri, uri.repoPath);

		try {
			const [dataResult, userResult, statResult] = await Promise.allSettled([
				this.git.blame(root, relativePath, {
					contents: contents,
					args: configuration.get('advanced.blame.customArguments'),
					ignoreWhitespace: configuration.get('blame.ignoreWhitespace'),
					startLine: lineToBlame,
					endLine: lineToBlame,
				}),
				this.getCurrentUser(root),
				workspace.fs.stat(uri),
			]);

			const blame = parseGitBlame(
				this.container,
				root,
				getSettledValue(dataResult),
				getSettledValue(userResult),
				getSettledValue(statResult)?.mtime,
			);
			if (blame == null) return undefined;

			// Only one line was blamed, so there is exactly one author/commit
			return {
				author: first(blame.authors.values())!,
				commit: first(blame.commits.values())!,
				line: blame.lines[editorLine],
			};
		} catch {
			return undefined;
		}
	}
@log()
async getBlameForRange(uri: GitUri, range: Range): Promise<GitBlameLines | undefined> {
const blame = await this.getBlame(uri);
if (blame == null) return undefined;
return this.getBlameRange(blame, uri, range);
}
@log<LocalGitProvider['getBlameForRangeContents']>({ args: { 2: '<contents>' } })
async getBlameForRangeContents(uri: GitUri, range: Range, contents: string): Promise<GitBlameLines | undefined> {
const blame = await this.getBlameContents(uri, contents);
if (blame == null) return undefined;
return this.getBlameRange(blame, uri, range);
}
@log<LocalGitProvider['getBlameRange']>({ args: { 0: '<blame>' } })
getBlameRange(blame: GitBlame, uri: GitUri, range: Range): GitBlameLines | undefined {
if (blame.lines.length === 0) return { allLines: blame.lines, ...blame };
if (range.start.line === 0 && range.end.line === blame.lines.length - 1) {
return { allLines: blame.lines, ...blame };
}
const lines = blame.lines.slice(range.start.line, range.end.line + 1);
const shas = new Set(lines.map(l => l.sha));
// ranges are 0-based
const startLine = range.start.line + 1;
const endLine = range.end.line + 1;
const authors = new Map<string, GitBlameAuthor>();
const commits = new Map<string, GitCommit>();
for (const c of blame.commits.values()) {
if (!shas.has(c.sha)) continue;
const commit = c.with({
lines: c.lines.filter(l => l.line >= startLine && l.line <= endLine),
});
commits.set(c.sha, commit);
let author = authors.get(commit.author.name);
if (author == null) {
author = {
name: commit.author.name,
lineCount: 0,
};
authors.set(author.name, author);
}
author.lineCount += commit.lines.length;
}
const sortedAuthors = new Map([...authors.entries()].sort((a, b) => b[1].lineCount - a[1].lineCount));
return {
repoPath: uri.repoPath!,
authors: sortedAuthors,
commits: commits,
lines: lines,
allLines: blame.lines,
};
}
	/**
	 * Returns the current branch of the repository.
	 * Falls back to `rev-parse` when no branch is marked current (e.g. no commits yet), and
	 * synthesizes a branch for a detached HEAD (using any in-progress rebase's incoming name).
	 */
	@gate()
	@log()
	async getBranch(repoPath: string): Promise<GitBranch | undefined> {
		let {
			values: [branch],
		} = await this.getBranches(repoPath, { filter: b => b.current });
		if (branch != null) return branch;

		const commitOrdering = configuration.get('advanced.commitOrdering');

		const data = await this.git.rev_parse__currentBranch(repoPath, commitOrdering);
		if (data == null) return undefined;

		// data[0] is "<name>\n<upstream>"; data[1] is the sha (per usage below)
		const [name, upstream] = data[0].split('\n');
		if (isDetachedHead(name)) {
			const [rebaseStatusResult, committerDateResult] = await Promise.allSettled([
				this.getRebaseStatus(repoPath),
				this.git.log__recent_committerdate(repoPath, commitOrdering),
			]);

			const committerDate = getSettledValue(committerDateResult);
			const rebaseStatus = getSettledValue(rebaseStatusResult);

			branch = new GitBranch(
				this.container,
				repoPath,
				rebaseStatus?.incoming.name ?? name,
				false,
				true,
				// committer date comes back as unix seconds
				committerDate != null ? new Date(Number(committerDate) * 1000) : undefined,
				data[1],
				upstream ? { name: upstream, missing: false } : undefined,
				undefined,
				undefined,
				undefined,
				rebaseStatus != null,
			);
		}

		return branch;
	}
	/**
	 * Returns all branches for the repository, caching the loader promise per repo.
	 * `filter` and `sort` are applied after the (possibly cached) load.
	 */
	@log({ args: { 1: false } })
	async getBranches(
		repoPath: string | undefined,
		options?: {
			filter?: (b: GitBranch) => boolean;
			paging?: PagingOptions;
			sort?: boolean | BranchSortOptions;
		},
	): Promise<PagedResult<GitBranch>> {
		if (repoPath == null) return emptyPagedResult;

		let resultsPromise = this.useCaching ? this._branchesCache.get(repoPath) : undefined;
		if (resultsPromise == null) {
			// Loader kept as a named function so the promise itself can be cached below
			async function load(this: LocalGitProvider): Promise<PagedResult<GitBranch>> {
				try {
					const data = await this.git.for_each_ref__branch(repoPath!, { all: true });
					// If we don't get any data, assume the repo doesn't have any commits yet so check if we have a current branch
					if (data == null || data.length === 0) {
						let current;

						const commitOrdering = configuration.get('advanced.commitOrdering');

						const data = await this.git.rev_parse__currentBranch(repoPath!, commitOrdering);
						if (data != null) {
							// data[0] is "<name>\n<upstream>"; data[1] is the sha (per usage below)
							const [name, upstream] = data[0].split('\n');
							const [rebaseStatusResult, committerDateResult] = await Promise.allSettled([
								isDetachedHead(name) ? this.getRebaseStatus(repoPath!) : undefined,
								this.git.log__recent_committerdate(repoPath!, commitOrdering),
							]);

							const committerDate = getSettledValue(committerDateResult);
							const rebaseStatus = getSettledValue(rebaseStatusResult);

							current = new GitBranch(
								this.container,
								repoPath!,
								rebaseStatus?.incoming.name ?? name,
								false,
								true,
								// committer date comes back as unix seconds
								committerDate != null ? new Date(Number(committerDate) * 1000) : undefined,
								data[1],
								{ name: upstream, missing: false },
								undefined,
								undefined,
								undefined,
								rebaseStatus != null,
							);
						}

						return current != null ? { values: [current] } : emptyPagedResult;
					}

					return { values: parseGitBranches(this.container, data, repoPath!) };
				} catch (ex) {
					// On failure, evict the cached promise so the next call retries
					this._branchesCache.delete(repoPath!);

					return emptyPagedResult;
				}
			}

			resultsPromise = load.call(this);

			if (this.useCaching && options?.paging?.cursor == null) {
				this._branchesCache.set(repoPath, resultsPromise);
			}
		}

		let result = await resultsPromise;
		if (options?.filter != null) {
			result = {
				...result,
				values: result.values.filter(options.filter),
			};
		}

		if (options?.sort) {
			sortBranches(result.values, typeof options.sort === 'boolean' ? undefined : options.sort);
		}

		return result;
	}
@log()
async getChangedFilesCount(repoPath: string, ref?: string): Promise<GitDiffShortStat | undefined> {
const data = await this.git.diff__shortstat(repoPath, ref);
if (!data) return undefined;
return parseGitDiffShortStat(data);
}
@log()
async getCommit(repoPath: string, ref: string): Promise<GitCommit | undefined> {
const log = await this.getLog(repoPath, { limit: 2, ref: ref });
if (log == null) return undefined;
return log.commits.get(ref) ?? first(log.commits.values());
}
@log()
async getCommitBranches(
repoPath: string,
refs: string[],
branch?: string | undefined,
options?:
| { all?: boolean; commitDate?: Date; mode?: 'contains' | 'pointsAt' }
| { commitDate?: Date; mode?: 'contains' | 'pointsAt'; remotes?: boolean },
): Promise<string[]> {
if (branch != null) {
const data = await this.git.branchOrTag__containsOrPointsAt(repoPath, refs, {
type: 'branch',
mode: 'contains',
name: branch,
});
return data ? [data?.trim()] : [];
}
const data = await this.git.branchOrTag__containsOrPointsAt(repoPath, refs, { type: 'branch', ...options });
if (!data) return [];
return filterMap(data.split('\n'), b => b.trim() || undefined);
}
	// Returns the number of commits reachable from `ref` (via `git rev-list --count`)
	@log({ exit: true })
	getCommitCount(repoPath: string, ref: string): Promise<number | undefined> {
		return this.git.rev_list__count(repoPath, ref);
	}
@log()
async getCommitForFile(
repoPath: string | undefined,
uri: Uri,
options?: { ref?: string; firstIfNotFound?: boolean; range?: Range },
): Promise<GitCommit | undefined> {
const scope = getLogScope();
const [relativePath, root] = splitPath(uri, repoPath);
try {
const log = await this.getLogForFile(root, relativePath, {
limit: 2,
ref: options?.ref,
range: options?.range,
});
if (log == null) return undefined;
let commit;
if (options?.ref) {
const commit = log.commits.get(options.ref);
if (commit == null && !options?.firstIfNotFound) {
// If the ref isn't a valid sha we will never find it, so let it fall through so we return the first
if (isSha(options.ref) || isUncommitted(options.ref)) return undefined;
}
}
return commit ?? first(log.commits.values());
} catch (ex) {
Logger.error(ex, scope);
return undefined;
}
}
	/**
	 * Builds the data model for the Commit Graph webview: rows (commits/stashes/merges) with
	 * their branch/remote/tag decorations, avatars, worktree info, and serialized context for
	 * webview menus. Results are paged via the returned `more()` continuation; per-row stats
	 * can be deferred to a background `git log` when `include.stats` is set.
	 *
	 * @param repoPath Repository to load the graph for
	 * @param asWebviewUri Converts local uris (e.g. remote icons) into webview-safe uris
	 * @param options `ref` selects the initially-selected commit; `limit` overrides the default page size
	 */
	@log()
	async getCommitsForGraph(
		repoPath: string,
		asWebviewUri: (uri: Uri) => Uri,
		options?: {
			branch?: string;
			include?: { stats?: boolean };
			limit?: number;
			ref?: string;
		},
	): Promise<GitGraph> {
		const defaultLimit = options?.limit ?? configuration.get('graph.defaultItemLimit') ?? 5000;
		const defaultPageLimit = configuration.get('graph.pageItemLimit') ?? 1000;
		const ordering = configuration.get('graph.commitOrdering', undefined, 'date');
		const onlyFollowFirstParent = configuration.get('graph.onlyFollowFirstParent', undefined, false);

		const deferStats = options?.include?.stats; // && defaultLimit > 1000;

		const parser = getGraphParser(options?.include?.stats && !deferStats);
		const refParser = getRefParser();
		const statsParser = getGraphStatsParser();

		// Gather all the supporting data (selected ref, stashes, branches, remotes, user, worktrees) concurrently
		const [refResult, stashResult, branchesResult, remotesResult, currentUserResult, worktreesByBranchResult] =
			await Promise.allSettled([
				this.git.log(repoPath, undefined, ...refParser.arguments, '-n1', options?.ref ?? 'HEAD'),
				this.getStash(repoPath),
				this.getBranches(repoPath),
				this.getRemotes(repoPath),
				this.getCurrentUser(repoPath),
				getWorktreesByBranch(this.container.git.getRepository(repoPath)),
			]);

		const branches = getSettledValue(branchesResult)?.values;
		const branchMap = branches != null ? new Map(branches.map(r => [r.name, r])) : new Map<string, GitBranch>();
		const headBranch = branches?.find(b => b.current);
		const headRefUpstreamName = headBranch?.upstream?.name;
		const worktreesByBranch = getSettledValue(worktreesByBranchResult);

		const currentUser = getSettledValue(currentUserResult);

		const remotes = getSettledValue(remotesResult);
		const remoteMap = remotes != null ? new Map(remotes.map(r => [r.name, r])) : new Map<string, GitRemote>();
		const selectSha = first(refParser.parse(getSettledValue(refResult) ?? ''));

		// upstream branch name -> local branch names tracking it
		const downstreamMap = new Map<string, string[]>();

		let stdin: string | undefined;
		// TODO@eamodio this is insanity -- there *HAS* to be a better way to get git log to return stashes
		const stash = getSettledValue(stashResult);
		if (stash != null && stash.commits.size !== 0) {
			// Feed the stash shas to `git log` via stdin so stash commits appear in the output
			stdin = join(
				map(stash.commits.values(), c => c.sha.substring(0, 9)),
				'\n',
			);
		}

		const useAvatars = configuration.get('graph.avatars', undefined, true);

		// State shared across pages (the `more()` continuation reuses these)
		const avatars = new Map<string, string>();
		const ids = new Set<string>();
		const reachableFromHEAD = new Set<string>();
		const remappedIds = new Map<string, string>();
		let total = 0;
		let iterations = 0;
		let pendingRowsStatsCount = 0;

		// Loads one "page" of rows; `sha` streams until that sha is reached, `cursor` resumes paging
		async function getCommitsForGraphCore(
			this: LocalGitProvider,
			limit: number,
			sha?: string,
			cursor?: { sha: string; skip: number },
		): Promise<GitGraph> {
			const startTotal = total;

			iterations++;

			let log: string | string[] | undefined;
			let nextPageLimit = limit;
			let size;

			do {
				const args = [...parser.arguments, `--${ordering}-order`, '--all'];
				if (onlyFollowFirstParent) {
					args.push('--first-parent');
				}
				if (cursor?.skip) {
					args.push(`--skip=${cursor.skip}`);
				}

				let data;
				if (sha) {
					// Stream the log until `sha` shows up (used to ensure the selected commit is loaded)
					[data, limit] = await this.git.logStreamTo(
						repoPath,
						sha,
						limit,
						stdin ? { stdin: stdin } : undefined,
						...args,
					);
				} else {
					args.push(`-n${nextPageLimit + 1}`);

					data = await this.git.log(repoPath, stdin ? { stdin: stdin } : undefined, ...args);

					if (cursor) {
						// The page must overlap the cursor sha; otherwise widen the page and retry
						if (!getShaInLogRegex(cursor.sha).test(data)) {
							// If we didn't find any new commits, we must have them all so return that we have everything
							if (size === data.length) {
								return {
									repoPath: repoPath,
									avatars: avatars,
									ids: ids,
									includes: options?.include,
									branches: branchMap,
									remotes: remoteMap,
									downstreams: downstreamMap,
									worktreesByBranch: worktreesByBranch,
									rows: [],
								};
							}

							size = data.length;
							nextPageLimit = (nextPageLimit === 0 ? defaultPageLimit : nextPageLimit) * 2;
							cursor.skip -= Math.floor(cursor.skip * 0.1);

							continue;
						}
					}
				}

				if (!data) {
					return {
						repoPath: repoPath,
						avatars: avatars,
						ids: ids,
						includes: options?.include,
						branches: branchMap,
						remotes: remoteMap,
						downstreams: downstreamMap,
						worktreesByBranch: worktreesByBranch,
						rows: [],
					};
				}

				log = data;
				if (limit !== 0) {
					limit = nextPageLimit;
				}

				break;
			} while (true);

			const rows: GitGraphRow[] = [];

			// Loop-scoped working variables, hoisted to avoid re-allocation per commit
			let avatarUri: Uri | undefined;
			let avatarUrl: string | undefined;
			let branch: GitBranch | undefined;
			let branchId: string;
			let branchName: string;
			let context:
				| GraphItemRefContext<GraphBranchContextValue>
				| GraphItemRefContext<GraphTagContextValue>
				| undefined;
			let contexts: GitGraphRowContexts | undefined;
			let group;
			let groupName;
			const groupedRefs = new Map<
				string,
				{ head?: boolean; local?: GitBranchReference; remotes?: GitBranchReference[] }
			>();
			let head = false;
			let isCurrentUser = false;
			let refHead: GitGraphRowHead;
			let refHeads: GitGraphRowHead[];
			let refRemoteHead: GitGraphRowRemoteHead;
			let refRemoteHeads: GitGraphRowRemoteHead[];
			let refTag: GitGraphRowTag;
			let refTags: GitGraphRowTag[];
			let parent: string;
			let parents: string[];
			let remote: GitRemote | undefined;
			let remoteBranchId: string;
			let remoteName: string;
			let stashCommit: GitStashCommit | undefined;
			let stats: GitGraphRowsStats | undefined;
			let tagId: string;
			let tagName: string;
			let tip: string;

			let count = 0;

			const commits = parser.parse(log);
			for (const commit of commits) {
				count++;
				if (ids.has(commit.sha)) continue;

				total++;
				if (remappedIds.has(commit.sha)) continue;

				ids.add(commit.sha);

				refHeads = [];
				refRemoteHeads = [];
				refTags = [];
				contexts = {};

				// Decorate the row with every ref (tag / remote branch / local branch) pointing at it
				if (commit.tips) {
					groupedRefs.clear();

					for (tip of commit.tips.split(', ')) {
						head = false;
						if (tip === 'refs/stash') continue;

						if (tip.startsWith('tag: ')) {
							tagName = tip.substring(5);
							tagId = getTagId(repoPath, tagName);
							context = {
								webviewItem: 'gitlens:tag',
								webviewItemValue: {
									type: 'tag',
									ref: createReference(tagName, repoPath, {
										id: tagId,
										refType: 'tag',
										name: tagName,
									}),
								},
							};

							refTag = {
								id: tagId,
								name: tagName,
								// Not currently used, so don't bother looking it up
								annotated: true,
								context:
									serializeWebviewItemContext<GraphItemRefContext<GraphTagContextValue>>(context),
							};
							refTags.push(refTag);

							continue;
						}

						if (tip.startsWith('HEAD')) {
							head = true;
							reachableFromHEAD.add(commit.sha);

							// Strip the "HEAD -> " prefix to get the branch name
							if (tip !== 'HEAD') {
								tip = tip.substring(8);
							}
						}

						remoteName = getRemoteNameFromBranchName(tip);
						if (remoteName) {
							remote = remoteMap.get(remoteName);
							if (remote != null) {
								branchName = getBranchNameWithoutRemote(tip);
								if (branchName === 'HEAD') continue;

								remoteBranchId = getBranchId(repoPath, true, tip);
								avatarUrl = (
									(useAvatars ? remote.provider?.avatarUri : undefined) ??
									getRemoteIconUri(this.container, remote, asWebviewUri)
								)?.toString(true);
								context = {
									webviewItem: 'gitlens:branch+remote',
									webviewItemValue: {
										type: 'branch',
										ref: createReference(tip, repoPath, {
											id: remoteBranchId,
											refType: 'branch',
											name: tip,
											remote: true,
											upstream: { name: remote.name, missing: false },
										}),
									},
								};

								refRemoteHead = {
									id: remoteBranchId,
									name: branchName,
									owner: remote.name,
									url: remote.url,
									avatarUrl: avatarUrl,
									context:
										serializeWebviewItemContext<GraphItemRefContext<GraphBranchContextValue>>(
											context,
										),
									current: tip === headRefUpstreamName,
									hostingServiceType: remote.provider?.gkProviderId,
								};
								refRemoteHeads.push(refRemoteHead);

								group = groupedRefs.get(branchName);
								if (group == null) {
									group = { remotes: [] };
									groupedRefs.set(branchName, group);
								}
								if (group.remotes == null) {
									group.remotes = [];
								}
								group.remotes.push(context.webviewItemValue.ref);

								continue;
							}
						}

						// Local branch tip
						branch = branchMap.get(tip);
						branchId = branch?.id ?? getBranchId(repoPath, false, tip);
						context = {
							webviewItem: `gitlens:branch${head ? '+current' : ''}${
								branch?.upstream != null ? '+tracking' : ''
							}${worktreesByBranch?.has(branchId) ? '+worktree' : ''}`,
							webviewItemValue: {
								type: 'branch',
								ref: createReference(tip, repoPath, {
									id: branchId,
									refType: 'branch',
									name: tip,
									remote: false,
									upstream: branch?.upstream,
								}),
							},
						};

						refHead = {
							id: branchId,
							name: tip,
							isCurrentHead: head,
							context: serializeWebviewItemContext<GraphItemRefContext<GraphBranchContextValue>>(context),
							upstream:
								branch?.upstream != null
									? {
											name: branch.upstream.name,
											id: getBranchId(repoPath, true, branch.upstream.name),
									  }
									: undefined,
						};
						refHeads.push(refHead);
						if (branch?.upstream?.name != null) {
							// Add the branch name (tip) to the upstream name entry in the downstreams map
							let downstreams = downstreamMap.get(branch.upstream.name);
							if (downstreams == null) {
								downstreams = [];
								downstreamMap.set(branch.upstream.name, downstreams);
							}

							downstreams.push(tip);
						}

						group = groupedRefs.get(tip);
						if (group == null) {
							group = {};
							groupedRefs.set(tip, group);
						}

						if (head) {
							group.head = true;
						}
						group.local = context.webviewItemValue.ref;
					}

					// Emit group contexts for local+remote (or multi-remote) refs sharing a name
					for ([groupName, group] of groupedRefs) {
						if (
							group.remotes != null &&
							((group.local != null && group.remotes.length > 0) || group.remotes.length > 1)
						) {
							if (contexts.refGroups == null) {
								contexts.refGroups = {};
							}
							contexts.refGroups[groupName] = serializeWebviewItemContext<GraphItemRefGroupContext>({
								webviewItemGroup: `gitlens:refGroup${group.head ? '+current' : ''}`,
								webviewItemGroupValue: {
									type: 'refGroup',
									refs: group.local != null ? [group.local, ...group.remotes] : group.remotes,
								},
							});
						}
					}
				}

				stashCommit = stash?.commits.get(commit.sha);

				parents = commit.parents ? commit.parents.split(' ') : [];
				// Propagate HEAD-reachability down the parent chain
				if (reachableFromHEAD.has(commit.sha)) {
					for (parent of parents) {
						reachableFromHEAD.add(parent);
					}
				}

				// Remove the second & third parent, if exists, from each stash commit as it is a Git implementation for the index and untracked files
				if (stashCommit != null && parents.length > 1) {
					// Remap the "index commit" (e.g. contains staged files) of the stash
					remappedIds.set(parents[1], commit.sha);
					// Remap the "untracked commit" (e.g. contains untracked files) of the stash
					remappedIds.set(parents[2], commit.sha);
					parents.splice(1, 2);
				}

				if (stashCommit == null && !avatars.has(commit.authorEmail)) {
					avatarUri = getCachedAvatarUri(commit.authorEmail);
					if (avatarUri != null) {
						avatars.set(commit.authorEmail, avatarUri.toString(true));
					}
				}

				isCurrentUser = isUserMatch(currentUser, commit.author, commit.authorEmail);

				if (stashCommit != null) {
					contexts.row = serializeWebviewItemContext<GraphItemRefContext>({
						webviewItem: 'gitlens:stash',
						webviewItemValue: {
							type: 'stash',
							ref: createReference(commit.sha, repoPath, {
								refType: 'stash',
								name: stashCommit.name,
								message: stashCommit.message,
								number: stashCommit.number,
							}),
						},
					});
				} else {
					contexts.row = serializeWebviewItemContext<GraphItemRefContext>({
						webviewItem: `gitlens:commit${head ? '+HEAD' : ''}${
							reachableFromHEAD.has(commit.sha) ? '+current' : ''
						}`,
						webviewItemValue: {
							type: 'commit',
							ref: createReference(commit.sha, repoPath, {
								refType: 'revision',
								message: commit.message,
							}),
						},
					});

					contexts.avatar = serializeWebviewItemContext<GraphItemContext>({
						webviewItem: `gitlens:contributor${isCurrentUser ? '+current' : ''}`,
						webviewItemValue: {
							type: 'contributor',
							repoPath: repoPath,
							name: commit.author,
							email: commit.authorEmail,
							current: isCurrentUser,
						},
					});
				}

				rows.push({
					sha: commit.sha,
					parents: onlyFollowFirstParent ? [parents[0]] : parents,
					author: isCurrentUser ? 'You' : commit.author,
					email: commit.authorEmail,
					date: Number(ordering === 'author-date' ? commit.authorDate : commit.committerDate) * 1000,
					message: emojify(commit.message.trim()),
					// TODO: review logic for stash, wip, etc
					type: stashCommit != null ? 'stash-node' : parents.length > 1 ? 'merge-node' : 'commit-node',
					heads: refHeads,
					remotes: refRemoteHeads,
					tags: refTags,
					contexts: contexts,
				});

				if (commit.stats != null) {
					if (stats == null) {
						stats = new Map<string, GitGraphRowStats>();
					}
					stats.set(commit.sha, commit.stats);
				}
			}

			const startingCursor = cursor?.sha;
			const lastSha = last(ids);
			cursor =
				lastSha != null
					? {
							sha: lastSha,
							skip: total - iterations,
					  }
					: undefined;

			let rowsStatsDeferred: GitGraph['rowsStatsDeferred'];

			if (deferStats) {
				if (stats == null) {
					stats = new Map<string, GitGraphRowStats>();
				}

				pendingRowsStatsCount++;

				// Kick off a background `git log` to fill in row stats for just this page
				// eslint-disable-next-line no-async-promise-executor
				const promise = new Promise<void>(async resolve => {
					try {
						const args = [...statsParser.arguments];
						if (startTotal === 0) {
							args.push(`-n${total}`);
						} else {
							args.push(`-n${total - startTotal}`, `--skip=${startTotal}`);
						}
						args.push(`--${ordering}-order`, '--all');

						const statsData = await this.git.log(repoPath, stdin ? { stdin: stdin } : undefined, ...args);
						if (statsData) {
							const commitStats = statsParser.parse(statsData);
							for (const stat of commitStats) {
								stats!.set(stat.sha, stat.stats);
							}
						}
					} finally {
						pendingRowsStatsCount--;
						resolve();
					}
				});

				rowsStatsDeferred = {
					isLoaded: () => pendingRowsStatsCount === 0,
					promise: promise,
				};
			}

			return {
				repoPath: repoPath,
				avatars: avatars,
				ids: ids,
				includes: options?.include,
				remappedIds: remappedIds,
				branches: branchMap,
				remotes: remoteMap,
				downstreams: downstreamMap,
				worktreesByBranch: worktreesByBranch,
				rows: rows,
				id: sha,

				rowsStats: stats,
				rowsStatsDeferred: rowsStatsDeferred,

				paging: {
					limit: limit === 0 ? count : limit,
					startingCursor: startingCursor,
					hasMore: limit !== 0 && count > limit,
				},
				more: async (limit: number, sha?: string): Promise<GitGraph | undefined> =>
					getCommitsForGraphCore.call(this, limit, sha, cursor),
			};
		}

		return getCommitsForGraphCore.call(this, defaultLimit, selectSha);
	}
/** Gets the tag names that contain (or point at, per options.mode) the given ref. */
@log()
async getCommitTags(
    repoPath: string,
    ref: string,
    options?: { commitDate?: Date; mode?: 'contains' | 'pointsAt' },
): Promise<string[]> {
    // Ask Git for matching tags; result is newline-delimited tag names
    const result = await this.git.branchOrTag__containsOrPointsAt(repoPath, [ref], { type: 'tag', ...options });
    if (!result) return [];

    // Keep only non-empty, trimmed tag names
    return filterMap(result.split('\n'), line => line.trim() || undefined);
}
/** Reads a single value from the repository's Git configuration. */
getConfig(repoPath: string, key: GitConfigKeys): Promise<string | undefined> {
    const result = this.git.config__get(key, repoPath);
    return result;
}
/** Writes a Git config value (presumably unsets it when value is undefined — confirm helper semantics). */
setConfig(repoPath: string, key: GitConfigKeys, value: string | undefined): Promise<void> {
    const result = this.git.config__set(key, value, repoPath);
    return result;
}
/**
 * Gets the contributors (author/email with commit counts and latest commit
 * date) for a repository, optionally scoped to a ref, all branches, merge
 * handling, and per-contributor stats. Results are cached as promises so
 * concurrent callers share a single Git invocation.
 */
@log()
async getContributors(
    repoPath: string,
    options?: { all?: boolean; merges?: boolean | 'first-parent'; ref?: string; stats?: boolean },
): Promise<GitContributor[]> {
    if (repoPath == null) return [];

    // Build a cache key that captures every option affecting the result
    let key = options?.ref ?? '';
    if (options?.all) {
        key += ':all';
    }
    if (options?.merges) {
        key += `:merges:${options.merges}`;
    }
    if (options?.stats) {
        key += ':stats';
    }

    const contributorsCache = this.useCaching ? this._contributorsCache.get(repoPath) : undefined;

    let contributors = contributorsCache?.get(key);
    if (contributors == null) {
        async function load(this: LocalGitProvider) {
            try {
                repoPath = normalizePath(repoPath);
                const currentUser = await this.getCurrentUser(repoPath);
                const parser = getContributorsParser(options?.stats);

                const args = [...parser.arguments, '--full-history', '--use-mailmap'];

                const merges = options?.merges ?? true;
                if (merges) {
                    args.push(merges === 'first-parent' ? '--first-parent' : '--no-min-parents');
                } else {
                    args.push('--no-merges');
                }

                if (options?.all) {
                    args.push('--all', '--single-worktree');
                }

                const data = await this.git.log(repoPath, { ref: options?.ref }, ...args);

                // Aggregate commits per contributor, keyed by "author|email"
                const contributors = new Map<string, GitContributor>();

                const commits = parser.parse(data);
                for (const c of commits) {
                    const key = `${c.author}|${c.email}`;
                    let contributor = contributors.get(key);
                    if (contributor == null) {
                        contributor = new GitContributor(
                            repoPath,
                            c.author,
                            c.email,
                            1,
                            new Date(Number(c.date) * 1000),
                            isUserMatch(currentUser, c.author, c.email),
                            c.stats,
                        );
                        contributors.set(key, contributor);
                    } else {
                        (contributor as PickMutable<GitContributor, 'count'>).count++;
                        const date = new Date(Number(c.date) * 1000);
                        // Track the contributor's most recent commit date
                        if (date > contributor.date!) {
                            (contributor as PickMutable<GitContributor, 'date'>).date = date;
                        }
                    }
                }

                return [...contributors.values()];
            } catch (ex) {
                // Best effort: drop the now-invalid cached promise and return no contributors
                contributorsCache?.delete(key);
                return [];
            }
        }

        contributors = load.call(this);

        if (this.useCaching) {
            // Cache the promise (not the resolved array) so concurrent callers share one Git call
            if (contributorsCache == null) {
                this._contributorsCache.set(repoPath, new Map([[key, contributors]]));
            } else {
                contributorsCache.set(key, contributors);
            }
        }
    }

    return contributors;
}
/**
 * Gets the current Git user (name/email) for the repository: from local git
 * config first, falling back to the standard Git environment variables and
 * the OS user, then applying any mailmap mapping. The result — including a
 * "no user found" marker — is cached per repository.
 *
 * Fix: `process_env` was an undefined identifier; the Git fallbacks must read
 * from `process.env`.
 */
@gate()
@log()
async getCurrentUser(repoPath: string): Promise<GitUser | undefined> {
    if (!repoPath) return undefined;

    const scope = getLogScope();

    const repo = this._repoInfoCache.get(repoPath);

    let user = repo?.user;
    if (user != null) return user;
    // If we found the repo, but no user data was found just return
    if (user === null) return undefined;

    user = { name: undefined, email: undefined };

    try {
        const data = await this.git.config__get_regex('^user\\.', repoPath, { local: true });
        if (data) {
            let key: string;
            let value: string;

            let match;
            do {
                match = userConfigRegex.exec(data);
                if (match == null) break;

                [, key, value] = match;
                // Force a copied substring so the large config string isn't retained in memory
                user[key as 'name' | 'email'] = ` ${value}`.substr(1);
            } while (true);
        } else {
            // No local config; fall back to Git's environment variables, then the OS user
            user.name =
                process.env.GIT_AUTHOR_NAME || process.env.GIT_COMMITTER_NAME || userInfo()?.username || undefined;
            if (!user.name) {
                // If we found no user data, mark it so we won't bother trying again
                this._repoInfoCache.set(repoPath, { ...repo, user: null });
                return undefined;
            }

            user.email =
                process.env.GIT_AUTHOR_EMAIL ||
                process.env.GIT_COMMITTER_EMAIL ||
                process.env.EMAIL ||
                `${user.name}@${hostname()}`;
        }

        const author = `${user.name} <${user.email}>`;
        // Check if there is a mailmap for the current user
        const mappedAuthor = await this.git.check_mailmap(repoPath, author);
        if (mappedAuthor != null && mappedAuthor.length !== 0 && author !== mappedAuthor) {
            const match = mappedAuthorRegex.exec(mappedAuthor);
            if (match != null) {
                [, user.name, user.email] = match;
            }
        }

        this._repoInfoCache.set(repoPath, { ...repo, user: user });
        return user;
    } catch (ex) {
        Logger.error(ex, scope);
        debugger;

        // Mark it so we won't bother trying again
        this._repoInfoCache.set(repoPath, { ...repo, user: null });
        return undefined;
    }
}
/**
 * Resolves the "base" (merge target) branch name for the given branch ref.
 * Checks the branch's git config (our `gk-merge-base` key first, falling back
 * to VS Code's `vscode-merge-base`, which is migrated to ours), and finally
 * infers the base from the reflog, persisting the answer when found.
 */
@log({ exit: true })
async getBaseBranchName(repoPath: string, ref: string): Promise<string | undefined> {
    const mergeBaseConfigKey: GitConfigKeys = `branch.${ref}.gk-merge-base`;

    try {
        const pattern = `^branch\\.${ref}\\.`;
        const data = await this.git.config__get_regex(pattern, repoPath);
        if (data) {
            const regex = new RegExp(`${pattern}(.+) (.+)$`, 'gm');

            let mergeBase: string | undefined;
            let update = false;

            while (true) {
                const match = regex.exec(data);
                if (match == null) break;

                const [, key, value] = match;
                if (key === 'gk-merge-base') {
                    // Our own key wins; no need to write it back
                    mergeBase = value;
                    update = false;
                    break;
                } else if (key === 'vscode-merge-base') {
                    // Usable, but keep scanning in case our own key also exists; migrate below
                    mergeBase = value;
                    update = true;
                    continue;
                }
            }

            if (mergeBase != null) {
                // Only trust the config value if the branch actually still exists
                const [branch] = (await this.getBranches(repoPath, { filter: b => b.name === mergeBase })).values;
                if (branch != null) {
                    if (update) {
                        void this.setConfig(repoPath, mergeBaseConfigKey, branch.name);
                    }
                    return branch.name;
                }
            }
        }
    } catch {}

    // Fall back to reconstructing the base from the reflog; persist it if found
    const branch = await this.getBaseBranchFromReflog(repoPath, ref);
    if (branch?.upstream != null) {
        void this.setConfig(repoPath, mergeBaseConfigKey, branch.upstream.name);
        return branch.upstream.name;
    }

    return undefined;
}
/**
 * Attempts to infer which branch the given ref was created from by searching
 * the reflog for "branch: Created from ..." entries, then for the checkout
 * that first moved onto this branch.
 * NOTE(review): assumes this.git.reflog returns newline-delimited entries,
 * newest first — confirm against the Git wrapper.
 */
private async getBaseBranchFromReflog(repoPath: string, ref: string): Promise<GitBranch | undefined> {
    try {
        let data = await this.git.reflog(repoPath, undefined, ref, '--grep-reflog=branch: Created from *.');
        let entries = data.split('\n').filter(entry => Boolean(entry));
        // Require exactly one creation entry; anything else is ambiguous
        if (entries.length !== 1) return undefined;

        // Check if branch created from an explicit branch
        let match = entries[0].match(/branch: Created from (.*)$/);
        if (match != null && match.length === 2) {
            const name = match[1];
            if (name !== 'HEAD') {
                const [branch] = (await this.getBranches(repoPath, { filter: b => b.name === name })).values;
                return branch;
            }
        }

        // Check if branch was created from HEAD
        data = await this.git.reflog(
            repoPath,
            undefined,
            'HEAD',
            `--grep-reflog=checkout: moving from .* to ${ref.replace('refs/heads/', '')}`,
        );
        entries = data.split('\n').filter(entry => Boolean(entry));
        if (!entries.length) return undefined;

        // The last (oldest) matching checkout tells us where the branch came from
        match = entries[entries.length - 1].match(/checkout: moving from ([^\s]+)\s/);
        if (match != null && match.length === 2) {
            const name = match[1];
            const [branch] = (await this.getBranches(repoPath, { filter: b => b.name === name })).values;
            return branch;
        }
    } catch {}

    return undefined;
}
/**
 * Gets the default branch name. When a remote is given, queries it via
 * `ls-remote` and returns `<remote>/<branch>`; otherwise reads the local
 * symbolic ref for origin/HEAD.
 *
 * Fix: replaced the deprecated `String.prototype.substr` with `slice`
 * (identical result for a non-negative start index).
 */
@log({ exit: true })
async getDefaultBranchName(repoPath: string | undefined, remote?: string): Promise<string | undefined> {
    if (repoPath == null) return undefined;

    if (remote) {
        try {
            const data = await this.git.ls_remote__HEAD(repoPath, remote);
            if (data == null) return undefined;

            const match = /ref:\s(\S+)\s+HEAD/m.exec(data);
            if (match == null) return undefined;

            const [, branch] = match;
            // Strip the `refs/heads/` prefix from the full ref name
            return `${remote}/${branch.slice('refs/heads/'.length)}`;
        } catch {}
    }

    try {
        const data = await this.git.symbolic_ref(repoPath, `refs/remotes/origin/HEAD`);
        if (data != null) return data.trim();
    } catch {}

    return undefined;
}
/**
 * Gets a unified diff between two refs or the working tree/index. `to` may be
 * the `uncommitted` or `uncommittedStaged` sentinel; when `from` is omitted it
 * defaults to HEAD, or to the commit's first parent (`<to>^`) for a single
 * commit. Optionally restricts the diff to specific URIs.
 */
@log()
async getDiff(
    repoPath: string,
    to: string,
    from?: string,
    options?: { context?: number; uris?: Uri[] },
): Promise<GitDiff | undefined> {
    const scope = getLogScope();
    const params = [`-U${options?.context ?? 3}`];

    if (to === uncommitted) {
        if (from != null) {
            params.push(from);
        } else {
            // Get only unstaged changes
            from = 'HEAD';
        }
    } else if (to === uncommittedStaged) {
        params.push('--staged');
        if (from != null) {
            params.push(from);
        } else {
            // Get only staged changes
            from = 'HEAD';
        }
    } else if (from == null) {
        if (to === '' || to.toUpperCase() === 'HEAD') {
            from = 'HEAD';
            params.push(from);
        } else {
            // Diff a single commit against its first parent
            from = `${to}^`;
            params.push(from, to);
        }
    } else if (to === '') {
        params.push(from);
    } else {
        params.push(from, to);
    }

    if (options?.uris) {
        // Restrict the diff to the given file paths
        params.push('--', ...options.uris.map(u => u.fsPath));
    }

    let data;
    try {
        data = await this.git.diff2(repoPath, { errors: GitErrorHandling.Throw }, ...params);
    } catch (ex) {
        debugger;
        Logger.error(ex, scope);

        return undefined;
    }

    const diff: GitDiff = { contents: data, from: from, to: to };
    return diff;
}
/** Parses a raw patch (via `git apply --numstat --summary`) into the files it touches. */
@log({ args: { 1: false } })
async getDiffFiles(repoPath: string, contents: string): Promise<GitDiffFiles | undefined> {
    // Feed the patch to Git over stdin and request machine-readable stats
    const output = await this.git.apply2(repoPath, { stdin: contents }, '--numstat', '--summary', '-z');
    if (!output) return undefined;

    return { files: parseGitApplyFiles(output, repoPath) };
}
/**
 * Gets the parsed diff for a file between two refs, caching the resulting
 * promise on the tracked document, keyed by the refs involved.
 */
@log()
async getDiffForFile(uri: GitUri, ref1: string | undefined, ref2?: string): Promise<GitDiffFile | undefined> {
    const scope = getLogScope();

    // Cache key encodes both refs (when present)
    let key = 'diff';
    if (ref1 != null) {
        key += `:${ref1}`;
    }
    if (ref2 != null) {
        key += `:${ref2}`;
    }

    const doc = await this.container.documentTracker.getOrAdd(uri);
    if (this.useCaching) {
        if (doc.state != null) {
            const cachedDiff = doc.state.getDiff(key);
            if (cachedDiff != null) {
                Logger.debug(scope, `Cache hit: '${key}'`);
                return cachedDiff.item;
            }
        }

        Logger.debug(scope, `Cache miss: '${key}'`);

        doc.state ??= new GitDocumentState();
    }

    const encoding = await getEncoding(uri);
    const promise = this.getDiffForFileCore(
        uri.repoPath,
        uri.fsPath,
        ref1,
        ref2,
        { encoding: encoding },
        doc,
        key,
        scope,
    );

    if (doc.state != null) {
        Logger.debug(scope, `Cache add: '${key}'`);

        // Cache the promise so concurrent requests share one Git invocation
        const value: CachedDiff = {
            item: promise as Promise<GitDiffFile>,
        };
        doc.state.setDiff(key, value);
    }

    return promise;
}
/**
 * Runs and parses the actual `git diff` for a single file. Expected failures
 * are trapped and cached as an empty result on the document so the same
 * error isn't retried repeatedly.
 */
private async getDiffForFileCore(
    repoPath: string | undefined,
    path: string,
    ref1: string | undefined,
    ref2: string | undefined,
    options: { encoding?: string },
    document: TrackedGitDocument,
    key: string,
    scope: LogScope | undefined,
): Promise<GitDiffFile | undefined> {
    const [relativePath, root] = splitPath(path, repoPath);

    try {
        const data = await this.git.diff(root, relativePath, ref1, ref2, {
            ...options,
            // Only modified files; follow renames with the configured similarity threshold
            filters: ['M'],
            linesOfContext: 0,
            renames: true,
            similarityThreshold: configuration.get('advanced.similarityThreshold'),
        });

        const diff = parseGitFileDiff(data);
        return diff;
    } catch (ex) {
        // Trap and cache expected diff errors
        if (document.state != null) {
            const msg = ex?.toString() ?? '';
            Logger.debug(scope, `Cache replace (with empty promise): '${key}'`);

            const value: CachedDiff = {
                item: emptyPromise as Promise<GitDiffFile>,
                errorMessage: msg,
            };
            document.state.setDiff(key, value);

            return emptyPromise as Promise<GitDiffFile>;
        }

        return undefined;
    }
}
/**
 * Gets the parsed diff between a ref and in-memory contents (e.g. a dirty
 * editor buffer), caching the promise keyed by a hash of the contents.
 *
 * Consistency: cache-hit/miss log messages now quote the key (`'${key}'`) to
 * match the logging style of the sibling getDiffForFile/getLogForFile methods.
 */
@log<LocalGitProvider['getDiffForFileContents']>({ args: { 1: '<contents>' } })
async getDiffForFileContents(uri: GitUri, ref: string, contents: string): Promise<GitDiffFile | undefined> {
    const scope = getLogScope();

    // Key on a digest of the contents so identical buffers share a cache entry
    const key = `diff:${md5(contents)}`;

    const doc = await this.container.documentTracker.getOrAdd(uri);
    if (this.useCaching) {
        if (doc.state != null) {
            const cachedDiff = doc.state.getDiff(key);
            if (cachedDiff != null) {
                Logger.debug(scope, `Cache hit: '${key}'`);
                return cachedDiff.item;
            }
        }

        Logger.debug(scope, `Cache miss: '${key}'`);

        doc.state ??= new GitDocumentState();
    }

    const encoding = await getEncoding(uri);
    const promise = this.getDiffForFileContentsCore(
        uri.repoPath,
        uri.fsPath,
        ref,
        contents,
        { encoding: encoding },
        doc,
        key,
        scope,
    );

    if (doc.state != null) {
        Logger.debug(scope, `Cache add: '${key}'`);

        // Cache the promise so concurrent requests share one Git invocation
        const value: CachedDiff = {
            item: promise as Promise<GitDiffFile>,
        };
        doc.state.setDiff(key, value);
    }

    return promise;
}
/**
 * Runs and parses `git diff` between a ref and supplied contents. Expected
 * failures are trapped and cached as an empty result on the document.
 */
private async getDiffForFileContentsCore(
    repoPath: string | undefined,
    path: string,
    ref: string,
    contents: string,
    options: { encoding?: string },
    document: TrackedGitDocument,
    key: string,
    scope: LogScope | undefined,
): Promise<GitDiffFile | undefined> {
    const [relativePath, root] = splitPath(path, repoPath);

    try {
        const data = await this.git.diff__contents(root, relativePath, ref, contents, {
            ...options,
            // Only modified files; use the configured rename similarity threshold
            filters: ['M'],
            similarityThreshold: configuration.get('advanced.similarityThreshold'),
        });

        const diff = parseGitFileDiff(data);
        return diff;
    } catch (ex) {
        // Trap and cache expected diff errors
        if (document.state != null) {
            const msg = ex?.toString() ?? '';
            Logger.debug(scope, `Cache replace (with empty promise): '${key}'`);

            const value: CachedDiff = {
                item: emptyPromise as Promise<GitDiffFile>,
                errorMessage: msg,
            };
            document.state.setDiff(key, value);

            return emptyPromise as Promise<GitDiffFile>;
        }

        return undefined;
    }
}
/** Gets the diff hunk and hunk line covering a specific editor line, if any. */
@log()
async getDiffForLine(
    uri: GitUri,
    editorLine: number, // 0-based, Git is 1-based
    ref1: string | undefined,
    ref2?: string,
): Promise<GitDiffLine | undefined> {
    try {
        const diff = await this.getDiffForFile(uri, ref1, ref2);

        // Convert the 0-based editor line to Git's 1-based numbering
        const line = editorLine + 1;

        const hunk = diff?.hunks.find(h => h.current.position.start <= line && line <= h.current.position.end);
        const hunkLine = hunk?.lines.get(line);
        if (hunk == null || hunkLine == null) return undefined;

        return { hunk: hunk, line: hunkLine };
    } catch {
        return undefined;
    }
}
/** Gets the changed files (name-status) between two refs or within a revision range. */
@log()
async getDiffStatus(
    repoPath: string,
    ref1OrRange: string | GitRevisionRange,
    ref2?: string,
    options?: { filters?: GitDiffFilter[]; path?: string; similarityThreshold?: number },
): Promise<GitFile[] | undefined> {
    try {
        // Caller-supplied options take precedence over the configured similarity threshold
        const data = await this.git.diff__name_status(repoPath, ref1OrRange, ref2, {
            similarityThreshold: configuration.get('advanced.similarityThreshold') ?? undefined,
            ...options,
        });
        if (!data) return undefined;

        const files = parseGitDiffNameStatusFiles(data, repoPath);
        return files?.length ? files : undefined;
    } catch {
        return undefined;
    }
}
/** Gets the status (added/modified/renamed/…) of a single file in a specific commit. */
@log()
async getFileStatusForCommit(repoPath: string, uri: Uri, ref: string): Promise<GitFile | undefined> {
    // No meaningful status for deleted/missing or uncommitted refs
    if (ref === deletedOrMissing || isUncommitted(ref)) return undefined;

    const [relativePath, root] = splitPath(uri, repoPath);

    const data = await this.git.show__name_status(root, relativePath, ref);
    if (!data) return undefined;

    const files = parseGitDiffNameStatusFiles(data, repoPath);
    return files?.length ? files[0] : undefined;
}
/** Gets the sha of the repository's initial (root) commit, if any. */
@log({ exit: true })
async getFirstCommitSha(repoPath: string): Promise<string | undefined> {
    // maxParents: 0 restricts rev-list to root commit(s); take the first
    const shas = await this.git.rev_list(repoPath, 'HEAD', { maxParents: 0 });
    return shas?.[0];
}
/** Resolves the repository's .git directory (and common dir for worktrees), with per-repo caching. */
@gate()
@debug<LocalGitProvider['getGitDir']>({
    exit: r => `returned ${r.uri.toString(true)}, commonUri=${r.commonUri?.toString(true)}`,
})
async getGitDir(repoPath: string): Promise<GitDir> {
    // Serve from the per-repo info cache when available
    const cached = this._repoInfoCache.get(repoPath);
    if (cached?.gitDir != null) return cached.gitDir;

    const paths = await this.git.rev_parse__git_dir(repoPath);

    let gitDir: GitDir;
    if (paths == null) {
        // Fall back to the conventional `.git` folder inside the repo
        gitDir = { uri: this.getAbsoluteUri('.git', repoPath) };
    } else {
        gitDir = {
            uri: Uri.file(paths.path),
            commonUri: paths.commonPath != null ? Uri.file(paths.commonPath) : undefined,
        };
    }

    this._repoInfoCache.set(repoPath, { ...cached, gitDir: gitDir });
    return gitDir;
}
/** Gets the mtime of FETCH_HEAD as the last-fetched timestamp, or undefined if unavailable. */
@debug()
async getLastFetchedTimestamp(repoPath: string): Promise<number | undefined> {
    try {
        const gitDir = await this.getGitDir(repoPath);
        const fetchHead = Uri.joinPath(gitDir.uri, 'FETCH_HEAD');
        const stats = await workspace.fs.stat(fetchHead);

        // An empty FETCH_HEAD suggests the fetch failed; don't report a timestamp
        return stats.size > 0 ? stats.mtime : undefined;
    } catch {
        return undefined;
    }
}
/**
 * Gets the commit log for a repository, assembling `git log` arguments from
 * the given options (merges, ordering, authors, since/until, extra args,
 * stdin). Requests limit+1 entries so "has more" can be detected; when
 * since/until-style filters are used, hasMore is forced on instead.
 */
@log()
async getLog(
    repoPath: string,
    options?: {
        all?: boolean;
        authors?: GitUser[];
        cursor?: string;
        limit?: number;
        merges?: boolean | 'first-parent';
        ordering?: 'date' | 'author-date' | 'topo' | null;
        ref?: string;
        status?: null | 'name-status' | 'numstat' | 'stat';
        since?: number | string;
        until?: number | string;
        extraArgs?: string[];
        stdin?: string;
    },
): Promise<GitLog | undefined> {
    const scope = getLogScope();

    try {
        const limit = options?.limit ?? configuration.get('advanced.maxListItems') ?? 0;
        const similarityThreshold = configuration.get('advanced.similarityThreshold');
        const args = [
            `--format=${options?.all ? parseGitLogAllFormat : parseGitLogDefaultFormat}`,
            `-M${similarityThreshold == null ? '' : `${similarityThreshold}%`}`,
        ];

        if (options?.status !== null) {
            args.push(`--${options?.status ?? 'name-status'}`, '--full-history');
        }
        if (options?.all) {
            args.push('--all');
        }

        const merges = options?.merges ?? true;
        if (merges) {
            if (limit <= 2) {
                // Ensure we return the merge commit files when we are asking for a specific ref
                args.push('-m');
            }
            args.push(merges === 'first-parent' ? '--first-parent' : '--no-min-parents');
        } else {
            args.push('--no-merges');
        }

        const ordering = options?.ordering ?? configuration.get('advanced.commitOrdering');
        if (ordering) {
            args.push(`--${ordering}-order`);
        }

        if (options?.authors?.length) {
            args.push('--use-mailmap', ...options.authors.map(a => `--author=^${a.name} <${a.email}>$`));
        }

        // since/until (and equivalent extraArgs) break the limit-based hasMore
        // detection, so force it on in those cases; a concrete limit resets it
        let hasMoreOverride;

        if (options?.since) {
            hasMoreOverride = true;
            args.push(`--since="${options.since}"`);
        }
        if (options?.until) {
            hasMoreOverride = true;
            args.push(`--until="${options.until}"`);
        }
        if (options?.extraArgs?.length) {
            if (
                options.extraArgs.some(
                    arg => arg.startsWith('-n') || arg.startsWith('--until=') || arg.startsWith('--since='),
                )
            ) {
                hasMoreOverride = true;
            }
            args.push(...options.extraArgs);
        }

        if (limit) {
            hasMoreOverride = undefined;
            // Ask for one extra commit to detect whether more are available
            args.push(`-n${limit + 1}`);
        }

        const data = await this.git.log(
            repoPath,
            { configs: gitLogDefaultConfigsWithFiles, ref: options?.ref, stdin: options?.stdin },
            ...args,
        );

        // const parser = GitLogParser.defaultParser;

        // const data = await this.git.log2(repoPath, options?.ref, {
        //     ...options,
        //     // args: parser.arguments,
        //     limit: limit,
        //     merges: options?.merges == null ? true : options.merges,
        //     ordering: options?.ordering ?? configuration.get('advanced.commitOrdering'),
        //     similarityThreshold: configuration.get('advanced.similarityThreshold'),
        // });

        // const commits = [];
        // const entries = parser.parse(data);
        // for (const entry of entries) {
        //     commits.push(
        //         new GitCommit2(
        //             repoPath,
        //             entry.sha,
        //             new GitCommitIdentity(
        //                 entry.author,
        //                 entry.authorEmail,
        //                 new Date((entry.authorDate as any) * 1000),
        //             ),
        //             new GitCommitIdentity(
        //                 entry.committer,
        //                 entry.committerEmail,
        //                 new Date((entry.committerDate as any) * 1000),
        //             ),
        //             entry.message.split('\n', 1)[0],
        //             entry.parents.split(' '),
        //             entry.message,
        //             entry.files.map(f => new GitFileChange(repoPath, f.path, f.status as any, f.originalPath)),
        //             [],
        //         ),
        //     );
        // }

        const log = parseGitLog(
            this.container,
            data,
            LogType.Log,
            repoPath,
            undefined,
            options?.ref,
            await this.getCurrentUser(repoPath),
            limit,
            false,
            undefined,
            undefined,
            hasMoreOverride,
        );

        if (log != null) {
            log.query = (limit: number | undefined) => this.getLog(repoPath, { ...options, limit: limit });
            if (log.hasMore) {
                // Don't carry extraArgs forward into paging requests
                let opts;
                if (options != null) {
                    let _;
                    ({ extraArgs: _, ...opts } = options);
                }
                log.more = this.getLogMoreFn(log, opts);
            }
        }

        return log;
    } catch (ex) {
        Logger.error(ex, scope);
        debugger;
        return undefined;
    }
}
/**
 * Gets only the set of commit shas matching the options — a lightweight
 * alternative to getLog when just membership testing is needed.
 */
@log()
async getLogRefsOnly(
    repoPath: string,
    options?: {
        authors?: GitUser[];
        cursor?: string;
        limit?: number;
        merges?: boolean | 'first-parent';
        ordering?: 'date' | 'author-date' | 'topo' | null;
        ref?: string;
        since?: string;
    },
): Promise<Set<string> | undefined> {
    const scope = getLogScope();

    const limit = options?.limit ?? configuration.get('advanced.maxListItems') ?? 0;

    try {
        // Parse only the commit sha (%H) for each entry
        const parser = createLogParserSingle('%H');

        const args = [...parser.arguments, '--full-history'];

        const ordering = options?.ordering ?? configuration.get('advanced.commitOrdering');
        if (ordering) {
            args.push(`--${ordering}-order`);
        }

        if (limit) {
            // One extra entry so callers can detect "has more"
            args.push(`-n${limit + 1}`);
        }

        if (options?.since) {
            args.push(`--since="${options.since}"`);
        }

        const merges = options?.merges ?? true;
        if (merges) {
            args.push(merges === 'first-parent' ? '--first-parent' : '--no-min-parents');
        } else {
            args.push('--no-merges');
        }

        if (options?.authors?.length) {
            if (!args.includes('--use-mailmap')) {
                args.push('--use-mailmap');
            }
            args.push(...options.authors.map(a => `--author=^${a.name} <${a.email}>$`));
        }

        const data = await this.git.log(repoPath, { ref: options?.ref }, ...args);
        const commits = new Set(parser.parse(data));
        return commits;
    } catch (ex) {
        Logger.error(ex, scope);
        debugger;
        return undefined;
    }
}
/**
 * Builds the paging function attached to a GitLog's `more` property.
 * Supports paging by count or "until a ref is found", handling ranged logs,
 * --all logs (paged by a timestamp cursor), and duplicate-free merging of
 * pages into a new GitLog.
 */
private getLogMoreFn(
    log: GitLog,
    options?: {
        all?: boolean;
        authors?: GitUser[];
        limit?: number;
        merges?: boolean;
        ordering?: 'date' | 'author-date' | 'topo' | null;
        ref?: string;
    },
): (limit: number | { until: string } | undefined) => Promise<GitLog> {
    return async (limit: number | { until: string } | undefined) => {
        const moreUntil = limit != null && typeof limit === 'object' ? limit.until : undefined;
        let moreLimit = typeof limit === 'number' ? limit : undefined;

        // Already have the target commit, so there is nothing more to load
        if (moreUntil && some(log.commits.values(), c => c.ref === moreUntil)) {
            return log;
        }

        moreLimit = moreLimit ?? configuration.get('advanced.maxSearchItems') ?? 0;

        // If the log is for a range, then just get everything prior + more
        if (isRevisionRange(log.sha)) {
            const moreLog = await this.getLog(log.repoPath, {
                ...options,
                limit: moreLimit === 0 ? 0 : (options?.limit ?? 0) + moreLimit,
            });
            // If we can't find any more, assume we have everything
            if (moreLog == null) return { ...log, hasMore: false, more: undefined };

            return moreLog;
        }

        const lastCommit = last(log.commits.values());
        const ref = lastCommit?.ref;

        // If we were asked for all refs, use the last commit timestamp (plus a second) as a cursor
        let timestamp: number | undefined;
        if (options?.all) {
            const date = lastCommit?.committer.date;
            // Git only allows 1-second precision, so round up to the nearest second
            timestamp = date != null ? Math.ceil(date.getTime() / 1000) + 1 : undefined;
        }

        let moreLogCount;
        let queryLimit = moreUntil == null ? moreLimit : 0;
        do {
            const moreLog = await this.getLog(log.repoPath, {
                ...options,
                limit: queryLimit,
                ...(timestamp
                    ? {
                          until: timestamp,
                          extraArgs: ['--boundary'],
                      }
                    : { ref: moreUntil == null ? `${ref}^` : `${moreUntil}^..${ref}^` }),
            });
            // If we can't find any more, assume we have everything
            if (moreLog == null) return { ...log, hasMore: false, more: undefined };

            const currentCount = log.commits.size;
            const commits = new Map([...log.commits, ...moreLog.commits]);

            if (currentCount === commits.size && queryLimit !== 0) {
                // If we didn't find any new commits, we must have them all so return that we have everything
                if (moreLogCount === moreLog.commits.size) {
                    return { ...log, hasMore: false, more: undefined };
                }

                // Nothing new yet; double the page size and retry
                moreLogCount = moreLog.commits.size;
                queryLimit = queryLimit * 2;
                continue;
            }

            // Sanity check: when paging by timestamp, the previous tail commit
            // should reappear in the new page
            if (timestamp != null && ref != null && !moreLog.commits.has(ref)) {
                debugger;
            }

            const mergedLog: GitLog = {
                repoPath: log.repoPath,
                commits: commits,
                sha: log.sha,
                range: undefined,
                count: commits.size,
                limit: moreUntil == null ? (log.limit ?? 0) + moreLimit : undefined,
                hasMore: moreUntil == null ? moreLog.hasMore : true,
                startingCursor: last(log.commits)?.[0],
                endingCursor: moreLog.endingCursor,
                pagedCommits: () => {
                    // Remove any duplicates
                    for (const sha of log.commits.keys()) {
                        moreLog.commits.delete(sha);
                    }
                    return moreLog.commits;
                },
                query: (limit: number | undefined) => this.getLog(log.repoPath, { ...options, limit: limit }),
            };
            if (mergedLog.hasMore) {
                mergedLog.more = this.getLogMoreFn(mergedLog, options);
            }

            return mergedLog;
        } while (true);
    };
}
/**
 * Gets the commit log for a single file (or folder glob), with promise-level
 * caching on the tracked document. When a partial log is requested, it first
 * tries to satisfy the request from a cached full-file log by slicing it at
 * the requested commit.
 */
@log()
async getLogForFile(
    repoPath: string | undefined,
    pathOrUri: string | Uri,
    options?: {
        all?: boolean;
        cursor?: string;
        force?: boolean | undefined;
        limit?: number;
        ordering?: 'date' | 'author-date' | 'topo' | null;
        range?: Range;
        ref?: string;
        renames?: boolean;
        reverse?: boolean;
        since?: string;
        skip?: number;
    },
): Promise<GitLog | undefined> {
    if (repoPath == null) return undefined;

    const scope = getLogScope();

    const relativePath = this.getRelativePath(pathOrUri, repoPath);

    if (repoPath != null && repoPath === relativePath) {
        throw new Error(`File name cannot match the repository path; path=${relativePath}`);
    }

    const opts: typeof options & Parameters<LocalGitProvider['getLogForFileCore']>[2] = {
        reverse: false,
        ...options,
    };

    // Fill unset options from configuration defaults
    if (opts.renames == null) {
        opts.renames = configuration.get('advanced.fileHistoryFollowsRenames');
    }
    if (opts.merges == null) {
        opts.merges = configuration.get('advanced.fileHistoryShowMergeCommits');
    }

    // Build a cache key encoding every option that affects the result
    let key = 'log';
    if (opts.ref != null) {
        key += `:${opts.ref}`;
    }

    if (opts.all == null) {
        opts.all = configuration.get('advanced.fileHistoryShowAllBranches');
    }
    if (opts.all) {
        key += ':all';
    }

    opts.limit = opts.limit ?? configuration.get('advanced.maxListItems') ?? 0;
    if (opts.limit) {
        key += `:n${opts.limit}`;
    }

    if (opts.merges) {
        key += ':merges';
    }

    if (opts.renames) {
        key += ':follow';
    }

    if (opts.reverse) {
        key += ':reverse';
    }

    if (opts.since) {
        key += `:since=${opts.since}`;
    }

    if (opts.skip) {
        key += `:skip${opts.skip}`;
    }

    const doc = await this.container.documentTracker.getOrAdd(GitUri.fromFile(relativePath, repoPath, opts.ref));
    if (!opts.force && this.useCaching && opts.range == null) {
        if (doc.state != null) {
            const cachedLog = doc.state.getLog(key);
            if (cachedLog != null) {
                Logger.debug(scope, `Cache hit: '${key}'`);
                return cachedLog.item;
            }

            if (opts.ref != null || (opts.limit != null && opts.limit !== 0)) {
                // Since we are looking for partial log, see if we have the log of the whole file
                const cachedLog = doc.state.getLog(
                    `log${opts.renames ? ':follow' : ''}${opts.reverse ? ':reverse' : ''}`,
                );
                if (cachedLog != null) {
                    if (opts.ref == null) {
                        Logger.debug(scope, `Cache hit: ~'${key}'`);
                        return cachedLog.item;
                    }

                    Logger.debug(scope, `Cache ?: '${key}'`);
                    let log = await cachedLog.item;
                    if (log != null && !log.hasMore && log.commits.has(opts.ref)) {
                        Logger.debug(scope, `Cache hit: '${key}'`);

                        // Create a copy of the log starting at the requested commit
                        let skip = true;
                        let i = 0;
                        const commits = new Map(
                            filterMapIterable<[string, GitCommit], [string, GitCommit]>(
                                log.commits.entries(),
                                ([ref, c]) => {
                                    // Skip entries until the requested starting commit is reached
                                    if (skip) {
                                        if (ref !== opts?.ref) return undefined;
                                        skip = false;
                                    }

                                    i++;
                                    // Then take at most `limit` entries
                                    if (opts?.limit != null && i > opts.limit) {
                                        return undefined;
                                    }

                                    return [ref, c];
                                },
                            ),
                        );

                        const optsCopy = { ...opts };
                        log = {
                            ...log,
                            limit: optsCopy.limit,
                            count: commits.size,
                            commits: commits,
                            query: (limit: number | undefined) =>
                                this.getLogForFile(repoPath, pathOrUri, { ...optsCopy, limit: limit }),
                        };

                        return log;
                    }
                }
            }
        }

        Logger.debug(scope, `Cache miss: '${key}'`);

        doc.state ??= new GitDocumentState();
    }

    const promise = this.getLogForFileCore(repoPath, relativePath, opts, doc, key, scope);

    if (doc.state != null && opts.range == null) {
        Logger.debug(scope, `Cache add: '${key}'`);

        // Cache the promise so concurrent requests share one Git invocation
        const value: CachedLog = {
            item: promise as Promise<GitLog>,
        };
        doc.state.setLog(key, value);
    }

    return promise;
}
/**
 * Runs and parses `git log` for a single file. Untracked files yield an
 * empty log; when the working tree shows no history, a rename is checked.
 * Expected errors are cached as empty results on the document.
 */
private async getLogForFileCore(
    repoPath: string | undefined,
    path: string,
    {
        ref,
        range,
        ...options
    }: {
        all?: boolean;
        cursor?: string;
        limit?: number;
        merges?: boolean;
        ordering?: 'date' | 'author-date' | 'topo' | null;
        range?: Range;
        ref?: string;
        renames?: boolean;
        reverse?: boolean;
        since?: string;
        skip?: number;
    },
    document: TrackedGitDocument,
    key: string,
    scope: LogScope | undefined,
): Promise<GitLog | undefined> {
    const paths = await this.isTrackedWithDetails(path, repoPath, ref);
    if (paths == null) {
        Logger.log(scope, `Skipping log; '${path}' is not tracked`);
        return emptyPromise as Promise<GitLog>;
    }

    const [relativePath, root] = paths;

    try {
        // Normalize an inverted selection so start <= end
        if (range != null && range.start.line > range.end.line) {
            range = new Range(range.end, range.start);
        }

        let data = await this.git.log__file(root, relativePath, ref, {
            ordering: configuration.get('advanced.commitOrdering'),
            ...options,
            // Git line ranges are 1-based; editor Ranges are 0-based
            startLine: range == null ? undefined : range.start.line + 1,
            endLine: range == null ? undefined : range.end.line + 1,
        });

        // If we didn't find any history from the working tree, check to see if the file was renamed
        if (!data && ref == null) {
            const status = await this.getStatusForFile(root, relativePath);
            if (status?.originalPath != null) {
                data = await this.git.log__file(root, status.originalPath, ref, {
                    ordering: configuration.get('advanced.commitOrdering'),
                    ...options,
                    startLine: range == null ? undefined : range.start.line + 1,
                    endLine: range == null ? undefined : range.end.line + 1,
                });
            }
        }

        const log = parseGitLog(
            this.container,
            data,
            // If this is the log of a folder, parse it as a normal log rather than a file log
            isFolderGlob(relativePath) ? LogType.Log : LogType.LogFile,
            root,
            relativePath,
            ref,
            await this.getCurrentUser(root),
            options.limit,
            options.reverse ?? false,
            range,
        );

        if (log != null) {
            const opts = { ...options, ref: ref, range: range };
            log.query = (limit: number | undefined) =>
                this.getLogForFile(repoPath, path, { ...opts, limit: limit });
            if (log.hasMore) {
                log.more = this.getLogForFileMoreFn(log, path, opts);
            }
        }

        return log;
    } catch (ex) {
        // Trap and cache expected log errors
        if (document.state != null && range == null && !options.reverse) {
            const msg: string = ex?.toString() ?? '';
            Logger.debug(scope, `Cache replace (with empty promise): '${key}'`);

            const value: CachedLog = {
                item: emptyPromise as Promise<GitLog>,
                errorMessage: msg,
            };
            document.state.setLog(key, value);

            return emptyPromise as Promise<GitLog>;
        }

        return undefined;
    }
}
/**
 * Builds the paging function for a file log's `more` property, following
 * renames across pages and merging pages without duplicates.
 */
private getLogForFileMoreFn(
    log: GitLog,
    relativePath: string,
    options: {
        all?: boolean;
        limit?: number;
        ordering?: 'date' | 'author-date' | 'topo' | null;
        range?: Range;
        ref?: string;
        renames?: boolean;
        reverse?: boolean;
    },
): (limit: number | { until: string } | undefined) => Promise<GitLog> {
    return async (limit: number | { until: string } | undefined) => {
        const moreUntil = limit != null && typeof limit === 'object' ? limit.until : undefined;
        let moreLimit = typeof limit === 'number' ? limit : undefined;

        // Already have the target commit; nothing more to load
        if (moreUntil && some(log.commits.values(), c => c.ref === moreUntil)) {
            return log;
        }

        moreLimit = moreLimit ?? configuration.get('advanced.maxSearchItems') ?? 0;

        const commit = last(log.commits.values());
        let ref;
        if (commit != null) {
            ref = commit.ref;
            // Check to make sure the filename hasn't changed and if it has use the previous
            if (commit.file != null) {
                const path = commit.file.originalPath ?? commit.file.path;
                if (path !== relativePath) {
                    relativePath = path;
                }
            }
        }

        const moreLog = await this.getLogForFile(log.repoPath, relativePath, {
            ...options,
            limit: moreUntil == null ? moreLimit : 0,
            ref: options.all ? undefined : moreUntil == null ? `${ref}^` : `${moreUntil}^..${ref}^`,
            skip: options.all ? log.count : undefined,
        });
        // If we can't find any more, assume we have everything
        if (moreLog == null) return { ...log, hasMore: false, more: undefined };

        const commits = new Map([...log.commits, ...moreLog.commits]);

        const mergedLog: GitLog = {
            repoPath: log.repoPath,
            commits: commits,
            sha: log.sha,
            range: log.range,
            count: commits.size,
            limit: moreUntil == null ? (log.limit ?? 0) + moreLimit : undefined,
            hasMore: moreUntil == null ? moreLog.hasMore : true,
            query: (limit: number | undefined) =>
                this.getLogForFile(log.repoPath, relativePath, { ...options, limit: limit }),
        };

        if (options.renames) {
            // If a rename boundary was crossed in this page, continue paging under the older name
            const renamed = find(
                moreLog.commits.values(),
                c => Boolean(c.file?.originalPath) && c.file?.originalPath !== relativePath,
            );
            relativePath = renamed?.file?.originalPath ?? relativePath;
        }

        if (mergedLog.hasMore) {
            mergedLog.more = this.getLogForFileMoreFn(mergedLog, relativePath, options);
        }

        return mergedLog;
    };
}
/** Gets the merge-base sha of two refs (or the fork point when requested), or undefined. */
@log()
async getMergeBase(repoPath: string, ref1: string, ref2: string, options?: { forkPoint?: boolean }) {
    const scope = getLogScope();

    try {
        const data = await this.git.merge_base(repoPath, ref1, ref2, options);
        if (data == null) return undefined;

        // Only the first output line holds the sha; empty means no merge-base found
        const [firstLine] = data.split('\n');
        return firstLine.trim() || undefined;
    } catch (ex) {
        Logger.error(ex, scope);
        return undefined;
    }
}
/**
 * Gets the in-progress merge state (if any) by checking for MERGE_HEAD,
 * caching the resulting promise per repository.
 */
@log()
async getMergeStatus(repoPath: string): Promise<GitMergeStatus | undefined> {
    let status = this.useCaching ? this._mergeStatusCache.get(repoPath) : undefined;
    if (status == null) {
        async function getCore(this: LocalGitProvider): Promise<GitMergeStatus | undefined> {
            // MERGE_HEAD only resolves while a merge is in progress
            const merge = await this.git.rev_parse__verify(repoPath, 'MERGE_HEAD');
            if (merge == null) return undefined;

            const [branchResult, mergeBaseResult, possibleSourceBranchesResult] = await Promise.allSettled([
                this.getBranch(repoPath),
                this.getMergeBase(repoPath, 'MERGE_HEAD', 'HEAD'),
                this.getCommitBranches(repoPath, ['MERGE_HEAD'], undefined, { all: true, mode: 'pointsAt' }),
            ]);

            const branch = getSettledValue(branchResult);
            const mergeBase = getSettledValue(mergeBaseResult);
            const possibleSourceBranches = getSettledValue(possibleSourceBranchesResult);

            return {
                type: 'merge',
                repoPath: repoPath,
                mergeBase: mergeBase,
                HEAD: createReference(merge, repoPath, { refType: 'revision' }),
                current: getReferenceFromBranch(branch!),
                // Only report an incoming branch when it is unambiguous
                incoming:
                    possibleSourceBranches?.length === 1
                        ? createReference(possibleSourceBranches[0], repoPath, {
                              refType: 'branch',
                              name: possibleSourceBranches[0],
                              remote: false,
                          })
                        : undefined,
            } satisfies GitMergeStatus;
        }

        status = getCore.call(this);
        if (this.useCaching) {
            // Cache the promise so concurrent callers share one check
            this._mergeStatusCache.set(repoPath, status);
        }
    }

    return status;
}
@log()
async getRebaseStatus(repoPath: string): Promise<GitRebaseStatus | undefined> {
	// Describes an in-progress rebase (if any) by reading state files out of the .git dir;
	// the promise is cached per-repo
	let status = this.useCaching ? this._rebaseStatusCache.get(repoPath) : undefined;
	if (status == null) {
		async function getCore(this: LocalGitProvider): Promise<GitRebaseStatus | undefined> {
			const gitDir = await this.getGitDir(repoPath);
			// An interactive/merge rebase writes `rebase-merge`; an am-based rebase writes `rebase-apply`
			const [rebaseMergeHeadResult, rebaseApplyHeadResult] = await Promise.allSettled([
				this.git.readDotGitFile(gitDir, ['rebase-merge', 'head-name']),
				this.git.readDotGitFile(gitDir, ['rebase-apply', 'head-name']),
			]);
			const rebaseMergeHead = getSettledValue(rebaseMergeHeadResult);
			const rebaseApplyHead = getSettledValue(rebaseApplyHeadResult);

			// The branch being rebased (from whichever rebase mode is active); none → no rebase
			let branch = rebaseApplyHead ?? rebaseMergeHead;
			if (branch == null) return undefined;

			const path = rebaseApplyHead != null ? 'rebase-apply' : 'rebase-merge';

			const [
				rebaseHeadResult,
				origHeadResult,
				ontoResult,
				stepsNumberResult,
				stepsTotalResult,
				stepsMessageResult,
			] = await Promise.allSettled([
				this.git.rev_parse__verify(repoPath, 'REBASE_HEAD'),
				this.git.readDotGitFile(gitDir, [path, 'orig-head']),
				this.git.readDotGitFile(gitDir, [path, 'onto']),
				this.git.readDotGitFile(gitDir, [path, 'msgnum'], { numeric: true }),
				this.git.readDotGitFile(gitDir, [path, 'end'], { numeric: true }),
				// Fall back to the squashed message if the plain message file is missing
				this.git
					.readDotGitFile(gitDir, [path, 'message'], { throw: true })
					.catch(() => this.git.readDotGitFile(gitDir, [path, 'message-squashed'])),
			]);

			const origHead = getSettledValue(origHeadResult);
			const onto = getSettledValue(ontoResult);
			if (origHead == null || onto == null) return undefined;

			let mergeBase;
			const rebaseHead = getSettledValue(rebaseHeadResult);
			if (rebaseHead != null) {
				mergeBase = await this.getMergeBase(repoPath, rebaseHead, 'HEAD');
			} else {
				mergeBase = await this.getMergeBase(repoPath, onto, origHead);
			}

			// Strip the `refs/heads/` prefix to get the plain branch name
			if (branch.startsWith('refs/heads/')) {
				branch = branch.substr(11).trim();
			}

			const [branchTipsResult, tagTipsResult] = await Promise.allSettled([
				this.getCommitBranches(repoPath, [onto], undefined, { all: true, mode: 'pointsAt' }),
				this.getCommitTags(repoPath, onto, { mode: 'pointsAt' }),
			]);

			const branchTips = getSettledValue(branchTipsResult);
			const tagTips = getSettledValue(tagTipsResult);

			// Prefer a branch pointing at `onto`, then a tag; skip the detached-rebase pseudo entry
			let ontoRef: GitBranchReference | GitTagReference | undefined;
			if (branchTips != null) {
				for (const ref of branchTips) {
					if (ref.startsWith('(no branch, rebasing')) continue;

					ontoRef = createReference(ref, repoPath, {
						refType: 'branch',
						name: ref,
						remote: false,
					});
					break;
				}
			}
			if (ontoRef == null && tagTips != null) {
				for (const ref of tagTips) {
					if (ref.startsWith('(no branch, rebasing')) continue;

					ontoRef = createReference(ref, repoPath, {
						refType: 'tag',
						name: ref,
					});
					break;
				}
			}

			return {
				type: 'rebase',
				repoPath: repoPath,
				mergeBase: mergeBase,
				HEAD: createReference(rebaseHead ?? origHead, repoPath, { refType: 'revision' }),
				onto: createReference(onto, repoPath, { refType: 'revision' }),
				current: ontoRef,
				incoming: createReference(branch, repoPath, {
					refType: 'branch',
					name: branch,
					remote: false,
				}),
				steps: {
					current: {
						number: getSettledValue(stepsNumberResult) ?? 0,
						commit:
							rebaseHead != null
								? createReference(rebaseHead, repoPath, {
										refType: 'revision',
										message: getSettledValue(stepsMessageResult),
								  })
								: undefined,
					},
					total: getSettledValue(stepsTotalResult) ?? 0,
				},
			} satisfies GitRebaseStatus;
		}

		status = getCore.call(this);
		if (this.useCaching) {
			this._rebaseStatusCache.set(repoPath, status);
		}
	}

	return status;
}
@log()
async getNextComparisonUris(
	repoPath: string,
	uri: Uri,
	ref: string | undefined,
	skip: number = 0,
): Promise<NextComparisonUrisResult | undefined> {
	// Computes the "current" and "next" URIs when stepping forward in a file's history
	// If we have no ref (or staged ref) there is no next commit
	if (!ref) return undefined;

	const relativePath = this.getRelativePath(uri, repoPath);

	// From the staged version, the next step is the working tree
	if (isUncommittedStaged(ref)) {
		return {
			current: GitUri.fromFile(relativePath, repoPath, ref),
			next: GitUri.fromFile(relativePath, repoPath, undefined),
		};
	}

	const next = await this.getNextUri(repoPath, uri, ref, skip);
	if (next == null) {
		const status = await this.getStatusForFile(repoPath, uri);
		if (status != null) {
			// If the file is staged, diff with the staged version
			if (status.indexStatus != null) {
				return {
					current: GitUri.fromFile(relativePath, repoPath, ref),
					next: GitUri.fromFile(relativePath, repoPath, uncommittedStaged),
				};
			}
		}

		// No newer commit and nothing staged, so the next step is the working tree
		return {
			current: GitUri.fromFile(relativePath, repoPath, ref),
			next: GitUri.fromFile(relativePath, repoPath, undefined),
		};
	}

	return {
		// When skipping, "current" is resolved one step earlier than the skip target
		current:
			skip === 0
				? GitUri.fromFile(relativePath, repoPath, ref)
				: (await this.getNextUri(repoPath, uri, ref, skip - 1))!,
		next: next,
	};
}
@log()
private async getNextUri(
	repoPath: string,
	uri: Uri,
	ref?: string,
	skip: number = 0,
	// editorLine?: number
): Promise<GitUri | undefined> {
	// Resolves the URI of the next (newer) commit touching this file, following renames
	// If we have no ref (or staged ref) there is no next commit
	if (!ref || isUncommittedStaged(ref)) return undefined;

	let filters: GitDiffFilter[] | undefined;
	if (ref === deletedOrMissing) {
		// If we are trying to move next from a deleted or missing ref then get the first commit
		ref = undefined;
		filters = ['A'];
	}

	const relativePath = this.getRelativePath(uri, repoPath);
	// `reverse: true` walks history oldest→newest so "next" means the following commit
	let data = await this.git.log__file(repoPath, relativePath, ref, {
		argsOrFormat: parseGitLogSimpleFormat,
		fileMode: 'simple',
		filters: filters,
		limit: skip + 1,
		ordering: configuration.get('advanced.commitOrdering'),
		reverse: true,
		// startLine: editorLine != null ? editorLine + 1 : undefined,
	});
	if (data == null || data.length === 0) return undefined;

	const [nextRef, file, status] = parseGitLogSimple(data, skip);
	// If the file was deleted, check for a possible rename
	if (status === 'D') {
		// Look for renames/copies (R/C) in the commit that "deleted" the file
		data = await this.git.log__file(repoPath, '.', nextRef, {
			argsOrFormat: parseGitLogSimpleFormat,
			fileMode: 'simple',
			filters: ['R', 'C'],
			limit: 1,
			ordering: configuration.get('advanced.commitOrdering'),
			// startLine: editorLine != null ? editorLine + 1 : undefined
		});
		if (data == null || data.length === 0) {
			return GitUri.fromFile(file ?? relativePath, repoPath, nextRef);
		}

		const [nextRenamedRef, renamedFile] = parseGitLogSimpleRenamed(data, file ?? relativePath);
		return GitUri.fromFile(
			renamedFile ?? file ?? relativePath,
			repoPath,
			nextRenamedRef ?? nextRef ?? deletedOrMissing,
		);
	}

	return GitUri.fromFile(file ?? relativePath, repoPath, nextRef);
}
@log()
async getOldestUnpushedRefForFile(repoPath: string, uri: Uri): Promise<string | undefined> {
const [relativePath, root] = splitPath(uri, repoPath);
const data = await this.git.log__file(root, relativePath, '@{u}..', {
argsOrFormat: ['-z', '--format=%H'],
fileMode: 'none',
ordering: configuration.get('advanced.commitOrdering'),
renames: true,
});
if (!data) return undefined;
// -2 to skip the ending null
const index = data.lastIndexOf('\0', data.length - 2);
return index === -1 ? undefined : data.slice(index + 1, data.length - 2);
}
@log()
async getPreviousComparisonUris(
	repoPath: string,
	uri: Uri,
	ref: string | undefined,
	skip: number = 0,
): Promise<PreviousComparisonUrisResult | undefined> {
	// Computes the "current" and "previous" URIs when stepping backward in a file's history,
	// walking working tree → staged (index) → HEAD → prior commits
	if (ref === deletedOrMissing) return undefined;

	const relativePath = this.getRelativePath(uri, repoPath);

	// If we are at the working tree (i.e. no ref), we need to dig deeper to figure out where to go
	if (!ref) {
		// First, check the file status to see if there is anything staged
		const status = await this.getStatusForFile(repoPath, uri);
		if (status != null) {
			// If the file is staged with working changes, diff working with staged (index)
			// If the file is staged without working changes, diff staged with HEAD
			if (status.indexStatus != null) {
				// Backs up to get to HEAD
				if (status.workingTreeStatus == null) {
					skip++;
				}

				if (skip === 0) {
					// Diff working with staged
					return {
						current: GitUri.fromFile(relativePath, repoPath, undefined),
						previous: GitUri.fromFile(relativePath, repoPath, uncommittedStaged),
					};
				}

				return {
					// Diff staged with HEAD (or prior if more skips)
					current: GitUri.fromFile(relativePath, repoPath, uncommittedStaged),
					previous: await this.getPreviousUri(repoPath, uri, ref, skip - 1),
				};
			} else if (status.workingTreeStatus != null) {
				if (skip === 0) {
					return {
						current: GitUri.fromFile(relativePath, repoPath, undefined),
						previous: await this.getPreviousUri(repoPath, uri, undefined, skip),
					};
				}
			}
		} else if (skip === 0) {
			// Clean working tree: step past it straight into the commit history
			skip++;
		}
	}
	// If we are at the index (staged), diff staged with HEAD
	else if (isUncommittedStaged(ref)) {
		const current =
			skip === 0
				? GitUri.fromFile(relativePath, repoPath, ref)
				: (await this.getPreviousUri(repoPath, uri, undefined, skip - 1))!;
		if (current == null || current.sha === deletedOrMissing) return undefined;

		return {
			current: current,
			previous: await this.getPreviousUri(repoPath, uri, undefined, skip),
		};
	}

	// If we are at a commit, diff commit with previous
	const current =
		skip === 0
			? GitUri.fromFile(relativePath, repoPath, ref)
			: (await this.getPreviousUri(repoPath, uri, ref, skip - 1))!;
	if (current == null || current.sha === deletedOrMissing) return undefined;

	return {
		current: current,
		previous: await this.getPreviousUri(repoPath, uri, ref, skip),
	};
}
@log()
async getPreviousComparisonUrisForLine(
	repoPath: string,
	uri: Uri,
	editorLine: number, // 0-based, Git is 1-based
	ref: string | undefined,
	skip: number = 0,
): Promise<PreviousLineComparisonUrisResult | undefined> {
	// Line-aware variant of getPreviousComparisonUris: uses blame to follow a specific
	// line backward through history, retargeting to the line's original file/position
	if (ref === deletedOrMissing) return undefined;

	let relativePath = this.getRelativePath(uri, repoPath);

	let previous;

	// If we are at the working tree (i.e. no ref), we need to dig deeper to figure out where to go
	if (!ref) {
		// First, check the blame on the current line to see if there are any working/staged changes
		const gitUri = new GitUri(uri, repoPath);

		// Blame the buffer contents when dirty so unsaved edits are reflected
		const document = await workspace.openTextDocument(uri);
		const blameLine = document.isDirty
			? await this.getBlameForLineContents(gitUri, editorLine, document.getText())
			: await this.getBlameForLine(gitUri, editorLine);
		if (blameLine == null) return undefined;

		// If line is uncommitted, we need to dig deeper to figure out where to go (because blame can't be trusted)
		if (blameLine.commit.isUncommitted) {
			// Check the file status to see if there is anything staged
			const status = await this.getStatusForFile(repoPath, uri);
			if (status != null) {
				// If the file is staged, diff working with staged (index)
				// If the file is not staged, diff working with HEAD
				if (status.indexStatus != null) {
					// Diff working with staged
					return {
						current: GitUri.fromFile(relativePath, repoPath, undefined),
						previous: GitUri.fromFile(relativePath, repoPath, uncommittedStaged),
						line: editorLine,
					};
				}
			}

			// Diff working with HEAD (or prior if more skips)
			return {
				current: GitUri.fromFile(relativePath, repoPath, undefined),
				previous: await this.getPreviousUri(repoPath, uri, undefined, skip, editorLine),
				line: editorLine,
			};
		}

		// If line is committed, diff with line ref with previous
		// Retarget to the blamed commit/file/line (follows renames and line movement)
		ref = blameLine.commit.sha;
		relativePath = blameLine.commit.file?.path ?? blameLine.commit.file?.originalPath ?? relativePath;
		uri = this.getAbsoluteUri(relativePath, repoPath);
		editorLine = blameLine.line.originalLine - 1;

		if (skip === 0 && blameLine.commit.file?.previousSha) {
			previous = GitUri.fromFile(relativePath, repoPath, blameLine.commit.file.previousSha);
		}
	} else {
		if (isUncommittedStaged(ref)) {
			const current =
				skip === 0
					? GitUri.fromFile(relativePath, repoPath, ref)
					: (await this.getPreviousUri(repoPath, uri, undefined, skip - 1, editorLine))!;
			if (current.sha === deletedOrMissing) return undefined;

			return {
				current: current,
				previous: await this.getPreviousUri(repoPath, uri, undefined, skip, editorLine),
				line: editorLine,
			};
		}

		const gitUri = new GitUri(uri, { repoPath: repoPath, sha: ref });
		const blameLine = await this.getBlameForLine(gitUri, editorLine);
		if (blameLine == null) return undefined;

		// Diff with line ref with previous
		// Retarget to the blamed commit/file/line (follows renames and line movement)
		ref = blameLine.commit.sha;
		relativePath = blameLine.commit.file?.path ?? blameLine.commit.file?.originalPath ?? relativePath;
		uri = this.getAbsoluteUri(relativePath, repoPath);
		editorLine = blameLine.line.originalLine - 1;

		if (skip === 0 && blameLine.commit.file?.previousSha) {
			previous = GitUri.fromFile(relativePath, repoPath, blameLine.commit.file.previousSha);
		}
	}

	const current =
		skip === 0
			? GitUri.fromFile(relativePath, repoPath, ref)
			: (await this.getPreviousUri(repoPath, uri, ref, skip - 1, editorLine))!;
	if (current.sha === deletedOrMissing) return undefined;

	return {
		current: current,
		previous: previous ?? (await this.getPreviousUri(repoPath, uri, ref, skip, editorLine)),
		line: editorLine,
	};
}
@log()
private async getPreviousUri(
	repoPath: string,
	uri: Uri,
	ref?: string,
	skip: number = 0,
	editorLine?: number,
): Promise<GitUri | undefined> {
	// Resolves the URI of the previous (older) commit touching this file (optionally a specific line)
	if (ref === deletedOrMissing) return undefined;

	const scope = getLogScope();

	// Treat the working-tree pseudo-ref the same as "no ref" (log from HEAD)
	if (ref === uncommitted) {
		ref = undefined;
	}

	const relativePath = this.getRelativePath(uri, repoPath);

	// TODO: Add caching
	let data;
	try {
		// limit skip+2: the first entry is the starting commit itself, so the
		// (skip+1)th following entry is the "previous" one we want
		data = await this.git.log__file(repoPath, relativePath, ref, {
			argsOrFormat: parseGitLogSimpleFormat,
			fileMode: 'simple',
			limit: skip + 2,
			ordering: configuration.get('advanced.commitOrdering'),
			startLine: editorLine != null ? editorLine + 1 : undefined,
		});
	} catch (ex) {
		const msg: string = ex?.toString() ?? '';
		// If the line count is invalid just fallback to the most recent commit
		if ((ref == null || isUncommittedStaged(ref)) && GitErrors.invalidLineCount.test(msg)) {
			if (ref == null) {
				const status = await this.getStatusForFile(repoPath, uri);
				if (status?.indexStatus != null) {
					return GitUri.fromFile(relativePath, repoPath, uncommittedStaged);
				}
			}

			ref = await this.git.log__file_recent(repoPath, relativePath, {
				ordering: configuration.get('advanced.commitOrdering'),
			});
			return GitUri.fromFile(relativePath, repoPath, ref ?? deletedOrMissing);
		}

		Logger.error(ex, scope);
		throw ex;
	}
	if (data == null || data.length === 0) return undefined;

	const [previousRef, file] = parseGitLogSimple(data, skip, ref);
	// If the previous ref matches the ref we asked for assume we are at the end of the history
	if (ref != null && ref === previousRef) return undefined;

	return GitUri.fromFile(file ?? relativePath, repoPath, previousRef ?? deletedOrMissing);
}
@log()
async getIncomingActivity(
	repoPath: string,
	options?: {
		all?: boolean;
		branch?: string;
		limit?: number;
		ordering?: 'date' | 'author-date' | 'topo' | null;
		skip?: number;
	},
): Promise<GitReflog | undefined> {
	// Walks the reflog to surface incoming activity; returns undefined on failure
	const scope = getLogScope();

	const args = ['--walk-reflogs', `--format=${parseGitRefLogDefaultFormat}`, '--date=iso8601'];

	const ordering = options?.ordering ?? configuration.get('advanced.commitOrdering');
	if (ordering) {
		args.push(`--${ordering}-order`);
	}

	if (options?.all) {
		args.push('--all');
	}

	// Pass a much larger limit to reflog, because we aggregate the data and we won't know how many lines we'll need
	const limit = (options?.limit ?? configuration.get('advanced.maxListItems') ?? 0) * 100;
	if (limit) {
		args.push(`-n${limit}`);
	}

	if (options?.skip) {
		args.push(`--skip=${options.skip}`);
	}

	try {
		const data = await this.git.log(repoPath, undefined, ...args);
		if (data == null) return undefined;

		// NOTE(review): `limit` is already 100x the requested limit, so `limit * 100` passes a
		// 10000x upper bound to the parser — presumably intentional headroom, but worth confirming
		const reflog = parseGitRefLog(data, repoPath, reflogCommands, limit, limit * 100);
		if (reflog?.hasMore) {
			// Wire up the paging continuation so callers can load more on demand
			reflog.more = this.getReflogMoreFn(reflog, options);
		}

		return reflog;
	} catch (ex) {
		Logger.error(ex, scope);
		return undefined;
	}
}
private getReflogMoreFn(
	reflog: GitReflog,
	options?: {
		all?: boolean;
		branch?: string;
		limit?: number;
		ordering?: 'date' | 'author-date' | 'topo' | null;
		skip?: number;
	},
): (limit: number) => Promise<GitReflog> {
	// Builds the paging continuation for a reflog: fetches the next page and merges it
	// with the records already loaded
	return async (limit: number | undefined) => {
		const pageSize = limit ?? configuration.get('advanced.maxSearchItems') ?? 0;

		const next = await this.getIncomingActivity(reflog.repoPath, {
			...options,
			limit: pageSize,
			skip: reflog.total,
		});
		// If we can't find any more, assume we have everything
		if (next == null) return { ...reflog, hasMore: false, more: undefined };

		const merged: GitReflog = {
			repoPath: reflog.repoPath,
			records: [...reflog.records, ...next.records],
			count: reflog.count + next.count,
			total: reflog.total + next.total,
			limit: (reflog.limit ?? 0) + pageSize,
			hasMore: next.hasMore,
		};
		if (merged.hasMore) {
			// Chain another continuation off the merged result
			merged.more = this.getReflogMoreFn(merged, options);
		}

		return merged;
	};
}
@log({ args: { 1: false } })
async getRemotes(repoPath: string | undefined, options?: { sort?: boolean }): Promise<GitRemote[]> {
	// Gets the repo's remotes (with their matched rich providers); the promise is cached per-repo
	if (repoPath == null) return [];

	const scope = getLogScope();

	let remotesPromise = this.useCaching ? this._remotesCache.get(repoPath) : undefined;
	if (remotesPromise == null) {
		async function load(this: LocalGitProvider): Promise<GitRemote[]> {
			// Resolve the configured remote providers, scoped to this repo's workspace folder
			const providers = loadRemoteProviders(
				configuration.get('remotes', this.container.git.getRepository(repoPath!)?.folder?.uri ?? null),
			);

			try {
				const data = await this.git.remote(repoPath!);
				const remotes = parseGitRemotes(
					this.container,
					data,
					repoPath!,
					getRemoteProviderMatcher(this.container, providers),
				);
				return remotes;
			} catch (ex) {
				// Don't cache failures
				this._remotesCache.delete(repoPath!);
				Logger.error(ex, scope);
				return [];
			}
		}

		remotesPromise = load.call(this);

		if (this.useCaching) {
			this._remotesCache.set(repoPath, remotesPromise);
		}
	}

	const remotes = await remotesPromise;
	if (options?.sort) {
		// Sorts in place
		sortRemotes(remotes);
	}

	return remotes;
}
@gate()
@log()
getRevisionContent(repoPath: string, path: string, ref: string): Promise<Uint8Array | undefined> {
	// Reads the raw (binary-safe) contents of the file as it exists at the given revision
	const [file, root] = splitPath(path, repoPath);
	return this.git.show<Buffer>(root, file, ref, { encoding: 'buffer' });
}
@gate()
@log()
async getStash(repoPath: string | undefined): Promise<GitStash | undefined> {
	// Gets all stashes (as pseudo-commits) for the repo; cached per-repo
	// (`null` is cached to distinguish "no stash" from "not yet loaded")
	if (repoPath == null) return undefined;

	let stash = this.useCaching ? this._stashesCache.get(repoPath) : undefined;
	if (stash === undefined) {
		const parser = createLogParserWithFiles<{
			sha: string;
			date: string;
			committedDate: string;
			parents: string;
			stashName: string;
			summary: string;
		}>({
			sha: '%H',
			// %at/%ct are unix-epoch seconds
			date: '%at',
			committedDate: '%ct',
			parents: '%P',
			// %gd: reflog selector (e.g. stash@{0}); %gs: reflog subject (e.g. "WIP on main: ...")
			stashName: '%gd',
			summary: '%gs',
		});
		const data = await this.git.stash__list(repoPath, {
			args: parser.arguments,
			similarityThreshold: configuration.get('advanced.similarityThreshold'),
		});

		const commits = new Map<string, GitStashCommit>();

		const stashes = parser.parse(data);
		for (const s of stashes) {
			let onRef;
			let summary;
			let message;

			// Split the reflog subject into the branch it was stashed on and the actual message
			const match = stashSummaryRegex.exec(s.summary);
			if (match?.groups != null) {
				onRef = match.groups.onref;
				summary = match.groups.summary.trim();

				if (summary.length === 0) {
					message = 'WIP';
				} else if (match.groups.wip) {
					message = `WIP: ${summary}`;
				} else {
					message = summary;
				}
			} else {
				message = s.summary.trim();
			}

			commits.set(
				s.sha,
				new GitCommit(
					this.container,
					repoPath,
					s.sha,
					// Stashes are always authored by the current user; dates come in as epoch-second strings
					new GitCommitIdentity('You', undefined, new Date((s.date as any) * 1000)),
					new GitCommitIdentity('You', undefined, new Date((s.committedDate as any) * 1000)),
					message.split('\n', 1)[0] ?? '',
					s.parents.split(' '),
					message,
					s.files?.map(
						f => new GitFileChange(repoPath, f.path, f.status as GitFileStatus, f.originalPath),
					) ?? [],
					undefined,
					[],
					undefined,
					s.stashName,
					onRef,
				) as GitStashCommit,
			);
		}

		stash = { repoPath: repoPath, commits: commits };

		if (this.useCaching) {
			this._stashesCache.set(repoPath, stash ?? null);
		}
	}

	return stash ?? undefined;
}
@log()
async getStatusForFile(repoPath: string, pathOrUri: string | Uri): Promise<GitStatusFile | undefined> {
	// Looks up the working-tree/index status entry for a single file, if it has changes
	const status = await this.getStatusForRepo(repoPath);
	if (!status?.files.length) return undefined;

	const [relativePath] = splitPath(pathOrUri, repoPath);
	return status.files.find(f => f.path === relativePath);
}
@log()
async getStatusForFiles(repoPath: string, pathOrGlob: Uri): Promise<GitStatusFile[] | undefined> {
	// Supports a trailing `/*` glob to return all status entries under a folder;
	// otherwise behaves like a single-file lookup
	let [relativePath] = splitPath(pathOrGlob, repoPath);
	if (!relativePath.endsWith('/*')) {
		const file = await this.getStatusForFile(repoPath, pathOrGlob);
		return file != null ? [file] : undefined;
	}

	// Drop only the `*`, keeping the trailing `/` so the prefix match stays folder-scoped
	relativePath = relativePath.substring(0, relativePath.length - 1);

	const status = await this.getStatusForRepo(repoPath);
	if (!status?.files.length) return undefined;

	return status.files.filter(f => f.path.startsWith(relativePath));
}
@log()
async getStatusForRepo(repoPath: string | undefined): Promise<GitStatus | undefined> {
	// Gets the full working-tree status; when HEAD is detached because of an in-progress
	// rebase, re-labels the status with the branch actually being rebased
	if (repoPath == null) return undefined;

	// Porcelain v2 (richer output) requires Git 2.11+
	const version = (await this.git.isAtLeastVersion('2.11')) ? 2 : 1;
	const data = await this.git.status(repoPath, version, {
		similarityThreshold: configuration.get('advanced.similarityThreshold') ?? undefined,
	});

	const status = parseGitStatus(data, repoPath, version);
	if (!status?.detached) return status;

	const rebase = await this.getRebaseStatus(repoPath);
	if (rebase == null) return status;

	return new GitStatus(
		repoPath,
		rebase.incoming.name,
		status.sha,
		status.files,
		status.state,
		status.upstream,
		true,
	);
}
@log({ args: { 1: false } })
async getTags(
	repoPath: string | undefined,
	options?: {
		filter?: (t: GitTag) => boolean;
		paging?: PagingOptions;
		sort?: boolean | TagSortOptions;
	},
): Promise<PagedResult<GitTag>> {
	// Gets the repo's tags, optionally filtered and/or sorted; the promise is cached per-repo
	if (repoPath == null) return emptyPagedResult;

	const scope = getLogScope();

	let resultsPromise = this.useCaching ? this._tagsCache.get(repoPath) : undefined;
	if (resultsPromise == null) {
		async function load(this: LocalGitProvider): Promise<PagedResult<GitTag>> {
			try {
				const data = await this.git.tag(repoPath!);
				return { values: parseGitTags(data, repoPath!) };
			} catch (ex) {
				// Don't cache failures, and log them (previously swallowed silently,
				// unlike the equivalent handling in getRemotes)
				this._tagsCache.delete(repoPath!);
				Logger.error(ex, scope);
				return emptyPagedResult;
			}
		}

		resultsPromise = load.call(this);

		// Only cache the full (cursor-less) request
		if (this.useCaching && options?.paging?.cursor == null) {
			this._tagsCache.set(repoPath, resultsPromise);
		}
	}

	let result = await resultsPromise;
	if (options?.filter != null) {
		result = {
			...result,
			values: result.values.filter(options.filter),
		};
	}

	if (options?.sort) {
		// Sorts in place
		sortTags(result.values, typeof options.sort === 'boolean' ? undefined : options.sort);
	}

	return result;
}
@log()
async getTreeEntryForRevision(repoPath: string, path: string, ref: string): Promise<GitTreeEntry | undefined> {
	// Resolves a single tree entry (blob metadata) for a path at a revision.
	// The staged pseudo-ref isn't a real tree-ish, so it is handled via ls-files instead.
	if (repoPath == null || !path) return undefined;

	const [relativePath, root] = splitPath(path, repoPath);

	if (isUncommittedStaged(ref)) {
		const data = await this.git.ls_files(root, relativePath, { ref: ref });
		const [entry] = parseGitLsFiles(data);
		if (entry == null) return undefined;

		return {
			ref: ref,
			oid: entry.oid,
			path: relativePath,
			size: await this.git.cat_file__size(repoPath, entry.oid),
			type: 'blob',
		};
	}

	const [treeEntry] = parseGitTree(await this.git.ls_tree(root, ref, relativePath), ref);
	return treeEntry;
}
@log()
async getTreeForRevision(repoPath: string, ref: string): Promise<GitTreeEntry[]> {
	// Lists every tree entry reachable at the given revision (empty when no repo)
	if (repoPath == null) return [];

	const output = await this.git.ls_tree(repoPath, ref);
	return parseGitTree(output, ref);
}
@log({ args: { 1: false } })
async hasBranchOrTag(
	repoPath: string | undefined,
	options?: {
		filter?: { branches?: (b: GitBranch) => boolean; tags?: (t: GitTag) => boolean };
	},
) {
	// True when at least one branch or tag (optionally filtered) exists in the repo
	const [branchesResult, tagsResult] = await Promise.all([
		this.getBranches(repoPath, { filter: options?.filter?.branches, sort: false }),
		this.getTags(repoPath, { filter: options?.filter?.tags, sort: false }),
	]);

	return branchesResult.values.length !== 0 || tagsResult.values.length !== 0;
}
@log()
async hasCommitBeenPushed(repoPath: string, ref: string): Promise<boolean> {
if (repoPath == null) return false;
return this.git.merge_base__is_ancestor(repoPath, ref, '@{u}');
}
hasUnsafeRepositories(): boolean {
	// True when any discovered repository path was flagged as unsafe
	return this.unsafePaths.size > 0;
}
@log()
async isAncestorOf(repoPath: string, ref1: string, ref2: string): Promise<boolean> {
if (repoPath == null) return false;
return this.git.merge_base__is_ancestor(repoPath, ref1, ref2);
}
isTrackable(uri: Uri): boolean {
	// Only URIs whose scheme this provider understands can possibly be tracked
	const { scheme } = uri;
	return this.supportedSchemes.has(scheme);
}
async isTracked(uri: Uri): Promise<boolean> {
return (await this.isTrackedWithDetails(uri)) != null;
}
private async isTrackedWithDetails(uri: Uri | GitUri): Promise<[string, string] | undefined>;
private async isTrackedWithDetails(
	path: string,
	repoPath?: string,
	ref?: string,
): Promise<[string, string] | undefined>;
@log<LocalGitProvider['isTrackedWithDetails']>({
	exit: tracked => `returned ${tracked != null ? `[${tracked[0]},[${tracked[1]}]` : 'false'}`,
})
private async isTrackedWithDetails(
	pathOrUri: string | Uri | GitUri,
	repoPath?: string,
	ref?: string,
): Promise<[string, string] | undefined> {
	// Determines whether a path is tracked by Git; returns [relativePath, repoPath] when it is.
	// Results (including in-flight promises) are cached by a `ref:/path` key.
	let relativePath: string;
	let repository: Repository | undefined;

	if (typeof pathOrUri === 'string') {
		if (ref === deletedOrMissing) return undefined;

		repository = this.container.git.getRepository(Uri.file(pathOrUri));
		repoPath ||= repository?.path;

		[relativePath, repoPath] = splitPath(pathOrUri, repoPath);
	} else {
		if (!this.isTrackable(pathOrUri)) return undefined;

		if (isGitUri(pathOrUri)) {
			// Always use the ref of the GitUri
			ref = pathOrUri.sha;
			if (ref === deletedOrMissing) return undefined;
		}

		repository = this.container.git.getRepository(pathOrUri);
		repoPath = repoPath || repository?.path;

		[relativePath, repoPath] = splitPath(pathOrUri, repoPath);
	}

	const path = repoPath ? `${repoPath}/${relativePath}` : relativePath;

	// Cache key: `<ref>:/<path>` (path forced to start with `/`)
	let key = path;
	key = `${ref ?? ''}:${key.startsWith('/') ? key : `/${key}`}`;

	let tracked = this._trackedPaths.get(key);
	if (tracked != null) return tracked;

	// Cache the promise immediately so concurrent callers share one lookup,
	// then overwrite the entry with the resolved value
	tracked = this.isTrackedCore(path, relativePath, repoPath ?? '', ref, repository);
	this._trackedPaths.set(key, tracked);

	tracked = await tracked;
	this._trackedPaths.set(key, tracked);
	return tracked;
}
@debug()
private async isTrackedCore(
	path: string,
	relativePath: string,
	repoPath: string,
	ref: string | undefined,
	repository: Repository | undefined,
): Promise<[string, string] | undefined> {
	// Core tracked-file check: probes `git ls-files` at candidate repo roots, detecting
	// nested repositories along the way (the loop retries after finding one)
	if (ref === deletedOrMissing) return undefined;

	const scope = getLogScope();

	try {
		while (true) {
			if (!repoPath) {
				[relativePath, repoPath] = splitPath(path, '', true);
			}

			// Even if we have a ref, check first to see if the file exists (that way the cache will be better reused)
			let tracked = Boolean(await this.git.ls_files(repoPath, relativePath));
			if (tracked) return [relativePath, repoPath];

			if (repoPath) {
				const [newRelativePath, newRepoPath] = splitPath(path, '', true);
				if (newRelativePath !== relativePath) {
					// If we didn't find it, check it as close to the file as possible (will find nested repos)
					tracked = Boolean(await this.git.ls_files(newRepoPath, newRelativePath));
					if (tracked) {
						repository = await this.container.git.getOrOpenRepository(Uri.file(path), {
							detectNested: true,
						});
						if (repository != null) {
							return splitPath(path, repository.path);
						}

						return [newRelativePath, newRepoPath];
					}
				}
			}

			if (!tracked && ref && !isUncommitted(ref)) {
				tracked = Boolean(await this.git.ls_files(repoPath, relativePath, { ref: ref }));
				// If we still haven't found this file, make sure it wasn't deleted in that ref (i.e. check the previous)
				if (!tracked) {
					tracked = Boolean(await this.git.ls_files(repoPath, relativePath, { ref: `${ref}^` }));
				}
			}

			// Since the file isn't tracked, make sure it isn't part of a nested repository we don't know about yet
			if (!tracked) {
				if (repository != null) {
					// Don't look for a nested repository if the file isn't at least one folder deep
					const index = relativePath.indexOf('/');
					if (index < 0 || index === relativePath.length - 1) return undefined;

					const nested = await this.container.git.getOrOpenRepository(Uri.file(path), {
						detectNested: true,
					});
					if (nested != null && nested !== repository) {
						// A different nested repo was found; retry against the outer repository's root
						[relativePath, repoPath] = splitPath(path, repository.path);
						repository = undefined;

						continue;
					}
				}

				return undefined;
			}

			return [relativePath, repoPath];
		}
	} catch (ex) {
		Logger.error(ex, scope);
		return undefined;
	}
}
@log()
async getDiffTool(repoPath?: string): Promise<string | undefined> {
return (
(await this.git.config__get('diff.guitool', repoPath, { local: true })) ??
this.git.config__get('diff.tool', repoPath, { local: true })
);
}
@log()
async openDiffTool(
	repoPath: string,
	uri: Uri,
	options?: { ref1?: string; ref2?: string; staged?: boolean; tool?: string },
): Promise<void> {
	// Opens the file's changes in an external diff tool.
	// Tool resolution: explicit option → `advanced.externalDiffTool` setting → git's configured diff tool.
	const scope = getLogScope();
	const [relativePath, root] = splitPath(uri, repoPath);

	try {
		let tool = options?.tool;
		if (!tool) {
			// (Removed a redundant re-declaration of `scope` here that shadowed the outer log scope)
			tool = configuration.get('advanced.externalDiffTool') || (await this.getDiffTool(root));
			if (tool == null) throw new Error('No diff tool found');

			Logger.log(scope, `Using tool=${tool}`);
		}

		await this.git.difftool(root, relativePath, tool, options);
	} catch (ex) {
		const msg: string = ex?.toString() ?? '';
		// A missing/unknown tool is a user-configuration problem, so prompt with docs rather than erroring
		if (msg === 'No diff tool found' || /Unknown .+? tool/.test(msg)) {
			const viewDocs = 'View Git Docs';
			const result = await window.showWarningMessage(
				'Unable to open changes because the specified diff tool cannot be found or no Git diff tool is configured',
				viewDocs,
			);
			if (result === viewDocs) {
				void env.openExternal(
					Uri.parse('path_to_url#Documentation/git-config.txt-difftool'),
				);
			}

			return;
		}

		Logger.error(ex, scope);
		void showGenericErrorMessage('Unable to open compare');
	}
}
@log()
async openDirectoryCompare(repoPath: string, ref1: string, ref2?: string, tool?: string): Promise<void> {
	// Opens a directory (tree-level) comparison between two refs in an external diff tool.
	// Tool resolution: explicit argument → `advanced.externalDirectoryDiffTool` setting → git's configured diff tool.
	const scope = getLogScope();

	try {
		if (!tool) {
			// (Removed a redundant re-declaration of `scope` here that shadowed the outer log scope)
			tool = configuration.get('advanced.externalDirectoryDiffTool') || (await this.getDiffTool(repoPath));
			if (tool == null) throw new Error('No diff tool found');

			Logger.log(scope, `Using tool=${tool}`);
		}

		await this.git.difftool__dir_diff(repoPath, tool, ref1, ref2);
	} catch (ex) {
		const msg: string = ex?.toString() ?? '';
		// A missing/unknown tool is a user-configuration problem, so prompt with docs rather than erroring
		if (msg === 'No diff tool found' || /Unknown .+? tool/.test(msg)) {
			const viewDocs = 'View Git Docs';
			const result = await window.showWarningMessage(
				'Unable to open directory compare because the specified diff tool cannot be found or no Git diff tool is configured',
				viewDocs,
			);
			if (result === viewDocs) {
				void env.openExternal(
					Uri.parse('path_to_url#Documentation/git-config.txt-difftool'),
				);
			}

			return;
		}

		Logger.error(ex, scope);
		void showGenericErrorMessage('Unable to open directory compare');
	}
}
@log()
async resolveReference(
	repoPath: string,
	ref: string,
	pathOrUri?: string | Uri,
	options?: { force?: boolean; timeout?: number },
) {
	// Resolves a ref (branch/tag/partial sha) into a concrete sha; when scoped to a file,
	// finds the most recent commit for that file at/before the ref (with an optional timeout)
	if (
		!ref ||
		ref === deletedOrMissing ||
		(pathOrUri == null && isSha(ref)) ||
		(pathOrUri != null && isUncommitted(ref))
	) {
		return ref;
	}

	if (pathOrUri == null) {
		// If it doesn't look like a sha at all (e.g. branch name) or is a stash ref (^3) don't try to resolve it
		if ((!options?.force && !isShaLike(ref)) || ref.endsWith('^3')) return ref;

		return (await this.git.rev_parse__verify(repoPath, ref)) ?? ref;
	}

	const relativePath = this.getRelativePath(pathOrUri, repoPath);

	// Bound the (potentially slow) file-history lookup, if a timeout was requested
	let cancellation: TimedCancellationSource | undefined;
	if (options?.timeout != null) {
		cancellation = new TimedCancellationSource(options.timeout);
	}

	const [verifiedResult, resolvedResult] = await Promise.allSettled([
		this.git.rev_parse__verify(repoPath, ref, relativePath),
		this.git.log__file_recent(repoPath, relativePath, {
			ref: ref,
			cancellation: cancellation?.token,
		}),
	]);

	const verified = getSettledValue(verifiedResult);
	// The ref:path combination doesn't exist
	if (verified == null) return deletedOrMissing;

	const resolved = getSettledValue(resolvedResult);

	const cancelled = cancellation?.token.isCancellationRequested;
	cancellation?.dispose();

	// On timeout, fall back to the original ref rather than a possibly-incomplete result
	return cancelled ? ref : resolved ?? ref;
}
	/**
	 * Searches commit history (including stashes) for the given query and returns a rich
	 * `GitLog`, or `undefined` on failure.
	 *
	 * Stash commits are fed to `git log` via stdin (there is no direct flag for this) and a
	 * sha->stash map is passed to the parser so stash entries render correctly. The returned
	 * log exposes `query`/`more` closures that re-run or extend the search for paging.
	 */
	@log<LocalGitProvider['richSearchCommits']>({
		args: {
			1: s =>
				`[${s.matchAll ? 'A' : ''}${s.matchCase ? 'C' : ''}${s.matchRegex ? 'R' : ''}]: ${
					s.query.length > 500 ? `${s.query.substring(0, 500)}...` : s.query
				}`,
		},
	})
	async richSearchCommits(
		repoPath: string,
		search: SearchQuery,
		options?: { limit?: number; ordering?: 'date' | 'author-date' | 'topo' | null; skip?: number },
	): Promise<GitLog | undefined> {
		// Fill in defaults for any unspecified match flags
		search = { matchAll: false, matchCase: false, matchRegex: true, ...search };
		try {
			const limit = options?.limit ?? configuration.get('advanced.maxSearchItems') ?? 0;
			const similarityThreshold = configuration.get('advanced.similarityThreshold');
			const currentUser = await this.getCurrentUser(repoPath);
			const { args, files, shas } = getGitArgsFromSearchQuery(search, currentUser);
			// Rename detection threshold, then the `--` path separator (plus any file filters)
			args.push(`-M${similarityThreshold == null ? '' : `${similarityThreshold}%`}`, '--');
			if (files.length !== 0) {
				args.push(...files);
			}
			let stashes: Map<string, GitStashCommit> | undefined;
			let stdin: string | undefined;
			if (shas == null) {
				const stash = await this.getStash(repoPath);
				// TODO@eamodio this is insanity -- there *HAS* to be a better way to get git log to return stashes
				if (stash?.commits.size) {
					// Index stash commits by their own sha AND their parents' shas so the
					// parser can associate log output back to the stash entry
					stashes = new Map();
					for (const commit of stash.commits.values()) {
						stashes.set(commit.sha, commit);
						for (const p of commit.parents) {
							stashes.set(p, commit);
						}
					}
					stdin = join(
						map(stash.commits.values(), c => c.sha.substring(0, 9)),
						'\n',
					);
				}
			}
			// When specific shas were requested, they are passed via `shas` instead of args
			const data = await this.git.log__search(repoPath, shas?.size ? undefined : args, {
				ordering: configuration.get('advanced.commitOrdering'),
				...options,
				limit: limit,
				shas: shas,
				stdin: stdin,
			});
			const log = parseGitLog(
				this.container,
				data,
				LogType.Log,
				repoPath,
				undefined,
				undefined,
				currentUser,
				limit,
				false,
				undefined,
				stashes,
			);
			if (log != null) {
				// Builds the `more` continuation: fetches the next page (skipping what we
				// already have) and merges it into the accumulated log
				function richSearchCommitsCore(
					this: LocalGitProvider,
					log: GitLog,
				): (limit: number | undefined) => Promise<GitLog> {
					return async (limit: number | undefined) => {
						limit = limit ?? configuration.get('advanced.maxSearchItems') ?? 0;
						const moreLog = await this.richSearchCommits(log.repoPath, search, {
							...options,
							limit: limit,
							skip: log.count,
						});
						// If we can't find any more, assume we have everything
						if (moreLog == null) return { ...log, hasMore: false, more: undefined };
						const commits = new Map([...log.commits, ...moreLog.commits]);
						const mergedLog: GitLog = {
							repoPath: log.repoPath,
							commits: commits,
							sha: log.sha,
							range: log.range,
							count: commits.size,
							limit: (log.limit ?? 0) + limit,
							hasMore: moreLog.hasMore,
							query: (limit: number | undefined) =>
								this.richSearchCommits(log.repoPath, search, { ...options, limit: limit }),
						};
						if (mergedLog.hasMore) {
							mergedLog.more = richSearchCommitsCore.call(this, mergedLog);
						}
						return mergedLog;
					};
				}
				log.query = (limit: number | undefined) =>
					this.richSearchCommits(repoPath, search, { ...options, limit: limit });
				if (log.hasMore) {
					log.more = richSearchCommitsCore.call(this, log);
				}
			}
			return log;
		} catch (ex) {
			// Best-effort: any failure is reported to the caller as "no results"
			return undefined;
		}
	}
	/**
	 * Performs a (lighter-weight) commit search returning only shas + dates (`GitSearch`),
	 * suitable for the commit graph.
	 *
	 * When the query names specific shas, `git show` is used directly. Otherwise `git log`
	 * is run page-by-page via the internal `searchForCommitsCore` closure, which carries a
	 * cursor so `more()` can continue the search. Stash shas are injected via stdin.
	 * Cancellation returns the results accumulated so far rather than throwing.
	 *
	 * @throws GitSearchError wrapping any underlying git failure
	 */
	@log()
	async searchCommits(
		repoPath: string,
		search: SearchQuery,
		options?: {
			cancellation?: CancellationToken;
			limit?: number;
			ordering?: 'date' | 'author-date' | 'topo';
		},
	): Promise<GitSearch> {
		search = { matchAll: false, matchCase: false, matchRegex: true, ...search };
		const comparisonKey = getSearchQueryComparisonKey(search);
		try {
			const refAndDateParser = getRefAndDateParser();
			// Only resolve the current user when the query actually references @me
			const currentUser = search.query.includes('@me') ? await this.getCurrentUser(repoPath) : undefined;
			const { args: searchArgs, files, shas } = getGitArgsFromSearchQuery(search, currentUser);
			if (shas?.size) {
				// Explicit shas: look them up directly with `git show -s` (no paging needed)
				const data = await this.git.show2(
					repoPath,
					{ cancellation: options?.cancellation },
					'-s',
					...refAndDateParser.arguments,
					...shas.values(),
					...searchArgs,
					'--',
				);
				let i = 0;
				const results: GitSearchResults = new Map<string, GitSearchResultData>(
					map(refAndDateParser.parse(data), c => [
						c.sha,
						{
							i: i++,
							// Epoch seconds -> milliseconds; date choice follows the requested ordering
							date: Number(options?.ordering === 'author-date' ? c.authorDate : c.committerDate) * 1000,
						},
					]),
				);
				return {
					repoPath: repoPath,
					query: search,
					comparisonKey: comparisonKey,
					results: results,
				};
			}
			const limit = options?.limit ?? configuration.get('advanced.maxSearchItems') ?? 0;
			const similarityThreshold = configuration.get('advanced.similarityThreshold');
			const stash = await this.getStash(repoPath);
			let stdin: string | undefined;
			// TODO@eamodio this is insanity -- there *HAS* to be a better way to get git log to return stashes
			if (stash?.commits.size) {
				stdin = join(
					map(stash.commits.values(), c => c.sha.substring(0, 9)),
					'\n',
				);
			}
			const args = [
				...refAndDateParser.arguments,
				`-M${similarityThreshold == null ? '' : `${similarityThreshold}%`}`,
				'--use-mailmap',
			];
			// Shared accumulator across pages; `total` preserves insertion order indices
			const results: GitSearchResults = new Map<string, GitSearchResultData>();
			let total = 0;
			async function searchForCommitsCore(
				this: LocalGitProvider,
				limit: number,
				cursor?: { sha: string; skip: number },
			): Promise<GitSearch> {
				if (options?.cancellation?.isCancellationRequested) {
					return { repoPath: repoPath, query: search, comparisonKey: comparisonKey, results: results };
				}
				let data;
				try {
					data = await this.git.log(
						repoPath,
						{
							cancellation: options?.cancellation,
							configs: ['-C', repoPath, ...gitLogDefaultConfigs],
							errors: GitErrorHandling.Throw,
							stdin: stdin,
						},
						...args,
						...searchArgs,
						...(options?.ordering ? [`--${options.ordering}-order`] : emptyArray),
						// Request one extra row so we can detect whether more results exist
						...(limit ? [`-n${limit + 1}`] : emptyArray),
						...(cursor?.skip ? [`--skip=${cursor.skip}`] : emptyArray),
						'--',
						...files,
					);
				} catch (ex) {
					// Treat cancellation as "done with what we have"; everything else is fatal
					if (ex instanceof CancelledRunError || options?.cancellation?.isCancellationRequested) {
						return { repoPath: repoPath, query: search, comparisonKey: comparisonKey, results: results };
					}
					throw new GitSearchError(ex);
				}
				if (options?.cancellation?.isCancellationRequested) {
					return { repoPath: repoPath, query: search, comparisonKey: comparisonKey, results: results };
				}
				let count = total;
				for (const r of refAndDateParser.parse(data)) {
					if (results.has(r.sha)) {
						// Duplicate row (presumably a stash sha already seen via stdin — TODO confirm);
						// shrink the effective limit so the paging math below stays consistent
						limit--;
						continue;
					}
					results.set(r.sha, {
						i: total++,
						date: Number(options?.ordering === 'author-date' ? r.authorDate : r.committerDate) * 1000,
					});
				}
				// `count` becomes the number of NEW results added by this page
				count = total - count;
				const lastSha = last(results)?.[0];
				cursor =
					lastSha != null
						? {
								sha: lastSha,
								skip: total,
						  }
						: undefined;
				return {
					repoPath: repoPath,
					query: search,
					comparisonKey: comparisonKey,
					results: results,
					paging:
						// The extra `-n limit+1` row makes count > limit iff more results exist
						limit !== 0 && count > limit
							? {
									limit: limit,
									hasMore: true,
							  }
							: undefined,
					more: async (limit: number): Promise<GitSearch> => searchForCommitsCore.call(this, limit, cursor),
				};
			}
			return searchForCommitsCore.call(this, limit);
		} catch (ex) {
			if (ex instanceof GitSearchError) {
				throw ex;
			}
			throw new GitSearchError(ex);
		}
	}
	/** Runs the given git command in VS Code's integrated terminal via the underlying git runner. */
	@log({ args: { 2: false } })
	async runGitCommandViaTerminal(
		repoPath: string,
		command: string,
		args: string[],
		options?: { execute?: boolean },
	): Promise<void> {
		await this.git.runGitCommandViaTerminal(repoPath, command, args, options);
		// Right now we are reliant on the Repository class to fire the change event (as a stop gap if we don't detect a change through the normal mechanisms)
		// setTimeout(() => this.fireChange(RepositoryChange.Unknown), 2500);
	}
	/** Validates that `ref` is a well-formed branch/tag name (delegates to `git check-ref-format`). */
	@log()
	validateBranchOrTagName(repoPath: string, ref: string): Promise<boolean> {
		return this.git.check_ref_format(ref, repoPath);
	}
@log({ args: { 1: false } })
async validatePatch(repoPath: string | undefined, contents: string): Promise<boolean> {
try {
await this.git.apply2(repoPath!, { stdin: contents }, '--check');
return true;
} catch (ex) {
if (ex instanceof Error && ex.message) {
if (ex.message.includes('No valid patches in input')) {
return false;
}
return true;
}
return false;
}
}
@log()
async validateReference(repoPath: string, ref: string): Promise<boolean> {
if (ref == null || ref.length === 0) return false;
if (ref === deletedOrMissing || isUncommitted(ref)) return true;
return (await this.git.rev_parse__verify(repoPath, ref)) != null;
}
@log()
async stageFile(repoPath: string, pathOrUri: string | Uri): Promise<void> {
await this.git.add(repoPath, typeof pathOrUri === 'string' ? pathOrUri : splitPath(pathOrUri, repoPath)[0]);
}
@log()
async stageDirectory(repoPath: string, directoryOrUri: string | Uri): Promise<void> {
await this.git.add(
repoPath,
typeof directoryOrUri === 'string' ? directoryOrUri : splitPath(directoryOrUri, repoPath)[0],
);
}
@log()
async unstageFile(repoPath: string, pathOrUri: string | Uri): Promise<void> {
await this.git.reset(repoPath, typeof pathOrUri === 'string' ? pathOrUri : splitPath(pathOrUri, repoPath)[0]);
}
@log()
async unstageDirectory(repoPath: string, directoryOrUri: string | Uri): Promise<void> {
await this.git.reset(
repoPath,
typeof directoryOrUri === 'string' ? directoryOrUri : splitPath(directoryOrUri, repoPath)[0],
);
}
	/**
	 * Applies (or pops, when `deleteAfter` is set) the given stash.
	 *
	 * Maps Git's failure messages onto friendlier outcomes: working-tree clobbering becomes
	 * `StashApplyErrorReason.WorkingChanges`, conflict output is treated as a *successful*
	 * apply (with an information toast), and anything else is wrapped in `StashApplyError`.
	 */
	@log()
	async stashApply(repoPath: string, stashName: string, options?: { deleteAfter?: boolean }): Promise<void> {
		try {
			await this.git.stash__apply(repoPath, stashName, Boolean(options?.deleteAfter));
		} catch (ex) {
			if (ex instanceof Error) {
				const msg: string = ex.message ?? '';
				if (msg.includes('Your local changes to the following files would be overwritten by merge')) {
					throw new StashApplyError(StashApplyErrorReason.WorkingChanges, ex);
				}
				// Conflicts surface either in the error message or (for RunError) on stdout
				if (
					(msg.includes('Auto-merging') && msg.includes('CONFLICT')) ||
					(ex instanceof RunError &&
						((ex.stdout.includes('Auto-merging') && ex.stdout.includes('CONFLICT')) ||
							ex.stdout.includes('needs merge')))
				) {
					void window.showInformationMessage('Stash applied with conflicts');
					return;
				}
				// Collapse multi-line git output into a single readable message
				throw new StashApplyError(`Unable to apply stash \u2014 ${msg.trim().replace(/\n+?/g, '; ')}`, ex);
			}
			throw new StashApplyError(`Unable to apply stash \u2014 ${String(ex)}`, ex);
		}
	}
	/** Deletes (drops) the given stash entry and invalidates the cached stashes for the repo. */
	@log()
	async stashDelete(repoPath: string, stashName: string, ref?: string): Promise<void> {
		await this.git.stash__delete(repoPath, stashName, ref);
		this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['stashes'] });
	}
	/** Renames (changes the message of) the given stash entry and invalidates the stash cache. */
	@log()
	async stashRename(
		repoPath: string,
		stashName: string,
		ref: string,
		message: string,
		stashOnRef?: string,
	): Promise<void> {
		await this.git.stash__rename(repoPath, stashName, ref, message, stashOnRef);
		this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['stashes'] });
	}
	/**
	 * Saves a stash, optionally restricted to specific files (`uris`).
	 *
	 * Stashing individual files requires Git >= 2.13.2; passing the pathspecs over stdin
	 * requires Git >= 2.30.0 and is used to avoid command-line length limits, except with
	 * `onlyStaged` where stdin is incompatible with `--staged`. Always invalidates the
	 * stash and status caches, even on failure.
	 */
	@log<LocalGitProvider['stashSave']>({ args: { 2: uris => uris?.length } })
	async stashSave(
		repoPath: string,
		message?: string,
		uris?: Uri[],
		options?: { includeUntracked?: boolean; keepIndex?: boolean; onlyStaged?: boolean },
	): Promise<void> {
		try {
			if (!uris?.length) {
				// No file filter: stash everything per the options
				await this.git.stash__push(repoPath, message, options);
				return;
			}
			await this.ensureGitVersion(
				'2.13.2',
				'Stashing individual files',
				' Please retry by stashing everything or install a more recent version of Git and try again.',
			);
			const pathspecs = uris.map(u => `./${splitPath(u, repoPath)[0]}`);
			const stdinVersion = '2.30.0';
			let stdin = await this.git.isAtLeastVersion(stdinVersion);
			if (stdin && options?.onlyStaged && uris.length) {
				// Since Git doesn't support --staged with --pathspec-from-file try to pass them in directly
				stdin = false;
			}
			// If we don't support stdin, then error out if we are over the maximum allowed git cli length
			if (!stdin && countStringLength(pathspecs) > maxGitCliLength) {
				await this.ensureGitVersion(
					stdinVersion,
					`Stashing so many files (${pathspecs.length}) at once`,
					' Please retry by stashing fewer files or install a more recent version of Git and try again.',
				);
			}
			await this.git.stash__push(repoPath, message, {
				...options,
				pathspecs: pathspecs,
				stdin: stdin,
			});
		} finally {
			// Stashing moves working-tree state, so both caches are stale regardless of outcome
			this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['stashes', 'status'] });
		}
	}
@log()
async stashSaveSnapshot(repoPath: string, message?: string): Promise<void> {
const id = await this.git.stash__create(repoPath);
if (id == null) return;
await this.git.stash__store(repoPath, id, message);
this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['stashes'] });
}
@log()
async createWorktree(
repoPath: string,
path: string,
options?: { commitish?: string; createBranch?: string; detach?: boolean; force?: boolean },
) {
const scope = getLogScope();
try {
await this.git.worktree__add(repoPath, path, options);
this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['worktrees'] });
if (options?.createBranch) {
this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['branches'] });
}
} catch (ex) {
Logger.error(ex, scope);
const msg = String(ex);
if (GitErrors.alreadyCheckedOut.test(msg)) {
throw new WorktreeCreateError(WorktreeCreateErrorReason.AlreadyCheckedOut, ex);
}
if (GitErrors.alreadyExists.test(msg)) {
throw new WorktreeCreateError(WorktreeCreateErrorReason.AlreadyExists, ex);
}
throw new WorktreeCreateError(undefined, ex);
}
}
	/**
	 * Returns the repo's worktrees (requires Git >= 2.7.6).
	 *
	 * The in-flight promise itself is cached, so concurrent callers share one git
	 * invocation; a failed load evicts itself from the cache so the next call retries.
	 */
	@log()
	async getWorktrees(repoPath: string): Promise<GitWorktree[]> {
		await this.ensureGitVersion(
			'2.7.6',
			'Displaying worktrees',
			' Please install a more recent version of Git and try again.',
		);
		let worktrees = this.useCaching ? this._worktreesCache.get(repoPath) : undefined;
		if (worktrees == null) {
			async function load(this: LocalGitProvider) {
				try {
					// Branches are fetched alongside so worktrees can be associated with them
					const [data, branches] = await Promise.all([
						this.git.worktree__list(repoPath),
						this.getBranches(repoPath),
					]);
					return parseGitWorktrees(this.container, data, repoPath, branches.values);
				} catch (ex) {
					// Don't leave a rejected promise in the cache
					this._worktreesCache.delete(repoPath);
					throw ex;
				}
			}
			worktrees = load.call(this);
			if (this.useCaching) {
				this._worktreesCache.set(repoPath, worktrees);
			}
		}
		return worktrees;
	}
@log()
// eslint-disable-next-line @typescript-eslint/require-await
async getWorktreesDefaultUri(repoPath: string): Promise<Uri | undefined> {
let location = configuration.get('worktrees.defaultLocation');
if (location == null) return undefined;
if (location.startsWith('~')) {
location = joinPaths(homedir(), location.slice(1));
}
const folder = this.container.git.getRepository(repoPath)?.folder;
location = interpolate(location, {
userHome: homedir(),
workspaceFolder: folder?.uri.fsPath,
workspaceFolderBasename: folder?.name,
});
return this.getAbsoluteUri(location, repoPath);
}
	/**
	 * Deletes the worktree at `path` (requires Git >= 2.17.0) and invalidates the worktree
	 * cache. Maps known Git failures onto `WorktreeDeleteError` reasons.
	 */
	@log()
	async deleteWorktree(repoPath: string, path: string, options?: { force?: boolean }) {
		const scope = getLogScope();
		await this.ensureGitVersion(
			'2.17.0',
			'Deleting worktrees',
			' Please install a more recent version of Git and try again.',
		);
		try {
			await this.git.worktree__remove(repoPath, normalizePath(path), options);
			this.container.events.fire('git:cache:reset', { repoPath: repoPath, caches: ['worktrees'] });
		} catch (ex) {
			Logger.error(ex, scope);
			const msg = String(ex);
			// The main working tree can never be removed
			if (GitErrors.mainWorkingTree.test(msg)) {
				throw new WorktreeDeleteError(WorktreeDeleteErrorReason.MainWorkingTree, ex);
			}
			if (GitErrors.uncommittedChanges.test(msg)) {
				throw new WorktreeDeleteError(WorktreeDeleteErrorReason.HasChanges, ex);
			}
			throw new WorktreeDeleteError(undefined, ex);
		}
	}
	// Memoized promise for the built-in vscode.git extension API (resolved at most once)
	private _scmGitApi: Promise<ScmGitApi | undefined> | undefined;
	private async getScmGitApi(): Promise<ScmGitApi | undefined> {
		// Lazily kick off (and cache) the resolution on first use
		return this._scmGitApi ?? (this._scmGitApi = this.getScmGitApiCore());
	}
@log()
private async getScmGitApiCore(): Promise<ScmGitApi | undefined> {
try {
const extension = extensions.getExtension<GitExtension>('vscode.git');
if (extension == null) return undefined;
const gitExtension = extension.isActive ? extension.exports : await extension.activate();
return gitExtension?.getAPI(1);
} catch {
return undefined;
}
}
	/**
	 * Builds a `git:`-scheme Uri (as used by the vscode.git extension) for the given path,
	 * pointing at the index version (`ref: '~'`) of the file.
	 */
	private getScmGitUri(path: string, repoPath: string): Uri {
		// If the repoPath is a canonical path, then we need to remap it to the real path, because the vscode.git extension always uses the real path
		const realUri = this.fromCanonicalMap.get(repoPath)
		const uri = this.getAbsoluteUri(path, realUri ?? repoPath);
		return Uri.from({
			scheme: Schemes.Git,
			path: uri.path,
			query: JSON.stringify({
				// Ensure we use the fsPath here, otherwise the url won't open properly
				path: uri.fsPath,
				ref: '~',
			}),
		});
	}
@log()
async getOpenScmRepositories(): Promise<ScmRepository[]> {
const scope = getLogScope();
try {
const gitApi = await this.getScmGitApi();
return gitApi?.repositories ?? [];
} catch (ex) {
Logger.error(ex, scope);
return [];
}
}
@log({ exit: true })
async getScmRepository(repoPath: string): Promise<ScmRepository | undefined> {
const scope = getLogScope();
try {
const gitApi = await this.getScmGitApi();
return gitApi?.getRepository(Uri.file(repoPath)) ?? undefined;
} catch (ex) {
Logger.error(ex, scope);
return undefined;
}
}
	/**
	 * Gets the built-in Git extension's repository for `repoPath`, opening it when needed.
	 *
	 * `getRepository` returns any open repository that *contains* the path, so for nested
	 * repositories an exact root match is required; otherwise the nested path is opened
	 * explicitly. Returns undefined on any failure.
	 */
	@log({ exit: true })
	async getOrOpenScmRepository(repoPath: string | Uri): Promise<ScmRepository | undefined> {
		const scope = getLogScope();
		try {
			const uri = repoPath instanceof Uri ? repoPath : Uri.file(repoPath);
			const gitApi = await this.getScmGitApi();
			if (gitApi == null) return undefined;
			// `getRepository` will return an opened repository that "contains" that path, so for nested repositories, we need to force the opening of the nested path, otherwise we will only get the root repository
			let repo = gitApi.getRepository(uri);
			if (repo == null || (repo != null && repo.rootUri.toString() !== uri.toString())) {
				Logger.debug(
					scope,
					repo == null
						? '\u2022 no existing repository found, opening repository...'
						: `\u2022 existing, non-matching repository '${repo.rootUri.toString(
								true,
						  )}' found, opening repository...`,
				);
				// openRepository is optional on older versions of the vscode.git API
				repo = await gitApi.openRepository?.(uri);
			}
			return repo ?? undefined;
		} catch (ex) {
			Logger.error(ex, scope);
			return undefined;
		}
	}
private async ensureGitVersion(version: string, prefix: string, suffix: string): Promise<void> {
if (await this.git.isAtLeastVersion(version)) return;
throw new Error(
`${prefix} requires a newer version of Git (>= ${version}) than is currently installed (${await this.git.version()}).${suffix}`,
);
}
}
/**
 * Resolves the VS Code configured file encoding for `uri`, falling back to 'utf8'
 * when unset, already utf8, or unsupported by iconv-lite.
 */
async function getEncoding(uri: Uri): Promise<string> {
	const configured = configuration.getCore('files.encoding', uri);
	if (configured == null || configured === 'utf8') return 'utf8';
	// Lazily load iconv-lite (kept in its own chunk) only when a non-utf8 encoding is configured
	const iconv = await import(/* webpackChunkName: "lib-encoding" */ 'iconv-lite');
	return iconv.encodingExists(configured) ? configured : 'utf8';
}
``` | /content/code_sandbox/src/env/node/git/localGitProvider.ts | xml | 2016-08-08T14:50:30 | 2024-08-15T21:25:09 | vscode-gitlens | gitkraken/vscode-gitlens | 8,889 | 47,577 |
```xml
import type { FetchGetUrlFunc, FetchRequest, FetchCancelSignal, GetUrlResponse } from "./fetch.js";
declare global {
    // Minimal ambient typings for the browser fetch API; only the members this module
    // actually uses are modeled, to avoid depending on the full DOM lib.
    class Headers {
        constructor(values: Array<[string, string]>);
        forEach(func: (v: string, k: string) => void): void;
    }
    class Response {
        status: number;
        statusText: string;
        headers: Headers;
        arrayBuffer(): Promise<ArrayBuffer>;
    }
    type FetchInit = {
        method?: string;
        headers?: Headers;
        body?: Uint8Array;
    };
    function fetch(url: string, init: FetchInit): Promise<Response>;
}
// Creates a browser GetUrl function; `options` presumably supplies request defaults — TODO confirm against implementation.
export declare function createGetUrl(options?: Record<string, any>): FetchGetUrlFunc;
// Performs a single request using the default GetUrl function.
export declare function getUrl(req: FetchRequest, _signal?: FetchCancelSignal): Promise<GetUrlResponse>;
//# sourceMappingURL=geturl-browser.d.ts.map
``` | /content/code_sandbox/lib.commonjs/utils/geturl-browser.d.ts | xml | 2016-07-16T04:35:37 | 2024-08-16T13:37:46 | ethers.js | ethers-io/ethers.js | 7,843 | 182 |
```xml
/*
* @license Apache-2.0
*
* Copyright (c) 2021 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*    https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TypeScript Version: 4.1
/// <reference types="@stdlib/types"/>
import { Collection, ArrayLike } from '@stdlib/types/array';
import { ndarray, DataType, Mode, Order, Shape, Strides } from '@stdlib/types/ndarray';
import { Buffer } from 'buffer';
/**
* Interface defining options accepted by the ndarray constructor.
*/
interface Options {
	/**
	* Specifies how to handle a linear index which exceeds array dimensions (default: 'throw').
	*/
	mode?: Mode;
	/**
	* Specifies how to handle subscripts which exceed array dimensions on a per dimension basis (default: ['throw']).
	*/
	submode?: ArrayLike<Mode>;
	/**
	* Boolean indicating whether an array should be read-only (default: false).
	*/
	readonly?: boolean;
}
/**
* Interface defining a ndarray constructor which is both "newable" and "callable".
*/
interface Constructor {
	/**
	* ndarray constructor.
	*
	* @param dtype - data type
	* @param buffer - data buffer
	* @param shape - array shape
	* @param strides - array strides
	* @param offset - index offset
	* @param order - specifies whether an array is row-major (C-style) or column-major (Fortran-style)
	* @param options - function options
	* @param options.mode - specifies how to handle indices which exceed array dimensions (default: 'throw')
	* @param options.submode - specifies how to handle subscripts which exceed array dimensions on a per dimension basis (default: ['throw'])
	* @param options.readonly - specifies whether an array should be read-only (default: false)
	* @throws `buffer` argument `get` and `set` properties must be functions
	* @throws `shape` argument must be an array-like object containing nonnegative integers
	* @throws `shape` argument length must equal the number of dimensions
	* @throws `strides` argument must be an array-like object containing integers
	* @throws `strides` argument length must equal the number of dimensions (except for zero-dimensional arrays; in which case, the `strides` argument length must be equal to `1`)
	* @throws for zero-dimensional ndarrays, the `strides` argument must contain a single element equal to `0`
	* @throws `offset` argument must be a nonnegative integer
	* @throws `buffer` argument must be compatible with specified meta data
	* @throws must provide valid options
	* @throws too many dimensions
	* @returns ndarray instance
	*
	* @example
	* var buffer = [ 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 ];
	* var shape = [ 3, 2 ];
	* var strides = [ 2, 1 ];
	* var offset = 0;
	*
	* var out = new ndarray( 'generic', buffer, shape, strides, offset, 'row-major' );
	*/
	new ( dtype: DataType, buffer: Collection | Buffer, shape: Shape, strides: Strides, offset: number, order: Order, options?: Options ): ndarray;
	/**
	* ndarray constructor invoked without `new`.
	*
	* Accepts the same arguments, performs the same validation, and throws the same errors
	* as the `new` signature above; see that signature for the full parameter and error
	* contract.
	*
	* @returns ndarray instance
	*
	* @example
	* var buffer = [ 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 ];
	* var shape = [ 3, 2 ];
	* var strides = [ 2, 1 ];
	* var offset = 0;
	*
	* var out = ndarray( 'generic', buffer, shape, strides, offset, 'row-major' );
	*/
	( dtype: DataType, buffer: Collection | Buffer, shape: Shape, strides: Strides, offset: number, order: Order, options?: Options ): ndarray;
}
/**
* ndarray constructor (usable with or without `new`).
*
* See the `Constructor` interface above for the full parameter list, thrown errors, and
* option semantics.
*
* @param dtype - data type
* @param buffer - data buffer
* @param shape - array shape
* @param strides - array strides
* @param offset - index offset
* @param order - specifies whether an array is row-major (C-style) or column-major (Fortran-style)
* @param options - function options
* @returns ndarray instance
*
* @example
* var buffer = [ 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 ];
* var shape = [ 3, 2 ];
* var strides = [ 2, 1 ];
* var offset = 0;
*
* var out = ndarray( 'generic', buffer, shape, strides, offset, 'row-major' );
*/
declare var ctor: Constructor;


// EXPORTS //

export = ctor;
``` | /content/code_sandbox/lib/node_modules/@stdlib/ndarray/ctor/docs/types/index.d.ts | xml | 2016-03-24T04:19:52 | 2024-08-16T09:03:19 | stdlib | stdlib-js/stdlib | 4,266 | 1,657 |
```xml
// Barrel file: re-exports the public API of the scales package
// (scale implementations, tick/time helpers, and shared types).
export * from './compute'
export * from './linearScale'
export * from './logScale'
export * from './symlogScale'
export * from './pointScale'
export * from './timeScale'
export * from './timeHelpers'
export * from './bandScale'
export * from './ticks'
export * from './types'
``` | /content/code_sandbox/packages/scales/src/index.ts | xml | 2016-04-16T03:27:56 | 2024-08-16T03:38:37 | nivo | plouc/nivo | 13,010 | 69 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="path_to_url">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="debug|x64">
<Configuration>debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="release|x64">
<Configuration>release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="profile|x64">
<Configuration>profile</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="checked|x64">
<Configuration>checked</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{5C0CC9F4-4FF4-EF91-BA2C-535447265908}</ProjectGuid>
<RootNamespace>SnippetResourcesLoading</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='profile|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='checked|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='debug|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="../../../compiler/paths.vsprops" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='release|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="../../../compiler/paths.vsprops" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='profile|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="../../../compiler/paths.vsprops" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='checked|x64'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="../../../compiler/paths.vsprops" />
</ImportGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='debug|x64'">
<OutDir>./../../../bin/vc12win64-PhysX_3.4\</OutDir>
<IntDir>./build/x64/SnippetResourcesLoading/debug\</IntDir>
<TargetExt>.exe</TargetExt>
<TargetName>$(ProjectName)DEBUG</TargetName>
<CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
<CodeAnalysisRules />
<CodeAnalysisRuleAssemblies />
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='debug|x64'">
<ClCompile>
<TreatWarningAsError>true</TreatWarningAsError>
<StringPooling>true</StringPooling>
<RuntimeTypeInfo>false</RuntimeTypeInfo>
<FloatingPointModel>Fast</FloatingPointModel>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<AdditionalOptions>/wd4201 /wd4324 /Wall /wd4514 /wd4820 /wd4127 /wd4710 /wd4711 /wd4061 /wd4668 /wd4626 /wd4266 /wd4263 /wd4264 /wd4640 /wd4625 /wd4574 /wd4191 /wd4987 /wd4986 /wd4946 /wd4836 /wd4571 /wd4826 /wd4577 /wd4458 /MP /wd4350 /d2Zi+</AdditionalOptions>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>../../../../PxShared/include;../../../../PxShared/src/foundation/include;./../../../shared/general/RenderDebug/include;../../../../PhysX_3.4/Include;./../../../shared/external/include;./../../../shared/general/shared;./../../../shared/general/RenderDebug/public;./../../../include;./../../../include/PhysX3;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;WIN64;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_DEBUG;PX_DEBUG;PX_CHECKED;PHYSX_PROFILE_SDK;PX_SUPPORT_VISUAL_DEBUGGER;PX_PROFILE;PX_NVTX=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ExceptionHandling>Sync</ExceptionHandling>
<WarningLevel>Level3</WarningLevel>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<PrecompiledHeaderFile></PrecompiledHeaderFile>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<AdditionalOptions>PhysX3CommonDEBUG_x64.lib PhysX3DEBUG_x64.lib PhysX3CookingDEBUG_x64.lib PhysX3ExtensionsDEBUG.lib PxPvdSDKDEBUG_x64.lib PxTaskDEBUG_x64.lib PxFoundationDEBUG_x64.lib ApexFrameworkDEBUG_x64.lib Apex_DestructibleDEBUG_x64.lib /INCREMENTAL:NO</AdditionalOptions>
<AdditionalDependencies>shlwapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(ProjectName)DEBUG.exe</OutputFile>
<AdditionalLibraryDirectories>../../../../PxShared/lib/vc12win64;./../../../lib/vc12WIN64-PhysX_3.4;../../../../PhysX_3.4/Lib/vc12WIN64;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<ProgramDatabaseFile>$(OutDir)/$(ProjectName)DEBUG.exe.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<ImportLibrary>$(OutDir)$(TargetName).lib</ImportLibrary>
<GenerateDebugInformation>true</GenerateDebugInformation>
<TargetMachine>MachineX64</TargetMachine>
</Link>
<ResourceCompile>
</ResourceCompile>
<ProjectReference>
</ProjectReference>
</ItemDefinitionGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release|x64'">
<OutDir>./../../../bin/vc12win64-PhysX_3.4\</OutDir>
<IntDir>./build/x64/SnippetResourcesLoading/release\</IntDir>
<TargetExt>.exe</TargetExt>
<TargetName>$(ProjectName)</TargetName>
<CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
<CodeAnalysisRules />
<CodeAnalysisRuleAssemblies />
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='release|x64'">
<ClCompile>
<TreatWarningAsError>true</TreatWarningAsError>
<StringPooling>true</StringPooling>
<RuntimeTypeInfo>false</RuntimeTypeInfo>
<FloatingPointModel>Fast</FloatingPointModel>
<AdditionalOptions>/wd4201 /wd4324 /Wall /wd4514 /wd4820 /wd4127 /wd4710 /wd4711 /wd4061 /wd4668 /wd4626 /wd4266 /wd4263 /wd4264 /wd4640 /wd4625 /wd4574 /wd4191 /wd4987 /wd4986 /wd4946 /wd4836 /wd4571 /wd4826 /wd4577 /wd4458 /MP /wd4350 /d2Zi+</AdditionalOptions>
<Optimization>Full</Optimization>
<AdditionalIncludeDirectories>../../../../PxShared/include;../../../../PxShared/src/foundation/include;./../../../shared/general/RenderDebug/include;../../../../PhysX_3.4/Include;./../../../shared/external/include;./../../../shared/general/shared;./../../../shared/general/RenderDebug/public;./../../../include;./../../../include/PhysX3;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;WIN64;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;NDEBUG;APEX_SHIPPING;_SECURE_SCL=0;_ITERATOR_DEBUG_LEVEL=0;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ExceptionHandling>Sync</ExceptionHandling>
<WarningLevel>Level3</WarningLevel>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<PrecompiledHeaderFile></PrecompiledHeaderFile>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<AdditionalOptions>PhysX3Common_x64.lib PhysX3_x64.lib PhysX3Cooking_x64.lib PhysX3Extensions.lib PxPvdSDK_x64.lib PxTask_x64.lib PxFoundation_x64.lib ApexFramework_x64.lib Apex_Destructible_x64.lib /INCREMENTAL:NO</AdditionalOptions>
<AdditionalDependencies>shlwapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(ProjectName).exe</OutputFile>
<AdditionalLibraryDirectories>../../../../PxShared/lib/vc12win64;./../../../lib/vc12WIN64-PhysX_3.4;../../../../PhysX_3.4/Lib/vc12WIN64;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<ProgramDatabaseFile>$(OutDir)/$(ProjectName).exe.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<ImportLibrary>$(OutDir)$(TargetName).lib</ImportLibrary>
<GenerateDebugInformation>true</GenerateDebugInformation>
<TargetMachine>MachineX64</TargetMachine>
</Link>
<ResourceCompile>
</ResourceCompile>
<ProjectReference>
</ProjectReference>
</ItemDefinitionGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='profile|x64'">
<OutDir>./../../../bin/vc12win64-PhysX_3.4\</OutDir>
<IntDir>./build/x64/SnippetResourcesLoading/profile\</IntDir>
<TargetExt>.exe</TargetExt>
<TargetName>$(ProjectName)PROFILE</TargetName>
<CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
<CodeAnalysisRules />
<CodeAnalysisRuleAssemblies />
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='profile|x64'">
<ClCompile>
<TreatWarningAsError>true</TreatWarningAsError>
<StringPooling>true</StringPooling>
<RuntimeTypeInfo>false</RuntimeTypeInfo>
<FloatingPointModel>Fast</FloatingPointModel>
<AdditionalOptions>/wd4201 /wd4324 /Wall /wd4514 /wd4820 /wd4127 /wd4710 /wd4711 /wd4061 /wd4668 /wd4626 /wd4266 /wd4263 /wd4264 /wd4640 /wd4625 /wd4574 /wd4191 /wd4987 /wd4986 /wd4946 /wd4836 /wd4571 /wd4826 /wd4577 /wd4458 /MP /wd4350 /d2Zi+</AdditionalOptions>
<Optimization>Full</Optimization>
<AdditionalIncludeDirectories>../../../../PxShared/include;../../../../PxShared/src/foundation/include;./../../../shared/general/RenderDebug/include;../../../../PhysX_3.4/Include;./../../../shared/external/include;./../../../shared/general/shared;./../../../shared/general/RenderDebug/public;./../../../include;./../../../include/PhysX3;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;WIN64;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;NDEBUG;PHYSX_PROFILE_SDK;PX_SUPPORT_VISUAL_DEBUGGER;PX_PROFILE;PX_NVTX=1;_SECURE_SCL=0;_ITERATOR_DEBUG_LEVEL=0;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ExceptionHandling>Sync</ExceptionHandling>
<WarningLevel>Level3</WarningLevel>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<PrecompiledHeaderFile></PrecompiledHeaderFile>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<AdditionalOptions>PhysX3CommonPROFILE_x64.lib PhysX3PROFILE_x64.lib PhysX3CookingPROFILE_x64.lib PhysX3ExtensionsPROFILE.lib PxPvdSDKPROFILE_x64.lib PxTaskPROFILE_x64.lib PxFoundationPROFILE_x64.lib ApexFrameworkPROFILE_x64.lib Apex_DestructiblePROFILE_x64.lib /INCREMENTAL:NO</AdditionalOptions>
<AdditionalDependencies>shlwapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(ProjectName)PROFILE.exe</OutputFile>
<AdditionalLibraryDirectories>../../../../PxShared/lib/vc12win64;./../../../lib/vc12WIN64-PhysX_3.4;../../../../PhysX_3.4/Lib/vc12WIN64;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<ProgramDatabaseFile>$(OutDir)/$(ProjectName)PROFILE.exe.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<ImportLibrary>$(OutDir)$(TargetName).lib</ImportLibrary>
<GenerateDebugInformation>true</GenerateDebugInformation>
<TargetMachine>MachineX64</TargetMachine>
</Link>
<ResourceCompile>
</ResourceCompile>
<ProjectReference>
</ProjectReference>
</ItemDefinitionGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='checked|x64'">
<OutDir>./../../../bin/vc12win64-PhysX_3.4\</OutDir>
<IntDir>./build/x64/SnippetResourcesLoading/checked\</IntDir>
<TargetExt>.exe</TargetExt>
<TargetName>$(ProjectName)CHECKED</TargetName>
<CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
<CodeAnalysisRules />
<CodeAnalysisRuleAssemblies />
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='checked|x64'">
<ClCompile>
<TreatWarningAsError>true</TreatWarningAsError>
<StringPooling>true</StringPooling>
<RuntimeTypeInfo>false</RuntimeTypeInfo>
<FloatingPointModel>Fast</FloatingPointModel>
<AdditionalOptions>/wd4201 /wd4324 /Wall /wd4514 /wd4820 /wd4127 /wd4710 /wd4711 /wd4061 /wd4668 /wd4626 /wd4266 /wd4263 /wd4264 /wd4640 /wd4625 /wd4574 /wd4191 /wd4987 /wd4986 /wd4946 /wd4836 /wd4571 /wd4826 /wd4577 /wd4458 /MP /wd4350 /d2Zi+</AdditionalOptions>
<Optimization>Full</Optimization>
<AdditionalIncludeDirectories>../../../../PxShared/include;../../../../PxShared/src/foundation/include;./../../../shared/general/RenderDebug/include;../../../../PhysX_3.4/Include;./../../../shared/external/include;./../../../shared/general/shared;./../../../shared/general/RenderDebug/public;./../../../include;./../../../include/PhysX3;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;WIN64;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;NDEBUG;PX_CHECKED;PHYSX_PROFILE_SDK;PX_SUPPORT_VISUAL_DEBUGGER;PX_ENABLE_CHECKED_ASSERTS;PX_NVTX=1;_SECURE_SCL=0;_ITERATOR_DEBUG_LEVEL=0;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ExceptionHandling>Sync</ExceptionHandling>
<WarningLevel>Level3</WarningLevel>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<PrecompiledHeaderFile></PrecompiledHeaderFile>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<AdditionalOptions>PhysX3CommonCHECKED_x64.lib PhysX3CHECKED_x64.lib PhysX3CookingCHECKED_x64.lib PhysX3ExtensionsCHECKED.lib PxPvdSDKCHECKED_x64.lib PxTaskCHECKED_x64.lib PxFoundationCHECKED_x64.lib ApexFrameworkCHECKED_x64.lib Apex_DestructibleCHECKED_x64.lib /INCREMENTAL:NO</AdditionalOptions>
<AdditionalDependencies>shlwapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(ProjectName)CHECKED.exe</OutputFile>
<AdditionalLibraryDirectories>../../../../PxShared/lib/vc12win64;./../../../lib/vc12WIN64-PhysX_3.4;../../../../PhysX_3.4/Lib/vc12WIN64;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<ProgramDatabaseFile>$(OutDir)/$(ProjectName)CHECKED.exe.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<ImportLibrary>$(OutDir)$(TargetName).lib</ImportLibrary>
<GenerateDebugInformation>true</GenerateDebugInformation>
<TargetMachine>MachineX64</TargetMachine>
</Link>
<ResourceCompile>
</ResourceCompile>
<ProjectReference>
</ProjectReference>
</ItemDefinitionGroup>
<ItemGroup>
<ClInclude Include="..\..\SnippetCommon\SnippetCommon.h">
</ClInclude>
<ClCompile Include="..\..\SnippetResourcesLoading\SnippetResourcesLoading.cpp">
</ClCompile>
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets"></ImportGroup>
</Project>
``` | /content/code_sandbox/APEX_1.4/snippets/compiler/vc12win64-PhysX_3.4/SnippetResourcesLoading.vcxproj | xml | 2016-10-12T16:34:31 | 2024-08-16T09:40:38 | PhysX-3.4 | NVIDIAGameWorks/PhysX-3.4 | 2,343 | 4,404 |
```xml
import { IOffer } from '../../../model/IOffer';

/**
 * Props for the offer-creation form component.
 */
export interface IOfferCreationFormProps {
  /** True once an offer has been successfully created (drives the confirmation UI). */
  offerCreated: boolean;
  /** Callback invoked with the completed offer so the host can persist it. */
  createOffer: (offer: IOffer) => void;
}
``` | /content/code_sandbox/samples/react-office-offer-creation/src/webparts/offerCreationSpFx/components/IOfferCreationFormProps.ts | xml | 2016-08-30T17:21:43 | 2024-08-16T18:41:32 | sp-dev-fx-webparts | pnp/sp-dev-fx-webparts | 2,027 | 41 |
```xml
/// <reference path="../../../__typings__/index.d.ts"/>
import path from 'path'
import { assertProject } from '../src'
// Smoke-test the basic project assertions against the monorepo root,
// which is expected to depend on rimraf but not on the bogus name.
test('assertProject()', async () => {
  const project = assertProject(path.join(__dirname, '../../..'))
  project.has('rimraf')
  project.hasNot('sfdsff3g34') // cspell:disable-line
  expect(typeof project.requireModule('rimraf')).toBe('function')
  project.isExecutable('.bin/rimraf')
})

// Exercise the store-related helpers against a fixture project that
// resolves is-positive@3.1.0 from the npm registry.
test('assertProject() store functions', async () => {
  const project = assertProject(path.join(__dirname, 'fixture/project'), 'registry.npmjs.org')
  expect(typeof project.getStorePath()).toBe('string')
  project.storeHas('is-positive', '3.1.0')
  expect(typeof project.resolve('is-positive', '3.1.0')).toBe('string')
  // A version that was never installed must not be present in the store.
  project.storeHasNot('is-positive', '3.100.0')
  expect(project.readLockfile()).toBeTruthy()
  expect(project.readCurrentLockfile()).toBeTruthy()
  expect(project.readModulesManifest()).toBeTruthy()
})
``` | /content/code_sandbox/__utils__/assert-project/test/index.ts | xml | 2016-01-28T07:40:43 | 2024-08-16T12:38:47 | pnpm | pnpm/pnpm | 28,869 | 227 |
```xml
import clsx from 'clsx';
type Props = Readonly<{
className?: string;
}>;
export default function HorizontalDivider({ className }: Props) {
return (
<hr
aria-hidden={true}
className={clsx('my-2 h-0 border-t border-slate-200', className)}
/>
);
}
``` | /content/code_sandbox/apps/portal/src/ui/HorizontalDivider/HorizontalDivider.tsx | xml | 2016-07-05T05:00:48 | 2024-08-16T19:01:19 | tech-interview-handbook | yangshun/tech-interview-handbook | 115,302 | 72 |
```xml
import React from 'react';
import { FlatList, SafeAreaView, StyleSheet } from 'react-native';
import { Image } from '@rneui/themed';
import { Header } from '../components/header';
const BASE_URI = 'path_to_url
// Demo screen: renders a two-column grid of ten remote images.
const ImageAPI = () => {
  // Stable string ids "0".."9"; each doubles as the list key and as the
  // suffix appended to BASE_URI.
  const imageIds = Array.from({ length: 10 }, (_, index) => index.toString());

  return (
    <>
      <Header title="Image" view="image" />
      <SafeAreaView>
        <FlatList
          data={imageIds}
          style={styles.list}
          numColumns={2}
          keyExtractor={(id) => id}
          renderItem={({ item }) => (
            <Image
              source={{ uri: BASE_URI + item }}
              containerStyle={styles.item}
            />
          )}
        />
      </SafeAreaView>
    </>
  );
};
// Static styles for the image grid.
const styles = StyleSheet.create({
  list: {
    // Full-width list with a black backdrop behind the photos.
    width: '100%',
    backgroundColor: '#000',
  },
  item: {
    // Square cells that share each row's width equally (two columns).
    aspectRatio: 1,
    width: '100%',
    flex: 1,
  },
});
export default ImageAPI;
``` | /content/code_sandbox/example/src/views/image.tsx | xml | 2016-09-08T14:21:41 | 2024-08-16T10:11:29 | react-native-elements | react-native-elements/react-native-elements | 24,875 | 224 |
```xml
import { IPlaygroundProject } from "../../../shared";
/**
 * Returns the index of the last element in `array` for which `predicate`
 * returns true, or -1 when no element matches.
 *
 * Mirrors `Array.prototype.findLastIndex`, including passing the element's
 * index as a second argument. Backward compatible: existing single-argument
 * predicates are unaffected.
 */
export function findLastIndex<T>(
  array: T[],
  predicate: (value: T, index: number) => boolean
): number {
  // Scan from the tail so the first hit is the last matching element.
  for (let i = array.length - 1; i >= 0; i--) {
    if (predicate(array[i], i)) {
      return i;
    }
  }
  return -1;
}
/**
 * Deep-compares the three source panes (css, html, js) of two playground
 * projects, ignoring CRLF/LF line-ending differences.
 */
export function projectEquals(
  project1: IPlaygroundProject,
  project2: IPlaygroundProject
): boolean {
  const samePane = (a: string, b: string) =>
    normalizeLineEnding(a) === normalizeLineEnding(b);

  return (
    samePane(project1.css, project2.css) &&
    samePane(project1.html, project2.html) &&
    samePane(project1.js, project2.js)
  );
}
/** Converts every Windows CRLF sequence in `str` to a bare LF. */
export function normalizeLineEnding(str: string): string {
  return str.split("\r\n").join("\n");
}
``` | /content/code_sandbox/website/src/website/pages/playground/utils.ts | xml | 2016-06-07T16:56:31 | 2024-08-16T17:17:05 | monaco-editor | microsoft/monaco-editor | 39,508 | 189 |
```xml
import { IContext } from "../../../connectionResolver";
import { sendCoreMessage, sendProductsMessage } from "../../../messageBroker";
import { IDayPlanDocument } from "../../../models/definitions/dayPlans";
export default {
  // Federation reference resolver: look the plan up by its _id.
  async __resolveReference({ _id }, { models }: IContext) {
    return models.DayPlans.findOne({ _id });
  },

  // Resolve the branch attached to this plan, if any, via the core service.
  async branch(plan: IDayPlanDocument, _, { subdomain }: IContext) {
    const { branchId } = plan;

    if (!branchId) {
      return;
    }

    return sendCoreMessage({
      subdomain,
      action: "branches.findOne",
      data: { _id: branchId },
      isRPC: true
    });
  },

  // Resolve the department attached to this plan, if any, via the core service.
  async department(plan: IDayPlanDocument, _, { subdomain }: IContext) {
    const { departmentId } = plan;

    if (!departmentId) {
      return;
    }

    return sendCoreMessage({
      subdomain,
      action: "departments.findOne",
      data: { _id: departmentId },
      isRPC: true
    });
  },

  // Resolve the product this plan targets, if any, via the products service.
  async product(plan: IDayPlanDocument, _, { subdomain }: IContext) {
    const { productId } = plan;

    if (!productId) {
      return;
    }

    return sendProductsMessage({
      subdomain,
      action: "productFindOne",
      data: { _id: productId },
      isRPC: true
    });
  }
};
``` | /content/code_sandbox/packages/plugin-salesplans-api/src/graphql/resolvers/customResolvers/dayPlan.ts | xml | 2016-11-11T06:54:50 | 2024-08-16T10:26:06 | erxes | erxes/erxes | 3,479 | 285 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<com.balysv.materialripple.MaterialRippleLayout xmlns:android="path_to_url"
xmlns:app="path_to_url"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:baselineAligned="false"
android:orientation="horizontal"
android:weightSum="7">
<LinearLayout
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="5"
android:gravity="center_vertical"
android:orientation="horizontal">
<ImageView
android:id="@+id/operationImage"
android:layout_width="30dp"
android:layout_height="30dp"
android:layout_margin="10dp"
android:contentDescription="@string/default_content_description"
app:srcCompat="@drawable/ic_compress_image"
app:tint="?attr/historyIconColor" />
<TextView
android:id="@+id/fileName"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="10dp"
android:padding="8dp"
android:textSize="18sp" />
</LinearLayout>
<LinearLayout
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_weight="2"
android:orientation="vertical">
<TextView
android:id="@+id/operationDate"
android:layout_width="wrap_content"
android:layout_height="0dp"
android:layout_weight="1"
android:paddingLeft="8dp"
android:paddingTop="8dp"
android:paddingRight="8dp"
android:textSize="12sp" />
<TextView
android:id="@+id/operationType"
android:layout_width="wrap_content"
android:layout_height="0dp"
android:layout_weight="1"
android:padding="8dp" />
</LinearLayout>
</LinearLayout>
</com.balysv.materialripple.MaterialRippleLayout>
``` | /content/code_sandbox/app/src/main/res/layout/layout_item_history.xml | xml | 2016-02-22T10:00:46 | 2024-08-16T15:37:50 | Images-to-PDF | Swati4star/Images-to-PDF | 1,174 | 471 |
```xml
import { commitLocalUpdate, Environment } from "relay-runtime";
import { createMutation, LOCAL_ID } from "coral-framework/lib/relay";
export interface SetDuplicateEmailInput {
duplicateEmail: string | null;
}
/**
* SetDuplicateEmailMutation is used to set the duplicateEmail in localState.
* It is used in the `LINK_ACCOUNT` view.
*/
/**
 * SetDuplicateEmailMutation is used to set the duplicateEmail in localState.
 * It is used in the `LINK_ACCOUNT` view.
 */
const SetDuplicateEmailMutation = createMutation(
  "setDuplicateEmail",
  (environment: Environment, input: SetDuplicateEmailInput) =>
    // Purely client-side: write the value straight onto the local record.
    commitLocalUpdate(environment, (store) => {
      const localRecord = store.get(LOCAL_ID)!;
      localRecord.setValue(input.duplicateEmail, "duplicateEmail");
    })
);

export default SetDuplicateEmailMutation;
``` | /content/code_sandbox/client/src/core/client/auth/mutations/SetDuplicateEmailMutation.ts | xml | 2016-10-31T16:14:05 | 2024-08-06T16:15:57 | talk | coralproject/talk | 1,881 | 154 |
```xml
/* eslint-disable no-console */
import { readFile } from 'node:fs/promises';
import { createServer } from 'node:http';
import { createServer as createSecureServer } from 'node:https';
import selfsigned from 'selfsigned';
import handleServe from 'serve-handler';
// Ports for the insecure (HTTP) and secure (HTTPS) listeners, overridable
// via the PORT / PORTS environment variables.
const {
  // eslint-disable-next-line no-magic-numbers
  env: { PORT = 5081, PORTS = 5443 }
} = process;

(async () => {
  // Static-serving configuration consumed by serve-handler.
  const config = JSON.parse(await readFile('./serve.json', 'utf8'));
  // Throwaway self-signed certificate, valid for 365 days — this server is
  // for local testing, so the key pair is generated fresh on every start.
  const attrs = [{ name: 'commonName', value: 'webchat2' }];
  const pems = selfsigned.generate(attrs, { days: 365 });
  // The same serve-handler callback backs both listeners.
  const handler = (req, res) => handleServe(req, res, config);
  createSecureServer(
    {
      cert: pems.cert,
      key: pems.private
    },
    handler
  ).listen(PORTS, () => console.log(`Listening to port ${PORTS} (secure).`));
  createServer(handler).listen(PORT, () => console.log(`Listening to port ${PORT} (insecure).`));
})();
``` | /content/code_sandbox/packages/test/web-server/src/index.ts | xml | 2016-07-07T23:16:57 | 2024-08-16T00:12:37 | BotFramework-WebChat | microsoft/BotFramework-WebChat | 1,567 | 243 |
```xml
import { IFormatter } from './BaseFormatter';
import { Observable, of } from 'rxjs';
import { TextEdit } from 'vscode-languageserver';
/**
 * No-op formatter: always emits an empty edit list. Used in place of a real
 * formatter so callers can rely on a uniform IFormatter interface.
 */
export default class NullFormatter implements IFormatter {
  format(): Observable<TextEdit[]> {
    const noEdits: TextEdit[] = [];
    return of(noEdits);
  }
}
``` | /content/code_sandbox/packages/language-server-ruby/src/formatters/NullFormatter.ts | xml | 2016-02-24T04:14:24 | 2024-08-12T08:08:29 | vscode-ruby | rubyide/vscode-ruby | 1,260 | 57 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="14.0" DefaultTargets="Build" xmlns="path_to_url">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{86771818-366E-4E0D-8EBF-C37699F444E9}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>asm_irony</RootNamespace>
<AssemblyName>asm-irony</AssemblyName>
<TargetFrameworkVersion>v4.8.1</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<AutoGenerateBindingRedirects>true</AutoGenerateBindingRedirects>
<TargetFrameworkProfile />
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<PropertyGroup>
<StartupObject />
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x86'">
<DebugSymbols>true</DebugSymbols>
<OutputPath>bin\x86\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<DebugType>full</DebugType>
<PlatformTarget>x86</PlatformTarget>
<ErrorReport>prompt</ErrorReport>
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
<OutputPath>bin\x86\Release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<Optimize>true</Optimize>
<DebugType>pdbonly</DebugType>
<PlatformTarget>x86</PlatformTarget>
<ErrorReport>prompt</ErrorReport>
<CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Net.Http" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="Example.cs" />
<Compile Include="Example2.cs" />
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\.editorconfig">
<Link>.editorconfig</Link>
</None>
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Irony.Interpreter">
<Version>0.9.1</Version>
</PackageReference>
<PackageReference Include="System.ValueTuple">
<Version>4.5.0</Version>
</PackageReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
``` | /content/code_sandbox/VS/CSHARP/asm-irony/asm-irony.csproj | xml | 2016-02-19T15:43:23 | 2024-08-16T10:20:31 | asm-dude | HJLebbink/asm-dude | 4,118 | 1,008 |
```xml
import {
Company,
Contact,
ContactNote,
Deal,
DealNote,
Sale,
Tag,
Task,
} from '../../../types';
// Shape of the in-memory fake database produced by the data generator.
// `Required<...>` on companies/contacts signals that the generator fills in
// every optional field of those record types.
export interface Db {
  companies: Required<Company>[];
  contacts: Required<Contact>[];
  contactNotes: ContactNote[];
  deals: Deal[];
  dealNotes: DealNote[];
  sales: Sale[];
  tags: Tag[];
  tasks: Task[];
}
``` | /content/code_sandbox/examples/crm/src/providers/fakerest/dataGenerator/types.ts | xml | 2016-07-13T07:58:54 | 2024-08-16T18:32:27 | react-admin | marmelab/react-admin | 24,624 | 88 |
```xml
<!--
Description: entry rights - text
-->
<feed xmlns="path_to_url">
<entry>
<rights type="text">Entry Rights</rights>
</entry>
</feed>
``` | /content/code_sandbox/testdata/parser/atom/atom10_feed_entry_rights_text.xml | xml | 2016-01-23T02:44:34 | 2024-08-16T15:16:03 | gofeed | mmcdole/gofeed | 2,547 | 41 |
```xml
import { Point } from 'slate'
// Fixture for Point.compare: both points share the same path and offset.
export const input = {
  point: {
    path: [0, 1],
    offset: 7,
  },
  another: {
    path: [0, 1],
    offset: 7,
  },
}

export const test = ({ point, another }) => {
  return Point.compare(point, another)
}

// Identical points compare as 0 (equal).
export const output = 0
``` | /content/code_sandbox/packages/slate/test/interfaces/Point/compare/path-equal-offset-equal.tsx | xml | 2016-06-18T01:52:42 | 2024-08-16T18:43:42 | slate | ianstormtaylor/slate | 29,492 | 85 |
```xml
import React from 'react';
import { ViewProps } from '../primitives/View';
// Generated declarations for the web table primitives; each one maps an
// HTML table element (table/thead/tbody/tfoot/th/tr/td/caption) onto a
// component accepting the shared ViewProps.
export declare const Table: React.ComponentType<ViewProps>;
export declare const THead: React.ComponentType<ViewProps>;
export declare const TBody: React.ComponentType<ViewProps>;
export declare const TFoot: React.ComponentType<ViewProps>;
export declare const TH: React.ComponentType<ViewProps>;
export declare const TR: React.ComponentType<ViewProps>;
export declare const TD: React.ComponentType<ViewProps>;
export declare const Caption: React.ComponentType<ViewProps>;
//# sourceMappingURL=Table.web.d.ts.map
``` | /content/code_sandbox/packages/html-elements/build/elements/Table.web.d.ts | xml | 2016-08-15T17:14:25 | 2024-08-16T19:54:44 | expo | expo/expo | 32,004 | 118 |
```xml
import React, { FC, useEffect, ReactNode } from 'react';
import { IconButton, useTheme } from '@grafana/ui';
import { getStyles } from './Modal.styles';
// Props for the Modal component.
interface ModalWindow {
  // Invoked whenever the modal requests to close (header button,
  // backdrop click, or Escape key).
  onClose: () => void;
  // Close when the dimmed background is clicked (default: true).
  closeOnClickaway?: boolean;
  // Close when the Escape key is pressed (default: true).
  closeOnEscape?: boolean;
  // When false the component renders nothing.
  isVisible: boolean;
  // Content rendered in the modal header next to the close button.
  title: ReactNode | string;
}
/**
 * Generic modal dialog with a dimmed backdrop, a titled header with a close
 * button, and an arbitrary content area.
 *
 * Closing is triggered by the header button, by clicking the backdrop
 * (unless `closeOnClickaway` is false), or by pressing Escape (unless
 * `closeOnEscape` is false). Renders nothing while `isVisible` is false.
 */
export const Modal: FC<ModalWindow> = (props) => {
  const {
    isVisible, children, title, onClose, closeOnClickaway = true, closeOnEscape = true,
  } = props;
  const theme = useTheme();
  const styles = getStyles(theme);

  useEffect(() => {
    if (!closeOnEscape) {
      return undefined;
    }

    const escapeHandler = (e: KeyboardEvent) => {
      if (e.key === 'Escape') {
        onClose();
      }
    };

    document.addEventListener('keydown', escapeHandler);

    // Tear the listener down on unmount or when the dependencies change.
    return () => document.removeEventListener('keydown', escapeHandler);
    // Fix: the effect previously had no dependency array, so the keydown
    // listener was removed and re-registered on every render.
  }, [closeOnEscape, onClose]);

  return isVisible ? (
    <div className="modal-wrapper">
      <div
        className={styles.background}
        onClick={closeOnClickaway ? onClose : undefined}
        data-testid="modal-background"
      />
      <div className={styles.body} data-testid="modal-body">
        <div className={styles.modalHeader}>
          {title}
          <div className={styles.modalHeaderClose}>
            <IconButton
              data-testid="modal-close-button"
              name="times"
              size="lg"
              onClick={onClose}
            />
          </div>
        </div>
        <div className={styles.content} data-testid="modal-content">
          {children}
        </div>
      </div>
    </div>
  ) : null;
};
``` | /content/code_sandbox/pmm-app/src/shared/components/Elements/Modal/Modal.tsx | xml | 2016-01-22T07:14:23 | 2024-08-13T13:01:59 | grafana-dashboards | percona/grafana-dashboards | 2,661 | 360 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<!--
path_to_url
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-->
<selector xmlns:android="path_to_url">
<item android:color="?attr/colorSurface" android:state_selected="true" />
<item android:color="?attr/colorOnBackground" />
</selector>
``` | /content/code_sandbox/lib/java/com/google/android/material/timepicker/res/color/m3_timepicker_clock_text_color.xml | xml | 2016-12-05T16:11:29 | 2024-08-16T17:51:42 | material-components-android | material-components/material-components-android | 16,176 | 96 |
```xml
<persistence xmlns="path_to_url"
xmlns:xsi="path_to_url"
xsi:schemaLocation="path_to_url
path_to_url"
version="2.1">
<persistence-unit name="default" transaction-type="RESOURCE_LOCAL">
<provider>org.hibernate.jpa.HibernatePersistenceProvider</provider>
<non-jta-data-source>DefaultDS</non-jta-data-source>
<properties>
<property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect"/>
</properties>
</persistence-unit>
</persistence>
``` | /content/code_sandbox/dev/sbt-plugin/src/sbt-test/sbt-plugin/akka-persistence-typed-migration-java/shopping-cart-lagom-persistence/src/main/resources/META-INF/persistence.xml | xml | 2016-02-23T01:51:06 | 2024-07-17T04:00:50 | lagom | lagom/lagom | 2,630 | 126 |
```xml
<vector xmlns:android="path_to_url"
xmlns:aapt="path_to_url"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
``` | /content/code_sandbox/sample/src/main/res/drawable-v24/ic_launcher_foreground.xml | xml | 2016-08-11T12:08:46 | 2024-08-15T07:21:16 | LoadingButtonAndroid | leandroBorgesFerreira/LoadingButtonAndroid | 1,942 | 773 |
```xml
<clickhouse>
<path replace="replace">/var/lib/clickhouse</path>
<tmp_path replace="replace">/var/lib/clickhouse/tmp/</tmp_path>
<user_files_path replace="replace">/var/lib/clickhouse/user_files/</user_files_path>
<format_schema_path replace="replace">/var/lib/clickhouse/format_schemas/</format_schema_path>
<access_control_path replace="replace">/var/lib/clickhouse/access/</access_control_path>
<top_level_domains_path replace="replace">/var/lib/clickhouse/top_level_domains/</top_level_domains_path>
</clickhouse>
``` | /content/code_sandbox/tests/integration/test_config_yaml_main/configs/config.d/path.xml | xml | 2016-06-02T08:28:18 | 2024-08-16T18:39:33 | ClickHouse | ClickHouse/ClickHouse | 36,234 | 140 |
```xml
import { FeatureCode } from './interface';
import useFeatures, { FeatureContextValue } from './useFeatures';
/**
 * Convenience hook that returns the flag for a single feature code by
 * delegating to `useFeatures` and unwrapping its one-element result.
 */
const useFeature = <FeatureValue = any>(code: FeatureCode, prefetch = true) => {
  const { featuresFlags } = useFeatures([code], prefetch);
  const [feature] = featuresFlags;

  // Forcing type, not sure how to type a generic array
  return feature as FeatureContextValue<FeatureValue>;
};

export default useFeature;
``` | /content/code_sandbox/packages/features/useFeature.ts | xml | 2016-06-08T11:16:51 | 2024-08-16T14:14:27 | WebClients | ProtonMail/WebClients | 4,300 | 95 |
```xml
// Example of how you would create a table and add data to it
import * as fs from "fs";
import { Document, HeadingLevel, Packer, Paragraph, Table, TableCell, TableRow, VerticalAlign, TextDirection } from "docx";
// Builds a cell whose content is vertically centered within the row.
const centeredCell = (...children: Paragraph[]) =>
    new TableCell({ children, verticalAlign: VerticalAlign.CENTER });

// A single-section document containing one 2x4 table demonstrating vertical
// alignment and rotated (vertical) text directions.
const doc = new Document({
    sections: [
        {
            children: [
                new Table({
                    rows: [
                        new TableRow({
                            children: [
                                centeredCell(new Paragraph({}), new Paragraph({})),
                                centeredCell(new Paragraph({}), new Paragraph({})),
                                // Text rotated so it reads bottom-to-top.
                                new TableCell({
                                    children: [new Paragraph({ text: "bottom to top" }), new Paragraph({})],
                                    textDirection: TextDirection.BOTTOM_TO_TOP_LEFT_TO_RIGHT,
                                }),
                                // Text rotated so it reads top-to-bottom.
                                new TableCell({
                                    children: [new Paragraph({ text: "top to bottom" }), new Paragraph({})],
                                    textDirection: TextDirection.TOP_TO_BOTTOM_RIGHT_TO_LEFT,
                                }),
                            ],
                        }),
                        new TableRow({
                            children: [
                                new TableCell({
                                    children: [
                                        new Paragraph({
                                            text: "Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah Blah",
                                            heading: HeadingLevel.HEADING_1,
                                        }),
                                    ],
                                }),
                                centeredCell(
                                    new Paragraph({
                                        text: "This text should be in the middle of the cell",
                                    }),
                                ),
                                centeredCell(
                                    new Paragraph({
                                        text: "Text above should be vertical from bottom to top",
                                    }),
                                ),
                                centeredCell(
                                    new Paragraph({
                                        text: "Text above should be vertical from top to bottom",
                                    }),
                                ),
                            ],
                        }),
                    ],
                }),
            ],
        },
    ],
});

// Serialize the document and write it to disk.
Packer.toBuffer(doc).then((buffer) => {
    fs.writeFileSync("My Document.docx", buffer);
});
``` | /content/code_sandbox/demo/31-tables.ts | xml | 2016-03-26T23:43:56 | 2024-08-16T13:02:47 | docx | dolanmiu/docx | 4,139 | 472 |
```xml
import validate from '../src/numeric';
test('validates that the string only contains numeric characters', () => {
// valid.
expect(validate('1234567890')).toBe(true);
expect(validate(123)).toBe(true);
expect(validate('')).toBe(true);
expect(validate('')).toBe(true);
expect(validate(undefined)).toBe(true);
expect(validate(null)).toBe(true);
expect(validate('')).toBe(true);
expect(validate([])).toBe(true);
expect(validate(0)).toBe(true);
// invalid
expect(validate('a')).toBe(false);
expect(validate('1234567a89')).toBe(false);
expect(validate(true)).toBe(false);
expect(validate(false)).toBe(false);
expect(validate({})).toBe(false);
expect(validate('+123')).toBe(false);
expect(validate('-123')).toBe(false);
});
``` | /content/code_sandbox/packages/rules/tests/numeric.spec.ts | xml | 2016-07-30T01:10:44 | 2024-08-16T10:19:58 | vee-validate | logaretm/vee-validate | 10,699 | 176 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="path_to_url">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="debug|Win32">
<Configuration>debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="release|Win32">
<Configuration>release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="release-assert|Win32">
<Configuration>release-assert</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup>
<BaseIntermediateOutputPath>$(SolutionDir)..\..\..\..\..\bld\msvc\$(SolutionName)\$(ProjectName)</BaseIntermediateOutputPath>
</PropertyGroup>
<ItemGroup>
<!-- reverge_begin cpps -->
<!-- reverge_end cpps -->
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{58495107-524D-262A-14F9-75054C6228FA}</ProjectGuid>
<Keyword>MakeFileProj</Keyword>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release-assert|Win32'" Label="Configuration">
<ConfigurationType>Makefile</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='debug|Win32'" Label="Configuration">
<ConfigurationType>Makefile</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release|Win32'" Label="Configuration">
<ConfigurationType>Makefile</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='release-assert|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='debug|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='debug|Win32'">
<!-- reverge_begin defines(debug) -->
<NMakePreprocessorDefinitions>
</NMakePreprocessorDefinitions>
<!-- reverge_end defines(debug) -->
<!-- reverge_begin includes(debug) -->
<NMakeIncludeSearchPath>
</NMakeIncludeSearchPath>
<!-- reverge_end includes(debug) -->
<!-- reverge_begin options(debug) -->
<AdditionalOptions>
</AdditionalOptions>
<!-- reverge_end options(debug) -->
<OutDir>$(SolutionDir)..\..\..\..\..\bld\lib\$(SolutionName)\libs\$(SolutionName)\test\$(ProjectName).test\msvc-12.0\$(Configuration)\address-model-64\link-static\threading-multi\</OutDir>
<IntDir>$(SolutionDir)..\..\..\..\..\bld\lib\$(SolutionName)\libs\$(SolutionName)\test\$(ProjectName).test\msvc-12.0\$(Configuration)\address-model-64\link-static\threading-multi\</IntDir>
<ExecutablePath>$(PATH)</ExecutablePath>
<IncludePath />
<ReferencePath />
<LibraryPath />
<LibraryWPath />
<SourcePath />
<ExcludePath />
<NMakeBuildCommandLine>cd $(SolutionDir)..\test && $(SolutionDir)..\..\..\b2 address-model=64 variant=$(configuration) toolset=msvc-14.0</NMakeBuildCommandLine>
<NMakeReBuildCommandLine>cd $(SolutionDir)..\test && $(SolutionDir)..\..\..\b2 address-model=64 variant=$(configuration) toolset=msvc-14.0 -a</NMakeReBuildCommandLine>
<NMakeCleanCommandLine>
</NMakeCleanCommandLine>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release|Win32'">
<!-- reverge_begin defines(release) -->
<NMakePreprocessorDefinitions>
</NMakePreprocessorDefinitions>
<!-- reverge_end defines(release) -->
<!-- reverge_begin includes(release) -->
<NMakeIncludeSearchPath>
</NMakeIncludeSearchPath>
<!-- reverge_end includes(release) -->
<!-- reverge_begin options(release) -->
<AdditionalOptions>
</AdditionalOptions>
<!-- reverge_end options(release) -->
<OutDir>$(SolutionDir)..\..\..\..\..\bld\lib\$(SolutionName)\libs\$(SolutionName)\test\$(ProjectName).test\msvc-12.0\$(Configuration)\address-model-64\link-static\threading-multi\</OutDir>
<IntDir>$(SolutionDir)..\..\..\..\..\bld\lib\$(SolutionName)\libs\$(SolutionName)\test\$(ProjectName).test\msvc-12.0\$(Configuration)\address-model-64\link-static\threading-multi\</IntDir>
<ExecutablePath>$(PATH)</ExecutablePath>
<IncludePath />
<ReferencePath />
<LibraryPath />
<LibraryWPath />
<SourcePath />
<ExcludePath />
<NMakeBuildCommandLine>cd $(SolutionDir)..\test && $(SolutionDir)..\..\..\b2 address-model=64 variant=$(configuration) toolset=msvc-14.0</NMakeBuildCommandLine>
<NMakeReBuildCommandLine>cd $(SolutionDir)..\test && $(SolutionDir)..\..\..\b2 address-model=64 variant=$(configuration) toolset=msvc-14.0 -a</NMakeReBuildCommandLine>
<NMakeCleanCommandLine>
</NMakeCleanCommandLine>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release-assert|Win32'">
<!-- reverge_begin defines(release-assert) -->
<NMakePreprocessorDefinitions>
</NMakePreprocessorDefinitions>
<!-- reverge_end defines(release-assert) -->
<!-- reverge_begin includes(release-assert) -->
<NMakeIncludeSearchPath>
</NMakeIncludeSearchPath>
<!-- reverge_end includes(release-assert) -->
<!-- reverge_begin options(release-assert) -->
<AdditionalOptions>
</AdditionalOptions>
<!-- reverge_end options(release-assert) -->
<OutDir>$(SolutionDir)..\..\..\..\..\bld\lib\$(SolutionName)\libs\$(SolutionName)\test\$(ProjectName).test\msvc-12.0\$(Configuration)\address-model-64\link-static\threading-multi\</OutDir>
<IntDir>$(SolutionDir)..\..\..\..\..\bld\lib\$(SolutionName)\libs\$(SolutionName)\test\$(ProjectName).test\msvc-12.0\$(Configuration)\address-model-64\link-static\threading-multi\</IntDir>
<ExecutablePath>$(PATH)</ExecutablePath>
<IncludePath />
<ReferencePath />
<LibraryPath />
<LibraryWPath />
<SourcePath />
<ExcludePath />
<NMakeBuildCommandLine>cd $(SolutionDir)..\test && $(SolutionDir)..\..\..\b2 address-model=64 variant=$(configuration) toolset=msvc-14.0</NMakeBuildCommandLine>
<NMakeReBuildCommandLine>cd $(SolutionDir)..\test && $(SolutionDir)..\..\..\b2 address-model=64 variant=$(configuration) toolset=msvc-14.0 -a</NMakeReBuildCommandLine>
<NMakeCleanCommandLine>
</NMakeCleanCommandLine>
</PropertyGroup>
<ItemDefinitionGroup>
</ItemDefinitionGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>
``` | /content/code_sandbox/deps/boost_1_66_0/libs/qvm/bld/test/all tests.vcxproj | xml | 2016-09-05T10:18:44 | 2024-08-11T13:21:40 | LiquidCore | LiquidPlayer/LiquidCore | 1,010 | 2,049 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Nextcloud - Android Client
~
-->
<LinearLayout xmlns:android="path_to_url"
android:layout_width="match_parent"
android:layout_height="@dimen/standard_list_item_size"
android:orientation="horizontal">
<ImageView
android:id="@+id/thumbnail"
android:layout_width="@dimen/file_icon_size"
android:layout_height="@dimen/file_icon_size"
android:layout_gravity="center_vertical"
android:layout_margin="@dimen/uploader_list_item_layout_image_margin"
android:src="@drawable/folder"
android:contentDescription="@null"/>
<LinearLayout
android:layout_width="0dp"
android:layout_height="@dimen/standard_list_item_size"
android:layout_weight="1"
android:gravity="center_vertical"
android:orientation="vertical">
<TextView
android:id="@+id/filename"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_marginStart="@dimen/zero"
android:layout_marginEnd="@dimen/standard_quarter_margin"
android:ellipsize="middle"
android:singleLine="true"
android:text="@string/placeholder_filename"
android:textColor="@color/text_color"
android:textSize="@dimen/two_line_primary_text_size"/>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/zero"
android:layout_marginEnd="@dimen/standard_quarter_margin"
android:orientation="horizontal">
<TextView
android:id="@+id/file_size"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/placeholder_fileSize"
android:textColor="@color/list_item_lastmod_and_filesize_text"
android:textSize="@dimen/two_line_secondary_text_size"/>
<TextView
android:id="@+id/file_separator"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:paddingStart="@dimen/zero"
android:paddingEnd="@dimen/standard_quarter_padding"
android:gravity="end"
android:text="@string/info_separator"
android:textColor="@color/list_item_lastmod_and_filesize_text"
android:textSize="@dimen/two_line_secondary_text_size"
/>
<TextView
android:id="@+id/last_mod"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="end"
android:text="@string/placeholder_media_time"
android:textColor="@color/list_item_lastmod_and_filesize_text"
android:textSize="@dimen/two_line_secondary_text_size"/>
</LinearLayout>
</LinearLayout>
</LinearLayout>
``` | /content/code_sandbox/app/src/main/res/layout/uploader_list_item_layout.xml | xml | 2016-06-06T21:23:36 | 2024-08-16T18:22:36 | android | nextcloud/android | 4,122 | 632 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
xmlns:android="path_to_url"
android:layout_width="match_parent"
android:layout_height="200dp"
android:orientation="vertical"
android:tag="header">
<ImageView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:scaleType="centerCrop"
android:src="@drawable/a5"
android:tag="zoom"/>
<ImageView
android:id="@+id/icon"
android:layout_width="60dp"
android:layout_height="60dp"
android:layout_centerHorizontal="true"
android:layout_marginTop="30dp"
android:src="@drawable/default_avtar"
android:tag="zoom"/>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/icon"
android:layout_centerHorizontal="true"
android:layout_marginTop="5dp"
android:gravity="center"
android:shadowRadius="3"
android:text=""
android:textColor="#ffffff"
android:textSize="18sp"/>
<LinearLayout
android:id="@+id/ll_action_button"
android:layout_width="match_parent"
android:layout_height="50dp"
android:layout_alignParentBottom="true"
android:background="#D555"
android:dividerPadding="16dp"
android:showDividers="middle">
<TextView
android:id="@+id/tv_register"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_weight="1"
android:gravity="center"
android:text=""
android:textColor="#ffffff"
android:textSize="18sp"/>
<TextView
android:id="@+id/tv_login"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_weight="1"
android:gravity="center"
android:text=""
android:textColor="#ffffff"
android:textSize="18sp"/>
</LinearLayout>
</RelativeLayout>
``` | /content/code_sandbox/app/src/main/res/layout/include_header.xml | xml | 2016-08-08T08:52:10 | 2024-08-12T19:24:13 | AndroidAnimationExercise | REBOOTERS/AndroidAnimationExercise | 1,868 | 482 |
```xml
// Barrel file: re-exports every public component, hook and helper of the
// list feature so consumers can import from a single entry point.
// NOTE: order is preserved as-is; re-exported names may shadow one another,
// so do not reorder these lines casually.
export * from './BulkActionsToolbar';
export * from './Count';
export * from './datagrid';
export * from './Empty';
export * from './InfiniteList';
export * from './filter';
export * from './FilterContext';
export * from './List';
export * from './ListActions';
export * from './listFieldTypes';
export * from './ListGuesser';
export * from './ListNoResults';
export * from './ListToolbar';
export * from './ListView';
export * from './pagination';
export * from './Placeholder';
export * from './SimpleList';
export * from './SingleFieldList';
``` | /content/code_sandbox/packages/ra-ui-materialui/src/list/index.ts | xml | 2016-07-13T07:58:54 | 2024-08-16T18:32:27 | react-admin | marmelab/react-admin | 24,624 | 125 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="path_to_url" xmlns:flowable="path_to_url"
xmlns:cmmndi="path_to_url" xmlns:dc="path_to_url"
xmlns:di="path_to_url"
targetNamespace="path_to_url">
<case id="methodExpressionCase" name="Method Expression Case">
<casePlanModel id="casePlanModel" name="Case plan model">
<planItem id="planItem1" name="Service task" definitionRef="serviceTask1"/>
<planItem id="planItem2" name="Service task" definitionRef="serviceTask2">
<entryCriterion id="entryCriterion2" sentryRef="sentry1"/>
</planItem>
<sentry id="sentry1">
<planItemOnPart id="sentryOnPart1" sourceRef="planItem1">
<standardEvent>complete</standardEvent>
</planItemOnPart>
</sentry>
<task id="serviceTask1" name="Service task" flowable:type="java" flowable:expression="${aString.substring(3,7)}"/>
<task id="serviceTask2" name="Service task" flowable:type="java" flowable:expression="${aString.toString()}"/>
</casePlanModel>
</case>
<cmmndi:CMMNDI>
<cmmndi:CMMNDiagram id="CMMNDiagram_testAuthenticatedUserIdAvailableCase">
<cmmndi:CMMNShape id="CMMNShape_casePlanModel" cmmnElementRef="casePlanModel">
<dc:Bounds height="754.0" width="758.0" x="30.0" y="45.0"/>
<cmmndi:CMMNLabel/>
</cmmndi:CMMNShape>
<cmmndi:CMMNShape id="CMMNShape_planItem1" cmmnElementRef="planItem1">
<dc:Bounds height="80.0" width="100.0" x="121.0" y="211.0"/>
<cmmndi:CMMNLabel/>
</cmmndi:CMMNShape>
<cmmndi:CMMNShape id="CMMNShape_planItem2" cmmnElementRef="planItem2">
<dc:Bounds height="80.0" width="100.0" x="316.0" y="211.0"/>
<cmmndi:CMMNLabel/>
</cmmndi:CMMNShape>
<cmmndi:CMMNShape id="CMMNShape_entryCriterion2" cmmnElementRef="entryCriterion2">
<dc:Bounds height="28.0" width="18.0" x="307.0" y="237.0"/>
<cmmndi:CMMNLabel/>
</cmmndi:CMMNShape>
<cmmndi:CMMNEdge id="CMMNEdge_connector3" cmmnElementRef="planItem1" targetCMMNElementRef="entryCriterion2">
<di:waypoint x="220.9499999999887" y="251.0"/>
<di:waypoint x="268.5" y="251.0"/>
<di:waypoint x="268.5" y="251.0"/>
<di:waypoint x="307.0" y="251.0"/>
<cmmndi:CMMNLabel/>
</cmmndi:CMMNEdge>
<cmmndi:CMMNEdge id="CMMNEdge_connector4" cmmnElementRef="planItem2" targetCMMNElementRef="exitCriterion2">
<di:waypoint x="366.0" y="211.0"/>
<di:waypoint x="366.0" y="21.0"/>
<di:waypoint x="812.0" y="21.0"/>
<di:waypoint x="812.0" y="251.0"/>
<di:waypoint x="796.9298148536527" y="251.0"/>
<cmmndi:CMMNLabel/>
</cmmndi:CMMNEdge>
</cmmndi:CMMNDiagram>
</cmmndi:CMMNDI>
</definitions>
``` | /content/code_sandbox/modules/flowable-cmmn-engine/src/test/resources/org/flowable/cmmn/test/el/CmmnExpressionManagerTest.testMethodExpressions.cmmn.xml | xml | 2016-10-13T07:21:43 | 2024-08-16T15:23:14 | flowable-engine | flowable/flowable-engine | 7,715 | 973 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<!--
~
~
~ path_to_url
~
~ Unless required by applicable law or agreed to in writing, software
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-->
<LinearLayout xmlns:android="path_to_url"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:fitsSystemWindows="true">
<include layout="@layout/common_toolbar" />
<com.justwayward.reader.view.SelectionLayout
android:id="@+id/slOverall"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<fragment
android:id="@+id/fragmentCO"
android:name="com.justwayward.reader.ui.fragment.BookHelpFragment"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</LinearLayout>
``` | /content/code_sandbox/app/src/main/res/layout/activity_community_book_help.xml | xml | 2016-08-03T05:17:27 | 2024-08-16T14:23:57 | BookReader | smuyyh/BookReader | 6,557 | 207 |
```xml
/*your_sha256_hash-----------------------------
*your_sha256_hash----------------------------*/
import type { languages } from '../../fillers/monaco-editor-core';
// Editor-level language configuration for CameLIGO: comment toggling,
// bracket matching, and auto-closing/surrounding pair behavior.
export const conf: languages.LanguageConfiguration = {
  comments: {
    lineComment: '//',
    blockComment: ['(*', '*)']
  },
  brackets: [
    ['{', '}'],
    ['[', ']'],
    ['(', ')'],
    ['<', '>']
  ],
  autoClosingPairs: [
    { open: '{', close: '}' },
    { open: '[', close: ']' },
    { open: '(', close: ')' },
    { open: '<', close: '>' },
    { open: "'", close: "'" },
    { open: '"', close: '"' },
    { open: '(*', close: '*)' }
  ],
  surroundingPairs: [
    { open: '{', close: '}' },
    { open: '[', close: ']' },
    { open: '(', close: ')' },
    { open: '<', close: '>' },
    { open: "'", close: "'" },
    { open: '"', close: '"' },
    { open: '(*', close: '*)' }
  ]
};
// Monarch tokenizer definition for CameLIGO syntax highlighting.
export const language = <languages.IMonarchLanguage>{
  defaultToken: '',
  tokenPostfix: '.cameligo',

  ignoreCase: true,

  brackets: [
    { open: '{', close: '}', token: 'delimiter.curly' },
    { open: '[', close: ']', token: 'delimiter.square' },
    { open: '(', close: ')', token: 'delimiter.parenthesis' },
    { open: '<', close: '>', token: 'delimiter.angle' }
  ],

  keywords: [
    'abs',
    'assert',
    'block',
    'Bytes',
    'case',
    'Crypto',
    'Current',
    'else',
    'failwith',
    'false',
    'for',
    'fun',
    'if',
    'in',
    'let',
    'let%entry',
    'let%init',
    'List',
    'list',
    'Map',
    'map',
    'match',
    'match%nat',
    'mod',
    'not',
    'operation',
    'Operation',
    'of',
    'record',
    'Set',
    'set',
    'sender',
    'skip',
    'source',
    'String',
    'then',
    'to',
    'true',
    'type',
    'with'
  ],

  typeKeywords: ['int', 'unit', 'string', 'tz', 'nat', 'bool'],

  operators: [
    '=',
    '>',
    '<',
    '<=',
    '>=',
    '<>',
    ':',
    ':=',
    'and',
    'mod',
    'or',
    '+',
    '-',
    '*',
    '/',
    '@',
    '&',
    '^',
    '%',
    '->',
    '<-',
    '&&',
    '||'
  ],

  // we include these common regular expressions
  symbols: /[=><:@\^&|+\-*\/\^%]+/,

  // The main tokenizer for our languages
  tokenizer: {
    root: [
      // identifiers and keywords
      [
        /[a-zA-Z_][\w]*/,
        {
          cases: {
            '@keywords': { token: 'keyword.$0' },
            '@default': 'identifier'
          }
        }
      ],

      // whitespace
      { include: '@whitespace' },

      // delimiters and operators
      [/[{}()\[\]]/, '@brackets'],
      [/[<>](?!@symbols)/, '@brackets'],
      [
        /@symbols/,
        {
          cases: {
            '@operators': 'delimiter',
            '@default': ''
          }
        }
      ],

      // numbers
      [/\d*\.\d+([eE][\-+]?\d+)?/, 'number.float'],
      [/\$[0-9a-fA-F]{1,16}/, 'number.hex'],
      [/\d+/, 'number'],

      // delimiter: after number because of .\d floats
      [/[;,.]/, 'delimiter'],

      // strings
      [/'([^'\\]|\\.)*$/, 'string.invalid'], // non-terminated string
      [/'/, 'string', '@string'],

      // characters
      [/'[^\\']'/, 'string'],
      [/'/, 'string.invalid'],
      [/\#\d+/, 'string']
    ],

    // Block comments delimited by (* ... *).
    comment: [
      [/[^\(\*]+/, 'comment'],
      //[/\(\*/, 'comment', '@push' ], // nested comment not allowed :-(
      [/\*\)/, 'comment', '@pop'],
      [/\(\*/, 'comment']
    ],

    string: [
      [/[^\\']+/, 'string'],
      [/\\./, 'string.escape.invalid'],
      [/'/, { token: 'string.quote', bracket: '@close', next: '@pop' }]
    ],

    whitespace: [
      [/[ \t\r\n]+/, 'white'],
      [/\(\*/, 'comment', '@comment'],
      [/\/\/.*$/, 'comment']
    ]
  }
};
``` | /content/code_sandbox/src/basic-languages/cameligo/cameligo.ts | xml | 2016-06-07T16:56:31 | 2024-08-16T17:17:05 | monaco-editor | microsoft/monaco-editor | 39,508 | 1,257 |
```xml
<vector xmlns:android="path_to_url" android:height="34.0dp" android:tint="?attr/colorControlNormal" android:viewportHeight="15" android:viewportWidth="15" android:width="34.0dp">
<path android:fillColor="@android:color/white" android:pathData="M7 4.17L7 1L9 0L11 1L13 0L13 3L11 4L9 3L8 3.5L8 4.17L13.5 6L13.5 7L1.5 7L1.5 6L7 4.17ZM2.5 7.5L12.5 7.5L12.5 12L13.5 13L13.5 14L1.5 14L1.5 13L2.5 12L2.5 7.5ZM4 8.5L4 12.5L5 12.5L5 8.5L4 8.5ZM7 8.5L7 12.5L8 12.5L8 8.5L7 8.5ZM10 8.5L10 12.5L11 12.5L11 8.5L10 8.5Z"/>
</vector>
``` | /content/code_sandbox/app/src/main/res/drawable/ic_preset_temaki_town_hall.xml | xml | 2016-07-02T10:44:04 | 2024-08-16T18:55:54 | StreetComplete | streetcomplete/StreetComplete | 3,781 | 304 |
```xml
<menu xmlns:android="path_to_url" xmlns:app="path_to_url">
<item
android:id="@+id/action_select_root"
android:icon="@drawable/ic_action_save"
app:showAsAction="ifRoom"
android:title="@string/action_select_root_dir"/>
<!-- TODO: allow creating new folders -->
<!-- item
android:id="@+id/action_create_new_dir"
android:icon="@drawable/ic_content_new"
app:showAsAction="ifRoom"
android:title="@string/action_create_new_dir"/ -->
</menu>
``` | /content/code_sandbox/app/src/main/res/menu/select_root.xml | xml | 2016-10-14T02:54:01 | 2024-08-16T16:01:33 | MGit | maks/MGit | 1,193 | 121 |
```xml
/** Contract implemented by each supported terminal integration. */
export interface Terminal {
    /** Stable unique identifier for this terminal. */
    terminalId: string;
    /** Whether this terminal is enabled without explicit user opt-in. */
    isEnabledByDefault?: boolean;
    /** Human-readable display name of the terminal. */
    getTerminalName(): string;
    /** File name of the image asset representing this terminal. */
    getAssetFileName(): string;
    /** Launches the terminal, running the given command in it. */
    launchWithCommand(command: string): Promise<void>;
}
``` | /content/code_sandbox/src/main/Core/Terminal/Contract/Terminal.ts | xml | 2016-10-11T04:59:52 | 2024-08-16T11:53:31 | ueli | oliverschwendener/ueli | 3,543 | 44 |
```xml
import { PartialDeep } from 'type-fest';
import { cleanupNonNestedPath, isNotNestedPath, type TypedSchema, type TypedSchemaError } from 'vee-validate';
import {
InferOutput,
InferInput,
InferIssue,
BaseSchema,
BaseSchemaAsync,
safeParseAsync,
safeParse,
BaseIssue,
getDefault,
optional,
ArraySchema,
ObjectSchema,
ErrorMessage,
ArrayIssue,
ObjectEntries,
LooseObjectIssue,
ObjectIssue,
ObjectWithRestSchema,
ObjectWithRestIssue,
StrictObjectIssue,
StrictObjectSchema,
LooseObjectSchema,
getDotPath,
Config,
} from 'valibot';
import { isIndex, isObject, merge, normalizeFormPath } from '../../shared';
/**
 * Adapts a valibot schema (sync or async) to vee-validate's TypedSchema
 * interface.
 *
 * - `parse` validates asynchronously and maps valibot issues to per-path
 *   TypedSchemaError entries.
 * - `cast` attempts a synchronous parse; on failure it merges the schema's
 *   defaults under the given values (async schemas are returned unchanged).
 * - `describe` reports whether a field exists in the schema and whether it
 *   is required (i.e. not wrapped in `optional`).
 *
 * @param valibotSchema - the valibot schema to adapt
 * @param config - optional valibot parse configuration forwarded to safeParse
 */
export function toTypedSchema<
  TSchema extends
    | BaseSchema<unknown, unknown, BaseIssue<unknown>>
    | BaseSchemaAsync<unknown, unknown, BaseIssue<unknown>>,
  TInferOutput = InferOutput<TSchema>,
  TInferInput = PartialDeep<InferInput<TSchema>>,
>(valibotSchema: TSchema, config?: Config<InferIssue<TSchema>> ): TypedSchema<TInferInput, TInferOutput> {
  const schema: TypedSchema = {
    __type: 'VVTypedSchema',
    async parse(value) {
      const result = await safeParseAsync(valibotSchema, value, config);
      if (result.success) {
        return {
          value: result.output,
          errors: [],
        };
      }

      // Collect issues keyed by normalized field path.
      const errors: Record<string, TypedSchemaError> = {};
      processIssues(result.issues, errors);

      return {
        errors: Object.values(errors),
      };
    },
    cast(values) {
      // Async schemas cannot be parsed synchronously; return input untouched.
      if (valibotSchema.async) {
        return values;
      }

      const result = safeParse(valibotSchema, values, config);
      if (result.success) {
        return result.output;
      }

      // Parse failed: fall back to the schema defaults, overlaid with the
      // caller-provided values when both are plain objects.
      const defaults = getDefault(optional(valibotSchema));
      if (isObject(defaults) && isObject(values)) {
        return merge(defaults, values);
      }

      return values;
    },
    describe(path) {
      try {
        // No path means "describe the root schema".
        if (!path) {
          return {
            required: !queryOptional(valibotSchema),
            exists: true,
          };
        }

        const pathSchema = getSchemaForPath(path, valibotSchema);
        if (!pathSchema) {
          return {
            required: false,
            exists: false,
          };
        }

        return {
          required: !queryOptional(pathSchema),
          exists: true,
        };
      } catch {
        // Traversal errors degrade to a safe "unknown field" description.
        if (__DEV__) {
          console.warn(`Failed to describe path ${path} on the schema, returning a default description.`);
        }

        return {
          required: false,
          exists: false,
        };
      }
    },
  };

  return schema;
}
/**
 * Folds valibot issues into `errors`, keyed by normalized dot path.
 * Nested issues are recursed into before the parent message is recorded;
 * parents without a path only contribute their children.
 */
function processIssues(issues: BaseIssue<unknown>[], errors: Record<string, TypedSchemaError>): void {
  for (const issue of issues) {
    const path = normalizeFormPath(getDotPath(issue) || '');
    if (issue.issues) {
      const nested = issue.issues.flatMap(childIssue => childIssue.issues || []);
      processIssues(nested, errors);

      // A parent issue without its own path has nothing more to record.
      if (!path) {
        continue;
      }
    }

    if (!errors[path]) {
      errors[path] = { errors: [], path };
    }

    errors[path].errors.push(issue.message);
  }
}
/**
 * Resolves the sub-schema addressed by a form path like `a.b[0].c`.
 * Returns null when the path cannot be resolved (non-object root, missing
 * key, or index into a non-array schema).
 */
function getSchemaForPath(
  path: string,
  schema: BaseSchema<unknown, unknown, BaseIssue<unknown>> | BaseSchemaAsync<unknown, unknown, BaseIssue<unknown>>,
): BaseSchema<unknown, unknown, BaseIssue<unknown>> | null {
  if (!isObjectSchema(schema)) {
    return null;
  }

  // Paths wrapped as "non-nested" map directly to a single entry key.
  if (isNotNestedPath(path)) {
    return schema.entries[cleanupNonNestedPath(path)];
  }

  // Split "a.b[0].c" into ['a', 'b', '0', 'c'] (indices captured by the regex).
  const paths = (path || '').split(/\.|\[(\d+)\]/).filter(Boolean);

  let currentSchema: BaseSchema<unknown, unknown, BaseIssue<unknown>> = schema;
  // <= paths.length: the extra iteration returns currentSchema once the
  // segments are exhausted (guarded by the !p check below).
  for (let i = 0; i <= paths.length; i++) {
    const p = paths[i];
    if (!p || !currentSchema) {
      return currentSchema;
    }

    if (isObjectSchema(currentSchema)) {
      currentSchema = currentSchema.entries[p] || null;
      continue;
    }

    // Numeric segment into an array schema drills into its item schema.
    if (isIndex(p) && isArraySchema(currentSchema)) {
      currentSchema = currentSchema.item;
    }
  }

  return null;
}
/**
 * Whether the given schema node is a valibot `optional` wrapper,
 * i.e. the value it validates may be omitted.
 */
function queryOptional(
  schema: BaseSchema<unknown, unknown, BaseIssue<unknown>> | BaseSchemaAsync<unknown, unknown, BaseIssue<unknown>>,
): boolean {
  const { type } = schema;
  return type === 'optional';
}
/**
 * Type guard: detects a valibot array schema by the presence of its
 * `item` member (the schema applied to each element).
 */
function isArraySchema(
  schema: unknown,
): schema is ArraySchema<BaseSchema<unknown, unknown, BaseIssue<unknown>>, ErrorMessage<ArrayIssue> | undefined> {
  if (!isObject(schema)) {
    return false;
  }
  return 'item' in schema;
}
/**
 * Type guard: narrows to one of valibot's object-like schemas (loose,
 * plain, with-rest, or strict), all of which expose an `entries` map.
 */
function isObjectSchema(
  schema: unknown,
): schema is
  | LooseObjectSchema<ObjectEntries, ErrorMessage<LooseObjectIssue> | undefined>
  | ObjectSchema<ObjectEntries, ErrorMessage<ObjectIssue> | undefined>
  | ObjectWithRestSchema<
      ObjectEntries,
      BaseSchema<unknown, unknown, BaseIssue<unknown>>,
      ErrorMessage<ObjectWithRestIssue> | undefined
    >
  | StrictObjectSchema<ObjectEntries, ErrorMessage<StrictObjectIssue> | undefined> {
  if (!isObject(schema)) {
    return false;
  }
  return 'entries' in schema;
}
``` | /content/code_sandbox/packages/valibot/src/index.ts | xml | 2016-07-30T01:10:44 | 2024-08-16T10:19:58 | vee-validate | logaretm/vee-validate | 10,699 | 1,170 |
```xml
/*
* @license Apache-2.0
*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
// TypeScript Version: 4.1
/**
* Interface defining function options.
*/
interface Options {
/**
* Boolean indicating whether to create a capture group for the match.
*/
capture?: boolean;
/**
* Regular expression flags.
*/
flags?: string;
}
/**
* Interface for a regular expression to match a decimal number.
*/
interface ReDecimalNumber {
/**
* Returns a regular expression to match a decimal number.
*
* @param options - function options
* @param options.flags - regular expression flags (default: '')
* @param options.capture - boolean indicating whether to create a capture group for the match (default: false)
* @returns regular expression
*
* @example
* var RE_DECIMAL_NUMBER = reDecimalNumber();
*
* var bool = RE_DECIMAL_NUMBER.test( 'beep 1.0 boop' );
* // returns true
*
* @example
* var RE_DECIMAL_NUMBER = reDecimalNumber({
* 'capture': true
* });
*
* var bool = RE_DECIMAL_NUMBER.exec( '1.234' );
* // returns [ '1.234', '1.234' ]
*
* @example
* var RE_DECIMAL_NUMBER = reDecimalNumber({
* 'capture': false
* });
*
* var bool = RE_DECIMAL_NUMBER.exec( '1.234' );
* // returns [ '1.234' ]
*/
( options?: Options ): RegExp;
/**
* Regular expression to match a decimal number.
*
* @example
* var bool = reDecimalNumber.REGEXP.test( '2:3' );
* // returns false
*/
REGEXP: RegExp;
/**
* Regular expression to capture decimal number.
*
* @example
* var parts = reDecimalNumber.REGEXP_CAPTURE.exec( '1.234' );
* // returns [ '1.234', '1.234' ]
*/
REGEXP_CAPTURE: RegExp;
}
/**
* Returns a regular expression to match a decimal number.
*
* @param options - function options
* @param options.flags - regular expression flags (default: '')
* @param options.capture - boolean indicating whether to create a capture group for the match (default: false)
* @returns regular expression
*
* @example
* var RE_DECIMAL_NUMBER = reDecimalNumber();
*
* var bool = RE_DECIMAL_NUMBER.test( 'beep 1.0 boop' );
* // returns true
*
* @example
* var RE_DECIMAL_NUMBER = reDecimalNumber({
* 'flags': 'gm'
* });
* var bool = RE_DECIMAL_NUMBER.test( 'beep 1.0 boop' );
* // returns true
*
* @example
* var bool = reDecimalNumber.REGEXP.test( '2:3' );
* // returns false
*/
declare var reDecimalNumber: ReDecimalNumber;
// EXPORTS //
export = reDecimalNumber;
``` | /content/code_sandbox/lib/node_modules/@stdlib/regexp/decimal-number/docs/types/index.d.ts | xml | 2016-03-24T04:19:52 | 2024-08-16T09:03:19 | stdlib | stdlib-js/stdlib | 4,266 | 697 |
```xml
export * from './interface';
export * from './system-info.service';
export * from './replication.service';
export * from './RequestQueryParams';
export * from './scanning.service';
export * from './job-log.service';
export * from './project.service';
export * from './label.service';
export * from './permission.service';
export * from './permission-static';
``` | /content/code_sandbox/src/portal/src/app/shared/services/index.ts | xml | 2016-01-28T21:10:28 | 2024-08-16T15:28:34 | harbor | goharbor/harbor | 23,335 | 75 |
```xml
import * as React from 'react';
import { Form, FormDatepicker, FormButton } from '@fluentui/react-northstar';
// Demo form with a datepicker whose validation error is mirrored into the
// field's `errorMessage`; a successful date change clears the error.
const FormExampleDatepicker = () => {
  const [errorMessage, setErrorMessage] = React.useState(null);

  // Store the validation error reported by the datepicker.
  const handleDateError = (e, v) => setErrorMessage(v.error);
  // Any successful date change clears a previously shown error.
  const handleDateChange = () => setErrorMessage(null);

  return (
    <Form
      onSubmit={() => {
        alert('Form was submitted');
      }}
    >
      <FormDatepicker
        label="Select a date"
        errorMessage={errorMessage}
        onDateChangeError={handleDateError}
        onDateChange={handleDateChange}
      />
      <FormButton content="Submit" />
    </Form>
  );
};
export default FormExampleDatepicker;
``` | /content/code_sandbox/packages/fluentui/docs/src/examples/components/Form/Usage/FormExampleDatepicker.tsx | xml | 2016-06-06T15:03:44 | 2024-08-16T18:49:29 | fluentui | microsoft/fluentui | 18,221 | 176 |
```xml
/** @public */
export declare class MyClass {
}
export { }
``` | /content/code_sandbox/build-tests/api-extractor-scenarios/etc/projectFolderUrl/rollup.d.ts | xml | 2016-09-30T00:28:20 | 2024-08-16T18:54:35 | rushstack | microsoft/rushstack | 5,790 | 14 |
```xml
// Loosest possible callable signature; used where any function is accepted.
export type Func = (...args: any[]) => any;
// String-keyed bag of arbitrary values — presumably a runtime context
// object; TODO confirm intended key set against callers.
export type Ctx = Record<string, any>;
``` | /content/code_sandbox/packages/miniapp-runtime/src/interface/utils.ts | xml | 2016-11-03T06:59:15 | 2024-08-16T10:11:29 | ice | alibaba/ice | 17,815 | 23 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<!--
~
~
~ path_to_url
~
~ Unless required by applicable law or agreed to in writing, software
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-->
<resources>
<string name="mtrl_chip_close_icon_content_description">%1$s- </string>
</resources>
``` | /content/code_sandbox/lib/java/com/google/android/material/chip/res/values-ka/strings.xml | xml | 2016-12-05T16:11:29 | 2024-08-16T17:51:42 | material-components-android | material-components/material-components-android | 16,176 | 88 |
```xml
import { NotificationEventType } from "@shared/types";
import { Notification, User } from "@server/models";
import { DocumentUserEvent } from "@server/types";
import BaseTask, { TaskPriority } from "./BaseTask";
export default class DocumentAddUserNotificationsTask extends BaseTask<DocumentUserEvent> {
  /**
   * Creates an in-app notification for the user added to a document,
   * provided the recipient still exists, is not suspended, and is
   * subscribed to the "add user to document" notification event.
   */
  public async perform(event: DocumentUserEvent) {
    const recipient = await User.findByPk(event.userId);
    if (!recipient) {
      return;
    }

    const wantsNotification =
      !recipient.isSuspended &&
      recipient.subscribedToEventType(NotificationEventType.AddUserToDocument);
    if (!wantsNotification) {
      return;
    }

    await Notification.create({
      event: NotificationEventType.AddUserToDocument,
      userId: event.userId,
      actorId: event.actorId,
      teamId: event.teamId,
      documentId: event.documentId,
    });
  }

  // Run with background priority — not latency-sensitive.
  public get options() {
    return { priority: TaskPriority.Background };
  }
}
``` | /content/code_sandbox/server/queues/tasks/DocumentAddUserNotificationsTask.ts | xml | 2016-05-22T21:31:47 | 2024-08-16T19:57:22 | outline | outline/outline | 26,751 | 193 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="path_to_url"
android:height="34dp"
android:viewportHeight="34"
android:viewportWidth="33"
android:width="33dp">
<group
android:translateX="6.885517">
<path
android:fillColor="#F9C73D"
android:pathData="M25.6749453,23.2024709 C25.8138032,23.0636129 25.9182335,22.8977867
25.9899577,22.7078612 C25.9182335,22.897213 25.8138032,23.0636129
25.6749453,23.2024709 L25.6749453,23.2024709 L25.6749453,23.2024709 Z"
android:strokeWidth="1" />
<path
android:fillColor="#F9C73D"
android:pathData="M19.2444462,20.7879496 C17.9476738,19.4911772 17.0399332,18.0727607
16.6348353,16.8603359 C16.6394256,16.8729594 16.6417208,16.885009
16.6463111,16.8982063 C17.1581345,18.3900683 17.4983938,19.9352931
17.4983938,21.5120765 L17.4983938,25.7219963 C17.4983938,26.92065
16.6222118,27.9913479 15.4287222,28.104959 C14.0596519,28.23521
12.9080492,27.1622169 12.9080492,25.8206887 L12.9080492,24.6731025
L12.3342561,24.6731025 L11.760463,24.0993095 L11.760463,22.9517233
L12.3342561,22.3779302 L12.9080492,22.3779302 L12.9080492,20.656551
L12.3342561,20.656551 L11.760463,20.0827579 L11.760463,18.9351717
L12.3342561,18.3613786 L12.9080492,18.3613786 L12.9080492,16.6399994
L12.3342561,16.6399994 L11.760463,16.0662063 L11.760463,14.9186201
L12.3342561,14.3448271 L12.9080492,14.3448271 L12.9080492,12.6234478
L12.3342561,12.6234478 L11.760463,12.0496547 L11.760463,10.9020686
L12.3342561,10.3282755 L12.9080492,10.3282755 L12.9080492,8.60689624
L12.3342561,8.60689624 L11.760463,8.03310315 L11.760463,6.88551699
L12.3342561,6.31172391 L12.9080492,6.31172391 C12.9080492,4.53296535
12.2332685,2.9114261 11.1264217,1.68982063 C9.23519966,-0.397064813
6.00359702,-0.558300669 3.8403971,1.24513099 C3.02503713,1.92392821
1.99852131,2.29517233 0.938725483,2.29517233 L0.284027576,2.29517233
L0.284027576,5.73793082 L1.12922479,5.73793082 C2.07254061,5.73793082
2.99175713,5.44357497 3.75949228,4.89617637 L5.40054049,3.72621228
C6.95207699,2.61993921 9.22659277,3.50415435 9.44520793,5.39709773
C9.45783138,5.50669221 9.4641431,5.61743428 9.4641431,5.73047151
L9.4641431,25.5739577 C9.4641431,29.6777258 12.672794,33.199094
16.7759884,33.2782774 C21.1023882,33.3620512 24.6094115,29.7609258
24.3712874,25.401246 C24.3253839,24.5606391 24.8136818,23.8290529
25.5022335,23.3459191 C25.5636294,23.3028847 25.6210087,23.254686
25.6743715,23.2018971 C24.5658032,24.3116129 21.6865095,23.2305867
19.2444462,20.7879496 L19.2444462,20.7879496 Z"
android:strokeWidth="1" />
</group>
<path
android:fillColor="#C78829"
android:pathData="M26.1047163,26.3944818 L23.809544,26.3944818 C22.8604902,26.3944818
22.0881647,25.6221563 22.0881647,24.6731025 C22.0881647,23.7240488
22.8604902,22.9517233 23.809544,22.9517233 L26.1047163,22.9517233
C26.4220239,22.9517233 26.6785094,23.2082088 26.6785094,23.5255164
C26.6785094,23.842824 26.4220239,24.0993095 26.1047163,24.0993095
L23.809544,24.0993095 C23.4928102,24.0993095 23.2357509,24.3563688
23.2357509,24.6731025 C23.2357509,24.9898363 23.4928102,25.2468956
23.809544,25.2468956 L26.1047163,25.2468956 C26.4220239,25.2468956
26.6785094,25.5033811 26.6785094,25.8206887 C26.6785094,26.1379963
26.4220239,26.3944818 26.1047163,26.3944818 L26.1047163,26.3944818 Z"
android:strokeWidth="1" />
<path
android:fillColor="#C78829"
android:pathData="M17.49782,18.9351717 L15.2026477,18.9351717 C14.8853401,18.9351717
14.6288546,18.6786862 14.6288546,18.3613786 C14.6288546,18.0440711
14.8853401,17.7875856 15.2026477,17.7875856 L17.49782,17.7875856
C17.8151276,17.7875856 18.0716131,18.0440711 18.0716131,18.3613786
C18.0716131,18.6786862 17.8151276,18.9351717 17.49782,18.9351717
L17.49782,18.9351717 Z"
android:strokeWidth="1" />
<path
android:fillColor="#C78829"
android:pathData="M17.49782,21.230344 L15.2026477,21.230344 C14.8853401,21.230344
14.6288546,20.9738585 14.6288546,20.656551 C14.6288546,20.3392434
14.8853401,20.0827579 15.2026477,20.0827579 L17.49782,20.0827579
C17.8151276,20.0827579 18.0716131,20.3392434 18.0716131,20.656551
C18.0716131,20.9738585 17.8151276,21.230344 17.49782,21.230344
L17.49782,21.230344 Z"
android:strokeWidth="1" />
<path
android:fillColor="#C78829"
android:pathData="M17.49782,10.9020686 L15.2026477,10.9020686 C14.8853401,10.9020686
14.6288546,10.6455831 14.6288546,10.3282755 C14.6288546,10.0109679
14.8853401,9.7544824 15.2026477,9.7544824 L17.49782,9.7544824
C17.8151276,9.7544824 18.0716131,10.0109679 18.0716131,10.3282755
C18.0716131,10.6455831 17.8151276,10.9020686 17.49782,10.9020686
L17.49782,10.9020686 Z"
android:strokeWidth="1" />
<path
android:fillColor="#C78829"
android:pathData="M17.49782,13.1972409 L15.2026477,13.1972409 C14.8853401,13.1972409
14.6288546,12.9407554 14.6288546,12.6234478 C14.6288546,12.3061402
14.8853401,12.0496547 15.2026477,12.0496547 L17.49782,12.0496547
C17.8151276,12.0496547 18.0716131,12.3061402 18.0716131,12.6234478
C18.0716131,12.9407554 17.8151276,13.1972409 17.49782,13.1972409
L17.49782,13.1972409 Z"
android:strokeWidth="1" />
<path
android:fillColor="#DDB245"
android:pathData="M 28.1376955 12.52538858 C 29.7063214433 12.52538858 30.97794402 15.3255271833 30.97794402 18.7796732 C 30.97794402 22.2338192167 29.7063214433 25.03395782 28.1376955 25.03395782 C 26.5690695567 25.03395782 25.29744698 22.2338192167 25.29744698 18.7796732 C 25.29744698 15.3255271833 26.5690695567 12.52538858 28.1376955 12.52538858 Z"
android:strokeWidth="1" />
<path
android:fillColor="#C78829"
android:pathData="M 5.44816532 1.72137925 H 7.16954457 V 6.31172391 H 5.44816532 V 1.72137925 Z"
android:strokeWidth="1" />
<path
android:fillColor="#AF8066"
android:pathData="M 3.15299299 2.29517233 H 5.44816532 V 5.73793082 H 3.15299299 V 2.29517233 Z"
android:strokeWidth="1" />
<path
android:fillColor="#F9C73D"
android:pathData="M 3.15299299 5.73793082 L 0.284027576 5.73793082 L 0.284027576 4.01655158 L 3.15299299 2.29517233 Z"
android:strokeWidth="1" />
<path
android:fillColor="#DDB245"
android:pathData="M 20.3667855 8.60689624 L 20.9405785 8.03310315 L 20.9405785 6.88551699 L 20.3667855 6.31172391 L 19.2191993 6.31172391 L 18.6454062 6.88551699 L 18.6454062 8.03310315 L 19.2191993 8.60689624 Z"
android:strokeWidth="1" />
<path
android:fillColor="#DDB245"
android:pathData="M 20.3667855 12.6234478 L 20.9405785 12.0496547 L 20.9405785 10.9020686 L 20.3667855 10.3282755 L 19.2191993 10.3282755 L 18.6454062 10.9020686 L 18.6454062 12.0496547 L 19.2191993 12.6234478 Z"
android:strokeWidth="1" />
<path
android:fillColor="#DDB245"
android:pathData="M 20.3667855 16.6399994 L 20.9405785 16.0662063 L 20.9405785 14.9186201 L 20.3667855 14.3448271 L 19.2191993 14.3448271 L 18.6454062 14.9186201 L 18.6454062 16.0662063 L 19.2191993 16.6399994 Z"
android:strokeWidth="1" />
<path
android:fillColor="#DDB245"
android:pathData="M 20.3667855 20.656551 L 20.9405785 20.0827579 L 20.9405785 18.9351717 L 20.3667855 18.3613786 L 19.2191993 18.3613786 L 18.6454062 18.9351717 L 18.6454062 20.0827579 L 19.2191993 20.656551 Z"
android:strokeWidth="1" />
<path
android:fillColor="#DDB245"
android:pathData="M 20.3667855 24.6731025 L 20.9405785 24.0993095 L 20.9405785 22.9517233 L 20.3667855 22.3779302 L 19.2191993 22.3779302 L 18.6454062 22.9517233 L 18.6454062 24.0993095 L 19.2191993 24.6731025 Z"
android:strokeWidth="1" />
</vector>
``` | /content/code_sandbox/sample/src/main/res/drawable/ic_saxaphone.xml | xml | 2016-07-22T23:32:30 | 2024-08-07T09:37:58 | expandable-recycler-view | thoughtbot/expandable-recycler-view | 2,119 | 3,632 |
```xml
import assert from 'assert';
import { getScriptName } from '../src';
// Unit tests for `getScriptName(pkg, candidates)`: it returns the first
// candidate name present in `pkg.scripts`, or `null` when the package or
// its scripts are missing or no candidate matches.
describe('Test `getScriptName()`', () => {
  it('should return "vercel-*"', () => {
    const pkg = {
      scripts: {
        'vercel-dev': '',
        'vercel-build': '',
        dev: '',
        build: '',
      },
    };
    // "vercel-*" scripts win over the legacy "now-*" and base names.
    assert.equal(
      getScriptName(pkg, ['vercel-dev', 'now-dev', 'dev']),
      'vercel-dev'
    );
    assert.equal(
      getScriptName(pkg, ['vercel-build', 'now-build', 'build']),
      'vercel-build'
    );
    assert.equal(getScriptName(pkg, ['dev']), 'dev');
    assert.equal(getScriptName(pkg, ['build']), 'build');
  });
  it('should return "now-*"', () => {
    const pkg = {
      scripts: {
        'now-dev': '',
        'now-build': '',
        dev: '',
        build: '',
      },
    };
    // With no "vercel-*" entries, the legacy "now-*" names are picked next.
    assert.equal(
      getScriptName(pkg, ['vercel-dev', 'now-dev', 'dev']),
      'now-dev'
    );
    assert.equal(
      getScriptName(pkg, ['vercel-build', 'now-build', 'build']),
      'now-build'
    );
    assert.equal(getScriptName(pkg, ['dev']), 'dev');
    assert.equal(getScriptName(pkg, ['build']), 'build');
  });
  it('should return base script name', () => {
    const pkg = {
      scripts: {
        dev: '',
        build: '',
      },
    };
    assert.equal(getScriptName(pkg, ['dev']), 'dev');
    assert.equal(getScriptName(pkg, ['build']), 'build');
  });
  it('should return `null`', () => {
    // Missing package, empty package, and empty scripts all yield null.
    assert.equal(getScriptName(undefined, ['build']), null);
    assert.equal(getScriptName({}, ['build']), null);
    assert.equal(getScriptName({ scripts: {} }, ['build']), null);
    const pkg = {
      scripts: {
        dev: '',
        build: '',
      },
    };
    // No prefixed scripts exist, so prefixed-only lookups also yield null.
    assert.equal(getScriptName(pkg, ['vercel-dev', 'now-dev']), null);
    assert.equal(getScriptName(pkg, ['vercel-build', 'now-build']), null);
  });
});
``` | /content/code_sandbox/packages/build-utils/test/unit.get-script-name.test.ts | xml | 2016-09-09T01:12:08 | 2024-08-16T17:39:45 | vercel | vercel/vercel | 12,545 | 478 |
```xml
<project xmlns="path_to_url" xmlns:xsi="path_to_url"
xsi:schemaLocation="path_to_url path_to_url">
<parent>
<artifactId>ghosp-parent</artifactId>
<groupId>org.goshop</groupId>
<version>2.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>goshop-service-users</artifactId>
<packaging>jar</packaging>
<!-- -->
<dependencies>
<dependency>
<groupId>org.goshop</groupId>
<artifactId>goshop-facade-users</artifactId>
</dependency>
<dependency>
<groupId>org.goshop</groupId>
<artifactId>goshop-common-config</artifactId>
</dependency>
<dependency>
<groupId>org.goshop</groupId>
<artifactId>goshop-common-service</artifactId>
</dependency>
<dependency>
<groupId>org.goshop</groupId>
<artifactId>goshop-common-shiro</artifactId>
</dependency>
<dependency>
<groupId>org.goshop</groupId>
<artifactId>goshop-facade-email</artifactId>
</dependency>
<!-- Mybatis -->
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis</artifactId>
</dependency>
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis-spring</artifactId>
</dependency>
<dependency>
<groupId>com.github.miemiedev</groupId>
<artifactId>mybatis-paginator</artifactId>
</dependency>
<dependency>
<groupId>com.github.pagehelper</groupId>
<artifactId>pagehelper</artifactId>
</dependency>
<!-- MySql -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<!-- -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>dubbo</artifactId>
</dependency>
<dependency>
<groupId>com.github.sgroschupf</groupId>
<artifactId>zkclient</artifactId>
</dependency>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jms</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
</dependency>
<dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-all</artifactId>
</dependency>
<dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-pool</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
</includes>
<filtering>false</filtering>
</resource>
<resource>
<targetPath>${project.build.directory}/classes</targetPath>
<directory>src/main/resources</directory>
<filtering>true</filtering>
<includes>
<include>**/*.xml</include>
<include>**/*.properties</include>
</includes>
</resource>
<resource>
<targetPath>${project.build.directory}/classes/META-INF/spring</targetPath>
<directory>src/main/resources/spring</directory>
<filtering>true</filtering>
<includes>
<include>spring-context.xml</include>
</includes>
</resource>
</resources>
<plugins>
<!-- jarmanifestlibjar -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<classesDirectory>target/classes/</classesDirectory>
<archive>
<manifest>
<mainClass>com.alibaba.dubbo.container.Main</mainClass>
<!-- MANIFEST.MF -->
<useUniqueVersions>false</useUniqueVersions>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
</manifest>
<manifestEntries>
<Class-Path>.</Class-Path>
</manifestEntries>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>copy-dependencies</id>
<phase>package</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<type>jar</type>
<includeTypes>jar</includeTypes>
<outputDirectory>
${project.build.directory}/lib
</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
``` | /content/code_sandbox/goshop-service-users/pom.xml | xml | 2016-06-18T10:16:23 | 2024-08-01T09:11:36 | goshop2 | pzhgugu/goshop2 | 1,106 | 1,429 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13771" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="lzP-4L-1y3">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13772"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--Clear Table View Controller-->
<scene sceneID="SV1-s4-tLf">
<objects>
<tableViewController id="lzP-4L-1y3" customClass="ClearTableViewController" customModule="ClearTableViewCell" customModuleProvider="target" sceneMemberID="viewController">
<tableView key="view" clipsSubviews="YES" contentMode="scaleToFill" alwaysBounceVertical="YES" dataMode="prototypes" style="plain" separatorStyle="default" rowHeight="44" sectionHeaderHeight="28" sectionFooterHeight="28" id="kHq-aC-GbH">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<prototypes>
<tableViewCell clipsSubviews="YES" contentMode="scaleToFill" selectionStyle="default" indentationWidth="10" reuseIdentifier="tableCell" id="9XS-z5-AwB" customClass="TableViewCell" customModule="ClearTableViewCell" customModuleProvider="target">
<rect key="frame" x="0.0" y="28" width="375" height="44"/>
<autoresizingMask key="autoresizingMask"/>
<tableViewCellContentView key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" tableViewCell="9XS-z5-AwB" id="iuU-I0-clC">
<rect key="frame" x="0.0" y="0.0" width="375" height="43.5"/>
<autoresizingMask key="autoresizingMask"/>
</tableViewCellContentView>
</tableViewCell>
</prototypes>
<connections>
<outlet property="dataSource" destination="lzP-4L-1y3" id="cu5-5f-Ntj"/>
<outlet property="delegate" destination="lzP-4L-1y3" id="bmz-J6-Loz"/>
</connections>
</tableView>
</tableViewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="gQI-u0-W9u" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="853" y="339"/>
</scene>
</scenes>
</document>
``` | /content/code_sandbox/Project 11 - ClearTableViewCell/ClearTableViewCell/Base.lproj/Main.storyboard | xml | 2016-02-13T14:02:12 | 2024-08-16T09:41:59 | 30DaysofSwift | allenwong/30DaysofSwift | 11,506 | 748 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~
~ path_to_url
~
~ Unless required by applicable law or agreed to in writing, software
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-->
<project xmlns="path_to_url" xmlns:xsi="path_to_url" xsi:schemaLocation="path_to_url path_to_url">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.shardingsphere</groupId>
<artifactId>shardingsphere-features</artifactId>
<version>5.5.1-SNAPSHOT</version>
</parent>
<artifactId>shardingsphere-broadcast</artifactId>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<modules>
<module>api</module>
<module>core</module>
<module>distsql</module>
</modules>
</project>
``` | /content/code_sandbox/features/broadcast/pom.xml | xml | 2016-01-18T12:49:26 | 2024-08-16T15:48:11 | shardingsphere | apache/shardingsphere | 19,707 | 252 |
```xml
#!/usr/bin/env node
import meow from 'meow';
import React from 'react';
import {render} from 'ink';
import Ui from './ui.js';
// CLI definition: usage/help text plus the three supported flags
// (--upload/-u, --single-line, --json). `as const` keeps the flag
// shapes literal for meow's typed result.
const cli = meow(`
  Usage
    $ fast
    $ fast > file
  Options
    --upload, -u  Measure upload speed in addition to download speed
    --single-line  Reduce spacing and output to a single line
    --json  JSON output
  Examples
    $ fast --upload > file && cat file
    17 Mbps
    4.4 Mbps
    $ fast --upload --json
`, {
	importMeta: import.meta,
	flags: {
		upload: {
			type: 'boolean',
			shortFlag: 'u',
		},
		singleLine: {
			type: 'boolean',
		},
		json: {
			type: 'boolean',
		},
	} as const,
});
function App() {
return (
<Ui
singleLine={cli.flags.singleLine}
upload={cli.flags.upload}
json={cli.flags.json}
/>
);
}
const app = render(<App/>);
await app.waitUntilExit();
``` | /content/code_sandbox/source/cli.tsx | xml | 2016-05-18T21:24:15 | 2024-08-12T10:51:39 | fast-cli | sindresorhus/fast-cli | 2,574 | 251 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="path_to_url"
debug="false">
<!-- A time/date based rolling appender -->
<appender name="FILE" class="org.apache.log4j.RollingFileAppender">
<param name="File" value="logs/system.log" />
<param name="Append" value="true" />
<param name="ImmediateFlush" value="true" />
<param name="MaxFileSize" value="200MB" />
<param name="MaxBackupIndex" value="100" />
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %d{Z} [%t] %-5p (%F:%L) - %m%n" />
</layout>
</appender>
<appender name="journaldev-hibernate" class="org.apache.log4j.RollingFileAppender">
<param name="File" value="logs/project.log" />
<param name="Append" value="true" />
<param name="ImmediateFlush" value="true" />
<param name="MaxFileSize" value="200MB" />
<param name="MaxBackupIndex" value="50" />
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%d %d{Z} [%t] %-5p (%F:%L) - %m%n" />
</layout>
</appender>
<logger name="com.journaldev.hibernate" additivity="false">
<level value="DEBUG" />
<appender-ref ref="journaldev-hibernate" />
</logger>
<logger name="org.hibernate" additivity="false">
<level value="INFO" />
<appender-ref ref="FILE" />
</logger>
<root>
<priority value="INFO"></priority>
<appender-ref ref="FILE" />
</root>
</log4j:configuration>
``` | /content/code_sandbox/Hibernate/HibernateLog4JExample/log4j.xml | xml | 2016-05-02T05:43:21 | 2024-08-16T06:51:39 | journaldev | WebJournal/journaldev | 1,314 | 480 |
```xml
// Index file for "beta" utilities copied from the HIG.
// Will be combined into main index later.
export * from './activePage';
export * from './appInsightsHelper';
export * from './baseDefinition';
export * from './getEditUrl';
export * from './getNormalizedPath';
export * from './getPageFirstPlatform';
export * from './getSiteArea';
export * from './jumpToAnchor';
export * from './PlatformContext';
export * from './randomEntry';
export * from './redirects';
export * from './removeAnchorLink';
export * from './SiteDefinition.types';
export * from './string';
export * from './windowWidth';
export { getQueryParam } from '@fluentui/react-monaco-editor/lib/utilities/getQueryParam';
export { getSetting } from '@fluentui/react-monaco-editor/lib/utilities/settings';
``` | /content/code_sandbox/packages/react-docsite-components/src/utilities/index2.ts | xml | 2016-06-06T15:03:44 | 2024-08-16T18:49:29 | fluentui | microsoft/fluentui | 18,221 | 176 |
```xml
// Landing page shown after the middleware redirect from /about.
export default function RedirectedPage() {
  const heading = 'Redirected from /about';
  return <h1>{heading}</h1>;
}
``` | /content/code_sandbox/examples/middleware/app/redirected/page.tsx | xml | 2016-10-05T23:32:51 | 2024-08-16T19:44:30 | next.js | vercel/next.js | 124,056 | 25 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<ContentPage xmlns="path_to_url" xmlns:x="path_to_url" x:Class="Xamarin.Forms.Xaml.UnitTests.Gh6361">
<ContentPage.Resources>
<StyleSheet>
<![CDATA[
button {
background-color: red;
border-radius: 30;
}
frame {
background-color: red;
border-radius: 30;
}
]]>
</StyleSheet>
</ContentPage.Resources>
<StackLayout>
<Button />
<Frame />
</StackLayout>
</ContentPage>
``` | /content/code_sandbox/Xamarin.Forms.Xaml.UnitTests/Issues/Gh6361.xaml | xml | 2016-03-18T15:52:03 | 2024-08-16T16:25:43 | Xamarin.Forms | xamarin/Xamarin.Forms | 5,637 | 131 |
```xml
import { Column } from "../../../../../../src/decorator/columns/Column"
import { Entity } from "../../../../../../src/decorator/entity/Entity"
import { PrimaryGeneratedColumn } from "../../../../../../src"
// Test fixture entity exercising the Postgres enum column variants.
@Entity()
export class Post {
    @PrimaryGeneratedColumn()
    id: number
    // Plain enum column restricted to the listed values.
    @Column("enum", { enum: ["A", "B", "C"] })
    enum: string
    // Enum column stored as an array.
    @Column("enum", { enum: ["A", "B", "C"], array: true })
    enumArray: string[]
    // Enum array reusing an explicitly named enum type ("enum_array").
    @Column("enum", {
        enum: ["A", "B", "C"],
        enumName: "enum_array",
        array: true,
    })
    enumArray2: string[]
    // "simple-enum" variant with the same allowed values.
    @Column("simple-enum", { enum: ["A", "B", "C"] })
    simpleEnum: string
    @Column()
    name: string
}
``` | /content/code_sandbox/test/functional/database-schema/column-types/postgres-enum/entity/Post.ts | xml | 2016-02-29T07:41:14 | 2024-08-16T18:28:52 | typeorm | typeorm/typeorm | 33,875 | 192 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="15.0" xmlns="path_to_url">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release_vc6|Win32">
<Configuration>Release_vc6</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
<ClCompile Include="wrap_main.cpp" />
</ItemGroup>
<ItemGroup>
<ResourceCompile Include="testdll.rc" />
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{850CF34A-9AB8-4329-9E92-5CE428130335}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>wkexe</RootNamespace>
<WindowsTargetPlatformVersion>7.0</WindowsTargetPlatformVersion>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v141_xp</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v142</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release_vc6|Win32'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v140_xp</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v141_xp</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v142</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<Import Project="$(VCTargetsPath)\BuildCustomizations\masm.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release_vc6|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<LinkIncremental>true</LinkIncremental>
<IntDir>$(SolutionDir)..\out\tmp\$(ProjectName)\$(Configuration)\</IntDir>
<OutDir>$(SolutionDir)..\out\$(Configuration)\</OutDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release_vc6|Win32'">
<LinkIncremental>false</LinkIncremental>
<IntDir>$(SolutionDir)..\out\tmp\$(ProjectName)\$(Configuration)\</IntDir>
<OutDir>$(SolutionDir)..\out\$(Configuration)\</OutDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LinkIncremental>false</LinkIncremental>
<IntDir>$(SolutionDir)..\out\tmp\$(ProjectName)\$(Configuration)\</IntDir>
<OutDir>$(SolutionDir)..\out\$(Configuration)\</OutDir>
<IntDir>$(SolutionDir)..\bin\$(Configuration)\obj\$(ProjectName)\</IntDir>
<OutDir>$(SolutionDir)..\bin\$(Configuration)\</OutDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<LinkIncremental>true</LinkIncremental>
<OutDir>$(SolutionDir)..\out\$(Platform)$(Configuration)\</OutDir>
<IntDir>$(SolutionDir)..\out\tmp\$(Platform)$(ProjectName)\$(Configuration)\</IntDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<IntDir>$(SolutionDir)..\out\tmp\$(ProjectName)\$(Configuration)\</IntDir>
<OutDir>$(SolutionDir)..\out\$(Configuration)\</OutDir>
<GenerateManifest>false</GenerateManifest>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<GenerateManifest>false</GenerateManifest>
<OutDir>$(SolutionDir)..\out\$(Platform)$(Configuration)\</OutDir>
<IntDir>$(SolutionDir)..\out\tmp\$(Platform)$(ProjectName)\$(Configuration)\</IntDir>
<TargetName>mb_x64</TargetName>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>_CRT_SECURE_NO_WARNINGS;STATIC_GETOPT;WIN32;_DEBUG;_WINDOWS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<SDLCheck>true</SDLCheck>
<AdditionalIncludeDirectories>$(SolutionDir)../wke</AdditionalIncludeDirectories>
<CallingConvention>StdCall</CallingConvention>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<OutputFile>$(OutDir)$(TargetName)$(TargetExt)</OutputFile>
<AdditionalDependencies>Version.lib;Winmm.lib;psapi.lib;wininet.lib;Propsys.lib;shell32.lib;Shlwapi.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>$(SolutionDir)..\out\$(Configuration)\</AdditionalLibraryDirectories>
<ModuleDefinitionFile>$(SolutionDir)../mbvip/mtmb.def</ModuleDefinitionFile>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release_vc6|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<Optimization>Disabled</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;MB_V857=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<SDLCheck>true</SDLCheck>
<AdditionalIncludeDirectories>$(SolutionDir)../wke;E:\mycode\miniblink49\trunk\third_party\libcurl\include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<ControlFlowGuard>false</ControlFlowGuard>
</ClCompile>
    <Link>
      <SubSystem>Windows</SubSystem>
      <GenerateDebugInformation>true</GenerateDebugInformation>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
      <OptimizeReferences>true</OptimizeReferences>
      <AdditionalLibraryDirectories>$(SolutionDir)..\out\$(Configuration)\</AdditionalLibraryDirectories>
      <!-- NOTE(review): AdditionalDependencies and AdditionalLibraryDirectories
           are each defined twice in this element. MSBuild appears to keep the
           last occurrence, which would make the first dependency list below
           (the one containing miniblink.lib) dead - confirm which list is
           actually intended before cleaning this up. -->
      <AdditionalDependencies>imm32.lib;miniblink.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
      <AdditionalDependencies>version.lib;Crypt32.lib;Wldap32.lib;Imm32.lib;winmm.lib;Ws2_32.lib;Usp10.lib;Shlwapi.lib;wininet.lib;openssl.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib</AdditionalDependencies>
      <AdditionalLibraryDirectories>$(SolutionDir)..\out\$(Configuration)\</AdditionalLibraryDirectories>
      <IgnoreSpecificDefaultLibraries>
      </IgnoreSpecificDefaultLibraries>
      <!-- Output is renamed to mb_v857 in this legacy (VC6-compat) configuration. -->
      <OutputFile>$(OutDir)mb_v857.dll</OutputFile>
      <ProgramDatabaseFile>$(OutDir)mb_v857.pdb</ProgramDatabaseFile>
      <ModuleDefinitionFile>$(SolutionDir)../mbvip/mtmb.def</ModuleDefinitionFile>
    </Link>
<PostBuildEvent>
<Command>
</Command>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>STATIC_GETOPT;WIN32;_DEBUG;_WINDOWS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<SDLCheck>true</SDLCheck>
<AdditionalIncludeDirectories>$(SolutionDir)../wke</AdditionalIncludeDirectories>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalDependencies>imm32.lib;miniblink.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;version.lib;Crypt32.lib;Advapi32.lib;wldap32.lib;libcurl.lib;zlib.lib;ots.lib;Imm32.lib;libxml.lib;v8.lib;skia.lib;winmm.lib;Ws2_32.lib;Usp10.lib;Shlwapi.lib;wininet.lib;GDIPlus.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>$(SolutionDir)..\build\bin\$(Configuration)64\</AdditionalLibraryDirectories>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;STATIC_GETOPT;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>$(SolutionDir)../wke;E:\mycode\miniblink49\trunk\third_party\libcurl\include</AdditionalIncludeDirectories>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<StringPooling>true</StringPooling>
<FavorSizeOrSpeed>Size</FavorSizeOrSpeed>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
</ClCompile>
    <Link>
      <SubSystem>Windows</SubSystem>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
      <OptimizeReferences>true</OptimizeReferences>
      <!-- NOTE(review): AdditionalLibraryDirectories and AdditionalDependencies
           are each defined twice here; MSBuild appears to keep the last
           occurrence, so the first pair below looks dead. Also note the
           machine-specific absolute path (E:\mycode\...) in the surviving
           library-directory list - this will not resolve on other machines. -->
      <AdditionalLibraryDirectories>$(SolutionDir)..\out\$(Configuration)\</AdditionalLibraryDirectories>
      <AdditionalDependencies>imm32.lib;miniblink.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
      <AdditionalLibraryDirectories>$(SolutionDir)..\out\$(Configuration)\;E:\mycode\miniblink49\trunk\out\Release;</AdditionalLibraryDirectories>
      <AdditionalDependencies>libcurl.lib;zlib.lib;imm32.lib;node.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;version.lib;Crypt32.lib;Advapi32.lib;wldap32.lib;Imm32.lib;winmm.lib;Ws2_32.lib;Usp10.lib;Shlwapi.lib;wininet.lib;GDIPlus.lib</AdditionalDependencies>
      <OutputFile>$(OutDir)mb.dll</OutputFile>
      <ProgramDatabaseFile>$(OutDir)mb.pdb</ProgramDatabaseFile>
      <ModuleDefinitionFile>$(SolutionDir)../mbvip/mtmb.def</ModuleDefinitionFile>
    </Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;STATIC_GETOPT;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>$(SolutionDir)../wke</AdditionalIncludeDirectories>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<StringPooling>true</StringPooling>
<FavorSizeOrSpeed>Size</FavorSizeOrSpeed>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<EnableEnhancedInstructionSet>NotSet</EnableEnhancedInstructionSet>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
<AdditionalLibraryDirectories>$(SolutionDir)..\out\$(Platform)$(Configuration)\</AdditionalLibraryDirectories>
<AdditionalDependencies>imm32.lib;miniblink.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;version.lib;Crypt32.lib;Advapi32.lib;wldap32.lib;ots.lib;Imm32.lib;winmm.lib;Ws2_32.lib;Usp10.lib;Shlwapi.lib;wininet.lib;GDIPlus.lib;%(AdditionalDependencies)</AdditionalDependencies>
<IgnoreSpecificDefaultLibraries>
</IgnoreSpecificDefaultLibraries>
<OutputFile>$(OutDir)mb_x64.dll</OutputFile>
<ProgramDatabaseFile>$(OutDir)mb_x64.pdb</ProgramDatabaseFile>
<ModuleDefinitionFile>$(SolutionDir)../mbvip/mtmb.def</ModuleDefinitionFile>
</Link>
</ItemDefinitionGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<Import Project="$(VCTargetsPath)\BuildCustomizations\masm.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>
``` | /content/code_sandbox/build/mbvipwrap/mbvipwrap.vcxproj | xml | 2016-09-27T03:41:10 | 2024-08-16T10:42:57 | miniblink49 | weolar/miniblink49 | 7,069 | 4,060 |
```xml
// Minimal ambient typings for the untyped `metro-cache-key` package.
declare module 'metro-cache-key' {
  /** Returns a cache key string computed from the given file paths. */
  export default function getCacheKey(files: string[]): string;
}
``` | /content/code_sandbox/packages/@expo/cli/ts-declarations/metro-cache-key/index.d.ts | xml | 2016-08-15T17:14:25 | 2024-08-16T19:54:44 | expo | expo/expo | 32,004 | 23 |
```xml
<epp xmlns="urn:ietf:params:xml:ns:epp-1.0"
xmlns:xsi="path_to_url">
<response>
<result code="1000">
<msg>Command completed successfully</msg>
</result>
<resData>
<domain:infData
xmlns:domain="urn:ietf:params:xml:ns:domain-1.0">
<domain:name>example.com</domain:name>
<domain:roid>EXAMPLE1-REP</domain:roid>
<domain:status s="ok"/>
<domain:registrant>jd1234</domain:registrant>
<domain:contact type="admin">sh8013</domain:contact>
<domain:contact type="tech">sh8013</domain:contact>
<domain:ns>
<domain:hostObj>ns1.example.com</domain:hostObj>
<domain:hostObj>ns1.example.net</domain:hostObj>
</domain:ns>
<domain:host>ns1.example.com</domain:host>
<domain:host>ns2.example.com</domain:host>
<domain:clID>NewRegistrar</domain:clID>
<domain:crID>NewRegistrar</domain:crID>
<domain:crDate>2003-11-26T22:00:00.0Z</domain:crDate>
<domain:exDate>2005-11-26T22:00:00.0Z</domain:exDate>
<domain:authInfo>
<domain:pw>2fooBAR</domain:pw>
</domain:authInfo>
</domain:infData>
</resData>
<extension>
<rgp:infData xmlns:rgp="urn:ietf:params:xml:ns:rgp-1.0">
<rgp:rgpStatus s="addPeriod"/>
</rgp:infData>
</extension>
<trID>
<clTRID>ABC-12345</clTRID>
<svTRID>server-trid</svTRID>
</trID>
</response>
</epp>
``` | /content/code_sandbox/core/src/test/resources/google/registry/xjc/domain_info_response_addperiod.xml | xml | 2016-02-29T20:16:48 | 2024-08-15T19:49:29 | nomulus | google/nomulus | 1,685 | 469 |
```xml
import { Observable } from 'rxjs';
import { OrdersChart } from './orders-chart';
import { ProfitChart } from './profit-chart';

/** A titled numeric summary entry for the orders/profit chart. */
export interface OrderProfitChartSummary {
  title: string;
  value: number;
}

/**
 * Abstract data-service contract for the orders/profit chart.
 * Concrete implementations provide the observables consumed by the UI.
 */
export abstract class OrdersProfitChartData {
  /** Emits the list of summary entries (title/value pairs). */
  abstract getOrderProfitChartSummary(): Observable<OrderProfitChartSummary[]>;

  /** Emits orders chart data for the given period string. */
  abstract getOrdersChartData(period: string): Observable<OrdersChart>;

  /** Emits profit chart data for the given period string. */
  abstract getProfitChartData(period: string): Observable<ProfitChart>;
}
``` | /content/code_sandbox/src/app/@core/data/orders-profit-chart.ts | xml | 2016-05-25T10:09:03 | 2024-08-16T16:34:03 | ngx-admin | akveo/ngx-admin | 25,169 | 98 |
```xml
/**
* @see path_to_url
*/
export const input = {
$schema: 'path_to_url#',
title: 'JSON-stat 2.0 Schema',
id: 'path_to_url',
description: 'This is version 1.03 of the JSON-stat 2.0 Schema (2016-05-04)',
definitions: {
strarray: {
type: 'array',
items: {
type: 'string',
},
uniqueItems: true,
},
version: {
type: 'string',
enum: ['2.0'],
},
updated: {
oneOf: [
{type: 'string', format: 'date-time'},
{type: 'string', pattern: '^((19|20)\\d\\d)\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$'},
],
},
href: {
type: 'string',
format: 'uri',
},
label: {
type: 'string',
},
source: {
type: 'string',
},
extension: {
type: 'object',
},
error: {
type: 'array',
},
note: {$ref: '#/definitions/strarray'},
category: {
type: 'object',
properties: {
index: {
oneOf: [
{
$ref: '#/definitions/strarray',
},
{
type: 'object',
additionalProperties: {
type: 'number',
},
},
],
},
label: {
type: 'object',
additionalProperties: {
type: 'string',
},
},
note: {
type: 'object',
additionalProperties: {
$ref: '#/definitions/strarray',
},
},
unit: {
type: 'object',
additionalProperties: {
type: 'object',
properties: {
label: {
$ref: '#/definitions/label',
},
decimals: {
type: 'integer',
},
position: {
type: 'string',
enum: ['start', 'end'],
},
},
},
},
coordinates: {
type: 'object',
additionalProperties: {
type: 'array',
items: [{type: 'number'}, {type: 'number'}],
additionalItems: false,
},
},
child: {
type: 'object',
additionalProperties: {
$ref: '#/definitions/strarray',
},
},
},
additionalProperties: false,
},
link: {
type: 'object',
patternProperties: {
'^(about|alternate|appendix|archives|author|blocked-by|bookmark|canonical|chapter|collection|contents|copyright|create-form|current|derivedfrom|describedby|describes|disclosure|dns-prefetch|duplicate|edit|edit-form|edit-media|enclosure|first|glossary|help|hosts|hub|icon|index|item|last|latest-version|license|lrdd|memento|monitor|monitor-group|next|next-archive|nofollow|noreferrer|original|payment|pingback|preconnect|predecessor-version|prefetch|preload|prerender|prev|preview|previous|prev-archive|privacy-policy|profile|related|replies|search|section|self|service|start|stylesheet|subsection|successor-version|tag|terms-of-service|timegate|timemap|type|up|version-history|via|webmention|working-copy|working-copy-of)$':
{
type: 'array',
items: {
type: 'object',
properties: {
type: {
type: 'string',
},
class: {type: 'string', enum: ['dataset', 'collection', 'dimension']},
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {$ref: '#/definitions/link'},
updated: {$ref: '#/definitions/updated'},
source: {$ref: '#/definitions/source'},
extension: {$ref: '#/definitions/extension'},
category: {$ref: '#/definitions/category'},
id: {$ref: '#/definitions/strarray'},
size: {
type: 'array',
items: {
type: 'integer',
},
},
role: {
type: 'object',
properties: {
time: {$ref: '#/definitions/strarray'},
geo: {$ref: '#/definitions/strarray'},
metric: {$ref: '#/definitions/strarray'},
},
additionalProperties: false,
},
dimension: {
type: 'object',
additionalProperties: {
type: 'object',
properties: {
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {$ref: '#/definitions/link'},
extension: {$ref: '#/definitions/extension'},
category: {$ref: '#/definitions/category'},
},
additionalProperties: false,
required: ['category'],
},
},
value: {
oneOf: [
{
type: 'array',
items: {
anyOf: [{type: 'number'}, {type: 'null'}, {type: 'string'}],
},
},
{
type: 'object',
additionalProperties: {
anyOf: [{type: 'number'}, {type: 'null'}, {type: 'string'}],
},
},
],
},
status: {
oneOf: [
{
type: 'string',
},
{
type: 'array',
items: {
type: 'string',
},
},
{
type: 'object',
additionalProperties: {
type: 'string',
},
},
],
},
},
additionalProperties: false,
},
},
},
additionalProperties: false,
},
},
oneOf: [
{
type: 'object',
properties: {
class: {type: 'string', enum: ['dataset']},
version: {$ref: '#/definitions/version'},
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {$ref: '#/definitions/link'},
updated: {$ref: '#/definitions/updated'},
source: {$ref: '#/definitions/source'},
error: {$ref: '#/definitions/error'},
extension: {$ref: '#/definitions/extension'},
id: {$ref: '#/definitions/strarray'},
size: {
type: 'array',
items: {
type: 'integer',
},
},
role: {
type: 'object',
properties: {
time: {$ref: '#/definitions/strarray'},
geo: {$ref: '#/definitions/strarray'},
metric: {$ref: '#/definitions/strarray'},
},
additionalProperties: false,
},
dimension: {
type: 'object',
additionalProperties: {
type: 'object',
properties: {
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {$ref: '#/definitions/link'},
extension: {$ref: '#/definitions/extension'},
category: {$ref: '#/definitions/category'},
},
additionalProperties: false,
required: ['category'],
},
},
value: {
oneOf: [
{
type: 'array',
items: {
anyOf: [{type: 'number'}, {type: 'null'}, {type: 'string'}],
},
},
{
type: 'object',
additionalProperties: {
anyOf: [{type: 'number'}, {type: 'null'}, {type: 'string'}],
},
},
],
},
status: {
oneOf: [
{
type: 'string',
},
{
type: 'array',
items: {
type: 'string',
},
},
{
type: 'object',
additionalProperties: {
type: 'string',
},
},
],
},
},
additionalProperties: false,
required: ['version', 'class', 'value', 'id', 'size', 'dimension'],
},
{
type: 'object',
properties: {
class: {type: 'string', enum: ['dimension']},
version: {$ref: '#/definitions/version'},
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {$ref: '#/definitions/link'},
updated: {$ref: '#/definitions/updated'},
source: {$ref: '#/definitions/source'},
error: {$ref: '#/definitions/error'},
extension: {$ref: '#/definitions/extension'},
category: {$ref: '#/definitions/category'},
},
additionalProperties: false,
required: ['version', 'class', 'category'],
},
{
type: 'object',
properties: {
class: {type: 'string', enum: ['collection']},
version: {$ref: '#/definitions/version'},
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {
type: 'object',
properties: {
item: {
type: 'array',
items: {
type: 'object',
properties: {
type: {
type: 'string',
},
class: {type: 'string', enum: ['dataset', 'collection', 'dimension']},
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {$ref: '#/definitions/link'},
updated: {$ref: '#/definitions/updated'},
source: {$ref: '#/definitions/source'},
extension: {$ref: '#/definitions/extension'},
category: {$ref: '#/definitions/category'},
id: {$ref: '#/definitions/strarray'},
size: {
type: 'array',
items: {
type: 'integer',
},
},
role: {
type: 'object',
properties: {
time: {$ref: '#/definitions/strarray'},
geo: {$ref: '#/definitions/strarray'},
metric: {$ref: '#/definitions/strarray'},
},
additionalProperties: false,
},
dimension: {
type: 'object',
additionalProperties: {
type: 'object',
properties: {
href: {$ref: '#/definitions/href'},
label: {$ref: '#/definitions/label'},
note: {$ref: '#/definitions/note'},
link: {$ref: '#/definitions/link'},
extension: {$ref: '#/definitions/extension'},
category: {$ref: '#/definitions/category'},
},
additionalProperties: false,
required: ['category'],
},
},
value: {
oneOf: [
{
type: 'array',
items: {
anyOf: [{type: 'number'}, {type: 'null'}, {type: 'string'}],
},
},
{
type: 'object',
additionalProperties: {
anyOf: [{type: 'number'}, {type: 'null'}, {type: 'string'}],
},
},
],
},
status: {
oneOf: [
{
type: 'string',
},
{
type: 'array',
items: {
type: 'string',
},
},
{
type: 'object',
additionalProperties: {
type: 'string',
},
},
],
},
},
additionalProperties: false,
},
},
},
additionalProperties: false,
},
updated: {$ref: '#/definitions/updated'},
source: {$ref: '#/definitions/source'},
error: {$ref: '#/definitions/error'},
extension: {$ref: '#/definitions/extension'},
},
additionalProperties: false,
required: ['version', 'class', 'link'],
},
],
}
``` | /content/code_sandbox/test/e2e/realWorld.jsonStat.ts | xml | 2016-03-22T03:56:58 | 2024-08-12T18:37:05 | json-schema-to-typescript | bcherny/json-schema-to-typescript | 2,877 | 2,754 |
```xml
import Link from 'next/link'
// Shared Tailwind class strings reused across the footer markup below.
const twClasses = {
  footerLink: 'mb-1 text-sm text-default hover:underline',
  footerText: 'mb-1 text-sm text-default',
  footerTitle: 'font-semibold text-default mb-2',
}
// Landing-page footer: columns of Product, Community and contact links.
// Several sections (auth-aware account link, Slack, Resources) are kept
// commented out rather than deleted — presumably pending re-enablement;
// confirm before removing.
export default function Footer() {
  // const { authData } = useAuth()
  return (
    <footer className="container flex flex-row flex-wrap sm:justify-between md:justify-around py-4 mb-10">
      <div className="flex flex-col items-start w-1/2 sm:w-auto mb-4 sm:mr-3">
        <div className={twClasses.footerTitle}>Product</div>
        <Link href="/download">
          <a className={twClasses.footerLink}>Download</a>
        </Link>
        <Link href="/#features">
          <a className={twClasses.footerLink}>Features</a>
        </Link>
        {/* <Link href="/changelog">
        <a className={twClasses.footerLink}>Changelog</a>
      </Link> */}
        <Link href="/pricing">
          <a className={twClasses.footerLink}>Pricing</a>
        </Link>
        {/* <Link href="/account">
        <a className={twClasses.footerLink}>
          {authData &&
          authData.plan &&
          authData.plan.amount &&
          authData.plan.interval
            ? 'Manage subscription'
            : 'My account'}
        </a>
      </Link> */}
      </div>
      <div className="flex flex-col items-start w-1/2 sm:w-auto mb-4 sm:mr-3">
        <div className={twClasses.footerTitle}>Community</div>
        <a
          className={twClasses.footerLink}
          href="path_to_url"
          target="_blank"
          rel="noopener noreferrer"
        >
          Twitter
        </a>
        <a
          className={twClasses.footerLink}
          href="path_to_url"
          target="_blank"
          rel="noopener noreferrer"
        >
          GitHub
        </a>
        {/*
        <a
          className={twClasses.footerLink}
          href="path_to_url"
          target="_blank"
          rel="noopener noreferrer"
        >
          Slack
        </a>
        */}
      </div>
      {/* <div className="flex flex-col items-start w-1/2 sm:w-auto mb-4 sm:mr-3">
      <div className={twClasses.footerTitle}>Resources</div>
      <Link href="/terms">
        <a className={twClasses.footerLink}>Terms</a>
      </Link>
      <Link href="/privacy">
        <a className={twClasses.footerLink}>Privacy</a>
      </Link>
    </div> */}
      <div className="flex flex-col items-start w-1/2 sm:w-auto">
        <div className={twClasses.footerTitle}>Contact a human</div>
        <a
          className={twClasses.footerLink}
          href="path_to_url"
          target="_blank"
          rel="noopener noreferrer"
        >
          @devhub_app
        </a>
        <a
          className={twClasses.footerLink}
          href="path_to_url"
          target="_blank"
          rel="noopener noreferrer"
        >
          @brunolemos
        </a>
        {/* Email is split around a styled "@" span (likely simple scraper
            deterrence — confirm intent before simplifying). */}
        <span className={twClasses.footerText}>
          bruno<span className="text-default">@</span>devhubapp.com
        </span>
      </div>
    </footer>
  )
}
``` | /content/code_sandbox/landing/src/components/sections/footer/Footer.tsx | xml | 2016-11-30T23:24:21 | 2024-08-16T00:24:59 | devhub | devhubapp/devhub | 9,652 | 742 |
```xml
// This is technically correct, but
// we need to explicitly open/use both the request queue and the request list.
// We suggest using the request queue and batch add the requests instead.
import { RequestList, RequestQueue, PuppeteerCrawler } from 'crawlee';
// Prepare the sources array with URLs to visit (it can contain millions of URLs)
const sources = [
{ url: 'path_to_url' },
{ url: 'path_to_url' },
{ url: 'path_to_url' },
// ...
];
// Open the request list with the initial sources array
const requestList = await RequestList.open('my-list', sources);
// Open the default request queue. It's not necessary to add any requests to the queue
const requestQueue = await RequestQueue.open();
// The crawler will automatically process requests from the list and the queue.
// It's used the same way for Cheerio/Playwright crawlers
const crawler = new PuppeteerCrawler({
requestList,
requestQueue,
// Each request from the request list is enqueued to the request queue one by one.
// At this point request with the same URL would exist in the list and the queue
async requestHandler({ crawler, enqueueLinks }) {
// Add new request to the queue
await crawler.addRequests(['path_to_url']);
// Add links found on page to the queue
await enqueueLinks();
// The requests above would be added to the queue (but not to the list)
// and would be processed after the request list is empty.
// No more requests could be added to the list here
},
});
// Run the crawler
await crawler.run();
``` | /content/code_sandbox/docs/guides/request_storage_queue_list.ts | xml | 2016-08-26T18:35:03 | 2024-08-16T16:40:08 | crawlee | apify/crawlee | 14,153 | 350 |
```xml
/*
* This software is released under MIT license.
* The full license information can be found in LICENSE in the root directory of this project.
*/
import { renderIcon } from '../icon.renderer.js';
import { IconShapeTuple } from '../interfaces/icon.interfaces.js';
const icon = {
outline:
'<path d="M25,4H7.83A1.89,1.89,0,0,0,6,5.91V30.09A1.89,1.89,0,0,0,7.83,32H28.17A1.87,1.87,0,0,0,30,30.09V9ZM24,5.78,28.2,10H24ZM8,30V6H22v6h6V30Z"/><path d="M22,21.81a2.11,2.11,0,0,0-1.44.62l-5.72-2.66v-.44l5.66-2.65a2.08,2.08,0,1,0,.06-2.94h0a2.14,2.14,0,0,0-.64,1.48v.23l-5.64,2.66a2.08,2.08,0,1,0-.08,2.95l.08-.08,5.67,2.66v.3A2.09,2.09,0,1,0,22,21.84Z"/>',
solid:
'<path d="M25,4.06H7.83A1.89,1.89,0,0,0,6,6V30.15a1.89,1.89,0,0,0,1.83,1.91H28.17A1.87,1.87,0,0,0,30,30.15V9ZM22,26a2.09,2.09,0,0,1-2.1-2.08v-.3L14.27,21l-.08.08a2.08,2.08,0,1,1,.08-2.95l5.64-2.66v-.23a2.14,2.14,0,0,1,.64-1.48h0a2.08,2.08,0,1,1-.06,2.94l-5.66,2.65v.44l5.72,2.66A2.11,2.11,0,0,1,22,21.81l0,0A2.09,2.09,0,0,1,22,26Zm2-16V5.84l4.2,4.22Z"/>',
};
export const fileShare2IconName = 'file-share-2';
export const fileShare2Icon: IconShapeTuple = [fileShare2IconName, renderIcon(icon)];
``` | /content/code_sandbox/packages/core/src/icon/shapes/file-share-2.ts | xml | 2016-09-29T17:24:17 | 2024-08-11T17:06:15 | clarity | vmware-archive/clarity | 6,431 | 659 |
```xml
/**
* @file Email URL
* @module transform.email
* @author Surmon <path_to_url
*/
import qs from 'qs'
export interface EmailLinkOptions {
email: string
subject?: string
body?: string
}
/**
 * Builds a `mailto:` URL from either a plain address string or an
 * {@link EmailLinkOptions} object carrying an optional subject/body.
 * @returns the `mailto:` URL; a query string is appended only when
 *          subject/body content is actually present.
 */
export const emailLink = (email: string | EmailLinkOptions) => {
  if (typeof email === 'string') {
    return `mailto:${email}`
  }
  const { email: _email, ...content } = email
  const query = qs.stringify(content)
  // Fix: the previous version unconditionally appended '?', yielding a
  // dangling "mailto:user@host?" when no subject/body was provided.
  return query ? `mailto:${_email}?${query}` : `mailto:${_email}`
}
``` | /content/code_sandbox/src/transforms/email.ts | xml | 2016-09-23T02:02:33 | 2024-08-16T03:13:52 | surmon.me | surmon-china/surmon.me | 2,092 | 117 |
```xml
/**
 * Concatenates a list of Uint8Arrays into one newly allocated Uint8Array,
 * preserving chunk order. Returns an empty array for an empty input list.
 */
export const concatenateUint8Arrays = (arrays: Uint8Array[]) => {
  // First pass: size the destination buffer exactly.
  let total = 0
  for (const chunk of arrays) {
    total += chunk.length
  }
  const result = new Uint8Array(total)
  // Second pass: copy each chunk at its running offset.
  let cursor = 0
  for (const chunk of arrays) {
    result.set(chunk, cursor)
    cursor += chunk.length
  }
  return result
}
``` | /content/code_sandbox/packages/web/src/javascripts/Utils/ConcatenateUint8Arrays.ts | xml | 2016-12-05T23:31:33 | 2024-08-16T06:51:19 | app | standardnotes/app | 5,180 | 94 |
```xml
'use strict';
import { Disposable, ExtensionContext, TextEditorRevealType, Uri, ViewColumn, commands, window, workspace } from 'vscode';
import { CodeActionParams, WorkspaceEdit } from 'vscode-languageclient';
import { LanguageClient } from 'vscode-languageclient/node';
import { Commands } from './commands';
import { getActiveLanguageClient } from './extension';
import {
AccessorCodeActionParams,
AccessorCodeActionRequest,
AccessorKind,
AddOverridableMethodsRequest,
CheckConstructorStatusRequest,
CheckDelegateMethodsStatusRequest,
CheckHashCodeEqualsStatusRequest,
CheckToStringStatusRequest,
CleanupRequest,
GenerateAccessorsRequest,
GenerateConstructorsRequest,
GenerateDelegateMethodsRequest,
GenerateHashCodeEqualsRequest,
GenerateToStringRequest,
ImportCandidate, ImportSelection,
ListOverridableMethodsRequest,
OrganizeImportsRequest,
VariableBinding
} from './protocol';
import { applyWorkspaceEdit } from './standardLanguageClient';
export function registerCommands(languageClient: LanguageClient, context: ExtensionContext) {
registerOverrideMethodsCommand(languageClient, context);
registerHashCodeEqualsCommand(languageClient, context);
registerOrganizeImportsCommand(languageClient, context);
registerCleanupCommand(languageClient, context);
registerChooseImportCommand(context);
registerGenerateToStringCommand(languageClient, context);
registerGenerateAccessorsCommand(languageClient, context);
registerGenerateConstructorsCommand(languageClient, context);
registerGenerateDelegateMethodsCommand(languageClient, context);
}
function registerOverrideMethodsCommand(languageClient: LanguageClient, context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.OVERRIDE_METHODS_PROMPT, async (params: CodeActionParams) => {
const result = await languageClient.sendRequest(ListOverridableMethodsRequest.type, params);
if (!result || !result.methods || !result.methods.length) {
window.showWarningMessage('No overridable methods found in the super type.');
return;
}
result.methods.sort((a, b) => {
const declaringClass = a.declaringClass.localeCompare(b.declaringClass);
if (declaringClass !== 0) {
return declaringClass;
}
const methodName = a.name.localeCompare(b.name);
if (methodName !== 0) {
return methodName;
}
return a.parameters.length - b.parameters.length;
});
const quickPickItems = result.methods.map(method => {
return {
label: `${method.name}(${method.parameters.join(',')})`,
description: `${method.declaringClassType}: ${method.declaringClass}`,
picked: method.unimplemented,
originalMethod: method,
};
});
const selectedItems = await window.showQuickPick(quickPickItems, {
canPickMany: true,
placeHolder: `Select methods to override or implement in ${result.type}`
});
if (!selectedItems || !selectedItems.length) {
return;
}
const workspaceEdit = await languageClient.sendRequest(AddOverridableMethodsRequest.type, {
context: params,
overridableMethods: selectedItems.map((item) => item.originalMethod),
});
await applyWorkspaceEdit(workspaceEdit, languageClient);
await revealWorkspaceEdit(workspaceEdit, languageClient);
}));
}
function registerCleanupCommand(languageClient: LanguageClient, context: ExtensionContext): void {
// Only active when editorLangId == java
context.subscriptions.push(commands.registerCommand(Commands.MANUAL_CLEANUP, async () => {
const languageClient: LanguageClient | undefined = await getActiveLanguageClient();
const workspaceEdit = await languageClient.sendRequest(CleanupRequest.type, languageClient.code2ProtocolConverter.asTextDocumentIdentifier(window.activeTextEditor.document));
await applyWorkspaceEdit(workspaceEdit, languageClient);
}));
}
function registerHashCodeEqualsCommand(languageClient: LanguageClient, context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.HASHCODE_EQUALS_PROMPT, async (params: CodeActionParams) => {
const result = await languageClient.sendRequest(CheckHashCodeEqualsStatusRequest.type, params);
if (!result || !result.fields || !result.fields.length) {
window.showErrorMessage(`The operation is not applicable to the type ${result.type}.`);
return;
}
let regenerate = false;
if (result.existingMethods && result.existingMethods.length) {
const ans = await window.showInformationMessage(`Methods ${result.existingMethods.join(' and ')} already ${result.existingMethods.length === 1 ? 'exists' : 'exist'} in the Class '${result.type}'. `
+ 'Do you want to regenerate the implementation?', 'Regenerate', 'Cancel');
if (ans !== 'Regenerate') {
return;
}
regenerate = true;
}
const fieldItems = result.fields.map((field) => {
return {
label: `${field.name}: ${field.type}`,
picked: true,
originalField: field
};
});
const selectedFields = await window.showQuickPick(fieldItems, {
canPickMany: true,
placeHolder: 'Select the fields to include in the hashCode() and equals() methods.'
});
if (!selectedFields || !selectedFields.length) {
return;
}
const workspaceEdit = await languageClient.sendRequest(GenerateHashCodeEqualsRequest.type, {
context: params,
fields: selectedFields.map((item) => item.originalField),
regenerate
});
await applyWorkspaceEdit(workspaceEdit, languageClient);
await revealWorkspaceEdit(workspaceEdit, languageClient);
}));
}
function registerOrganizeImportsCommand(languageClient: LanguageClient, context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.ORGANIZE_IMPORTS, async (params: CodeActionParams) => {
const workspaceEdit = await languageClient.sendRequest(OrganizeImportsRequest.type, params);
await applyWorkspaceEdit(workspaceEdit, languageClient);
}));
}
function registerChooseImportCommand(context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.CHOOSE_IMPORTS, async (uri: string, selections: ImportSelection[], restoreExistingImports?: boolean) => {
const chosen: ImportCandidate[] = [];
const fileUri: Uri = Uri.parse(uri);
for (let i = 0; i < selections.length; i++) {
const selection: ImportSelection = selections[i];
// Move the cursor to the code line with ambiguous import choices.
await window.showTextDocument(fileUri, { preserveFocus: true, selection: selection.range, viewColumn: ViewColumn.One });
const candidates: ImportCandidate[] = selection.candidates;
const items = candidates.map((item) => {
return {
label: item.fullyQualifiedName,
origin: item
};
});
const fullyQualifiedName = candidates[0].fullyQualifiedName;
const typeName = fullyQualifiedName.substring(fullyQualifiedName.lastIndexOf(".") + 1);
const disposables: Disposable[] = [];
try {
const pick = await new Promise<any>((resolve, reject) => {
const input = window.createQuickPick();
input.title = restoreExistingImports ? "Add All Missing Imports" : "Organize Imports";
input.step = i + 1;
input.totalSteps = selections.length;
input.placeholder = `Choose type '${typeName}' to import`;
input.items = items;
disposables.push(
input.onDidChangeSelection(items => resolve(items[0])),
input.onDidHide(() => {
reject(undefined);
}),
input
);
input.show();
});
chosen.push(pick ? pick.origin : null);
} catch (err) {
break;
} finally {
disposables.forEach(d => d.dispose());
}
}
return chosen;
}));
}
function registerGenerateToStringCommand(languageClient: LanguageClient, context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.GENERATE_TOSTRING_PROMPT, async (params: CodeActionParams) => {
const result = await languageClient.sendRequest(CheckToStringStatusRequest.type, params);
if (!result) {
return;
}
if (result.exists) {
const ans = await window.showInformationMessage(`Method 'toString()' already exists in the Class '${result.type}'. `
+ 'Do you want to replace the implementation?', 'Replace', 'Cancel');
if (ans !== 'Replace') {
return;
}
}
let fields: VariableBinding[] = [];
if (result.fields && result.fields.length) {
const fieldItems = result.fields.map((field) => {
return {
label: `${field.name}: ${field.type}`,
picked: field.isSelected,
originalField: field
};
});
const selectedFields = await window.showQuickPick(fieldItems, {
canPickMany: true,
placeHolder: 'Select the fields to include in the toString() method.'
});
if (!selectedFields) {
return;
}
fields = selectedFields.map((item) => item.originalField);
}
const workspaceEdit = await languageClient.sendRequest(GenerateToStringRequest.type, {
context: params,
fields,
});
await applyWorkspaceEdit(workspaceEdit, languageClient);
await revealWorkspaceEdit(workspaceEdit, languageClient);
}));
}
function registerGenerateAccessorsCommand(languageClient: LanguageClient, context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.GENERATE_ACCESSORS_PROMPT, async (params: AccessorCodeActionParams) => {
await generateAccessors(languageClient, params);
}));
}
async function generateAccessors(languageClient: LanguageClient, params: AccessorCodeActionParams): Promise<void> {
const accessors = await languageClient.sendRequest(AccessorCodeActionRequest.type, params);
if (!accessors || !accessors.length) {
return;
}
const accessorItems = accessors.map((accessor) => {
const description = [];
if (accessor.generateGetter) {
description.push('getter');
}
if (accessor.generateSetter) {
description.push('setter');
}
return {
label: `${accessor.fieldName}: ${accessor.typeName}`,
description: (accessor.isStatic ? 'static ' : '')+ description.join(', '),
originalField: accessor,
};
});
let accessorsKind: string;
switch (params.kind) {
case AccessorKind.both:
accessorsKind = "getters and setters";
break;
case AccessorKind.getter:
accessorsKind = "getters";
break;
case AccessorKind.setter:
accessorsKind = "setters";
break;
default:
return;
}
const selectedAccessors = await window.showQuickPick(accessorItems, {
canPickMany: true,
placeHolder: `Select the fields to generate ${accessorsKind}`
});
if (!selectedAccessors || !selectedAccessors.length) {
return;
}
const workspaceEdit = await languageClient.sendRequest(GenerateAccessorsRequest.type, {
context: params,
accessors: selectedAccessors.map((item) => item.originalField),
});
await applyWorkspaceEdit(workspaceEdit, languageClient);
await revealWorkspaceEdit(workspaceEdit, languageClient);
}
function registerGenerateConstructorsCommand(languageClient: LanguageClient, context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.GENERATE_CONSTRUCTORS_PROMPT, async (params: CodeActionParams) => {
const status = await languageClient.sendRequest(CheckConstructorStatusRequest.type, params);
if (!status || !status.constructors || !status.constructors.length) {
return;
}
let selectedConstructors = status.constructors;
let selectedFields = [];
if (status.constructors.length > 1) {
const constructorItems = status.constructors.map((constructor) => {
return {
label: `${constructor.name}(${constructor.parameters.join(',')})`,
originalConstructor: constructor,
};
});
const selectedConstructorItems = await window.showQuickPick(constructorItems, {
canPickMany: true,
placeHolder: 'Select super class constructor(s).',
});
if (!selectedConstructorItems || !selectedConstructorItems.length) {
return;
}
selectedConstructors = selectedConstructorItems.map(item => item.originalConstructor);
}
if (status.fields.length) {
const fieldItems = status.fields.map((field) => {
return {
label: `${field.name}: ${field.type}`,
originalField: field,
picked: field.isSelected
};
});
const selectedFieldItems = await window.showQuickPick(fieldItems, {
canPickMany: true,
placeHolder: 'Select fields to initialize by constructor(s).',
});
if (!selectedFieldItems) {
return;
}
selectedFields = selectedFieldItems.map(item => item.originalField);
}
const workspaceEdit = await languageClient.sendRequest(GenerateConstructorsRequest.type, {
context: params,
constructors: selectedConstructors,
fields: selectedFields,
});
await applyWorkspaceEdit(workspaceEdit, languageClient);
await revealWorkspaceEdit(workspaceEdit, languageClient);
}));
}
function registerGenerateDelegateMethodsCommand(languageClient: LanguageClient, context: ExtensionContext): void {
context.subscriptions.push(commands.registerCommand(Commands.GENERATE_DELEGATE_METHODS_PROMPT, async (params: CodeActionParams) => {
const status = await languageClient.sendRequest(CheckDelegateMethodsStatusRequest.type, params);
if (!status || !status.delegateFields || !status.delegateFields.length) {
window.showWarningMessage("All delegatable methods are already implemented.");
return;
}
let selectedDelegateField = status.delegateFields[0];
if (status.delegateFields.length > 1) {
const fieldItems = status.delegateFields.map((delegateField) => {
return {
label: `${delegateField.field.name}: ${delegateField.field.type}`,
originalField: delegateField,
};
});
const selectedFieldItem = await window.showQuickPick(fieldItems, {
placeHolder: 'Select target to generate delegates for.',
});
if (!selectedFieldItem) {
return;
}
selectedDelegateField = selectedFieldItem.originalField;
}
const delegateEntryItems = selectedDelegateField.delegateMethods.map(delegateMethod => {
return {
label: `${selectedDelegateField.field.name}.${delegateMethod.name}(${delegateMethod.parameters.join(',')})`,
originalField: selectedDelegateField.field,
originalMethod: delegateMethod,
};
});
if (!delegateEntryItems.length) {
window.showWarningMessage("All delegatable methods are already implemented.");
return;
}
const selectedDelegateEntryItems = await window.showQuickPick(delegateEntryItems, {
canPickMany: true,
placeHolder: 'Select methods to generate delegates for.',
});
if (!selectedDelegateEntryItems || !selectedDelegateEntryItems.length) {
return;
}
const delegateEntries = selectedDelegateEntryItems.map(item => {
return {
field: item.originalField,
delegateMethod: item.originalMethod,
};
});
const workspaceEdit = await languageClient.sendRequest(GenerateDelegateMethodsRequest.type, {
context: params,
delegateEntries,
});
await applyWorkspaceEdit(workspaceEdit, languageClient);
await revealWorkspaceEdit(workspaceEdit, languageClient);
}));
}
async function revealWorkspaceEdit(workspaceEdit: WorkspaceEdit, languageClient: LanguageClient): Promise<void> {
const codeWorkspaceEdit = await languageClient.protocol2CodeConverter.asWorkspaceEdit(workspaceEdit);
if (!codeWorkspaceEdit) {
return;
}
for (const entry of codeWorkspaceEdit.entries()) {
await workspace.openTextDocument(entry[0]);
if (entry[1].length > 0) {
// reveal first available change of the workspace edit
window.activeTextEditor.revealRange(entry[1][0].range, TextEditorRevealType.InCenter);
break;
}
}
}
``` | /content/code_sandbox/src/sourceAction.ts | xml | 2016-08-12T15:02:43 | 2024-08-15T03:36:05 | vscode-java | redhat-developer/vscode-java | 2,064 | 3,396 |
```xml
import { sortPrependValues } from '../src/codegen.js';
describe('sortPrependValues', () => {
it('Should sort and use the correct order', () => {
const strings: string[] = [`import `, '/* comment */', `// This is a comment`];
const sorted = sortPrependValues(strings);
expect(sorted).toEqual(['/* comment */', `// This is a comment`, `import `]);
});
});
``` | /content/code_sandbox/packages/graphql-codegen-core/tests/prepend.spec.ts | xml | 2016-12-05T19:15:11 | 2024-08-15T14:56:08 | graphql-code-generator | dotansimha/graphql-code-generator | 10,759 | 96 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "path_to_url">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>com.jazzy.demoexpandingcollection</string>
<key>CFBundleName</key>
<string>DemoExpandingCollection</string>
<key>DocSetPlatformFamily</key>
<string>jazzy</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>index.html</string>
<key>isJavaScriptEnabled</key>
<true/>
<key>DashDocSetFamily</key>
<string>dashtoc</string>
</dict>
</plist>
``` | /content/code_sandbox/docs/docsets/DemoExpandingCollection.docset/Contents/Info.plist | xml | 2016-05-25T08:01:40 | 2024-08-15T06:32:12 | expanding-collection | Ramotion/expanding-collection | 5,551 | 191 |
```xml
/*
* @license Apache-2.0
*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
// TypeScript Version: 4.1
/* eslint-disable max-lines */
import HOURS_IN_DAY = require( '@stdlib/constants/time/hours-in-day' );
import HOURS_IN_WEEK = require( '@stdlib/constants/time/hours-in-week' );
import MILLISECONDS_IN_DAY = require( '@stdlib/constants/time/milliseconds-in-day' );
import MILLISECONDS_IN_HOUR = require( '@stdlib/constants/time/milliseconds-in-hour' );
import MILLISECONDS_IN_MINUTE = require( '@stdlib/constants/time/milliseconds-in-minute' );
import MILLISECONDS_IN_SECOND = require( '@stdlib/constants/time/milliseconds-in-second' );
import MILLISECONDS_IN_WEEK = require( '@stdlib/constants/time/milliseconds-in-week' );
import MINUTES_IN_DAY = require( '@stdlib/constants/time/minutes-in-day' );
import MINUTES_IN_HOUR = require( '@stdlib/constants/time/minutes-in-hour' );
import MINUTES_IN_WEEK = require( '@stdlib/constants/time/minutes-in-week' );
import MONTHS_IN_YEAR = require( '@stdlib/constants/time/months-in-year' );
import SECONDS_IN_DAY = require( '@stdlib/constants/time/seconds-in-day' );
import SECONDS_IN_HOUR = require( '@stdlib/constants/time/seconds-in-hour' );
import SECONDS_IN_MINUTE = require( '@stdlib/constants/time/seconds-in-minute' );
import SECONDS_IN_WEEK = require( '@stdlib/constants/time/seconds-in-week' );
/**
* Interface describing the `time` namespace.
*/
interface Namespace {
/**
* Number of hours in a day.
*
* @example
* var num = ns.HOURS_IN_DAY;
* // returns 24
*/
HOURS_IN_DAY: typeof HOURS_IN_DAY;
/**
* Number of hours in a week.
*
* @example
* var num = ns.HOURS_IN_WEEK;
* // returns 168
*/
HOURS_IN_WEEK: typeof HOURS_IN_WEEK;
/**
* Number of milliseconds in a day.
*
* @example
* var num = ns.MILLISECONDS_IN_DAY;
* // returns 86400000
*/
MILLISECONDS_IN_DAY: typeof MILLISECONDS_IN_DAY;
/**
* Number of milliseconds in an hour.
*
* @example
* var num = ns.MILLISECONDS_IN_HOUR;
* // returns 3600000
*/
MILLISECONDS_IN_HOUR: typeof MILLISECONDS_IN_HOUR;
/**
* Number of milliseconds in a minute.
*
* @example
* var num = ns.MILLISECONDS_IN_MINUTE;
* // returns 60000
*/
MILLISECONDS_IN_MINUTE: typeof MILLISECONDS_IN_MINUTE;
/**
* Number of milliseconds in a second.
*
* @example
* var num = ns.MILLISECONDS_IN_SECOND;
* // returns 1000
*/
MILLISECONDS_IN_SECOND: typeof MILLISECONDS_IN_SECOND;
/**
* Number of milliseconds in a week.
*
* @example
* var num = ns.MILLISECONDS_IN_WEEK;
* // returns 604800000
*/
MILLISECONDS_IN_WEEK: typeof MILLISECONDS_IN_WEEK;
/**
* Number of minutes in a day.
*
* @example
* var num = ns.MINUTES_IN_DAY;
* // returns 1440
*/
MINUTES_IN_DAY: typeof MINUTES_IN_DAY;
/**
* Number of minutes in an hour.
*
* @example
* var num = ns.MINUTES_IN_HOUR;
* // returns 60
*/
MINUTES_IN_HOUR: typeof MINUTES_IN_HOUR;
/**
* Number of minutes in a week.
*
* @example
* var num = ns.MINUTES_IN_WEEK;
* // returns 10080
*/
MINUTES_IN_WEEK: typeof MINUTES_IN_WEEK;
/**
* Number of months in a year.
*
* @example
* var num = ns.MONTHS_IN_YEAR;
* // returns 12
*/
MONTHS_IN_YEAR: typeof MONTHS_IN_YEAR;
/**
* Number of seconds in a day.
*
* @example
* var num = ns.SECONDS_IN_DAY;
* // returns 86400
*/
SECONDS_IN_DAY: typeof SECONDS_IN_DAY;
/**
* Number of seconds in an hour.
*
* @example
* var num = ns.SECONDS_IN_HOUR;
* // returns 3600
*/
SECONDS_IN_HOUR: typeof SECONDS_IN_HOUR;
/**
* Number of seconds in a minute.
*
* @example
* var num = ns.SECONDS_IN_MINUTE;
* // returns 60
*/
SECONDS_IN_MINUTE: typeof SECONDS_IN_MINUTE;
/**
* Number of seconds in a week.
*
* @example
* var num = ns.SECONDS_IN_WEEK;
* // returns 604800
*/
SECONDS_IN_WEEK: typeof SECONDS_IN_WEEK;
}
/**
* Time constants.
*/
declare var ns: Namespace;
// EXPORTS //
export = ns;
``` | /content/code_sandbox/lib/node_modules/@stdlib/constants/time/docs/types/index.d.ts | xml | 2016-03-24T04:19:52 | 2024-08-16T09:03:19 | stdlib | stdlib-js/stdlib | 4,266 | 1,126 |
```xml
import React, { ReactNode, useCallback, useMemo } from 'react';
import { ChoiceGroup, IChoiceGroupProps, Stack, Label, IChoiceGroupOption, useTheme, IChoiceGroupOptionStyles, concatStyleSets } from '@fluentui/react';
import { ValidationRule } from 'common';
import { ListItemEntity } from 'common/sharepoint';
import LiveUpdate from './LiveUpdate';
import { getCurrentValue, LiveType, setValue } from './LiveUtils';
import { Validation } from './Validation';
interface IProps<E extends ListItemEntity<any>, P extends keyof E> extends IChoiceGroupProps {
entity: E;
propertyName: P;
getKeyFromValue: (val: LiveType<E, P>) => string;
getTextFromValue: (val: LiveType<E, P>) => string;
getValueFromKey: (key: string) => LiveType<E, P>;
rules?: ValidationRule<E>[];
showValidationFeedback?: boolean;
updateField: (update: (data: E) => void, callback?: () => any) => void;
renderValue?: (val: LiveType<E, P>) => ReactNode;
}
const LiveChoiceGroup = <E extends ListItemEntity<any>, P extends keyof E>(props: IProps<E, P>) => {
const {
entity,
propertyName,
getKeyFromValue,
getTextFromValue,
getValueFromKey,
rules,
showValidationFeedback,
label,
required,
updateField,
renderValue,
options
} = props;
const value = getCurrentValue(entity, propertyName);
const updateValue = useCallback((val: LiveType<E, P>) => updateField(e => setValue(e, propertyName, val)), [updateField, propertyName]);
const renderValueCallback = useCallback((val: LiveType<E, P>) => <>{getTextFromValue(val)}</>, [getTextFromValue]);
const localRenderValue = renderValue || renderValueCallback;
const onChange = useCallback((ev, val: IChoiceGroupOption) => updateField(e => setValue(e, propertyName, getValueFromKey(val.key))), [updateField, propertyName, getValueFromKey]);
const { palette: { neutralLighterAlt } } = useTheme();
const fixHighContrastThemeStyle = useMemo(() => {
return {
root: { backgroundColor: neutralLighterAlt }
} as IChoiceGroupOptionStyles;
}, [neutralLighterAlt]);
options.forEach(option => option.styles = concatStyleSets(fixHighContrastThemeStyle, option.styles));
return (
<Validation entity={entity} rules={rules} active={showValidationFeedback}>
<LiveUpdate entity={entity} propertyName={propertyName} updateValue={updateValue} renderValue={localRenderValue}>
{(renderLiveUpdateMark) => <>
<Stack horizontal>
<Label required={required}>{label}</Label>
{renderLiveUpdateMark()}
</Stack>
<ChoiceGroup
{...props}
label={undefined}
selectedKey={getKeyFromValue(value)}
onChange={onChange}
/>
</>}
</LiveUpdate>
</Validation>
);
};
export default LiveChoiceGroup;
``` | /content/code_sandbox/samples/react-rhythm-of-business-calendar/src/common/components/LiveChoiceGroup.tsx | xml | 2016-08-30T17:21:43 | 2024-08-16T18:41:32 | sp-dev-fx-webparts | pnp/sp-dev-fx-webparts | 2,027 | 666 |
```xml
import type { NodePath, types } from 'next/dist/compiled/babel/core'
import type { PluginObj } from 'next/dist/compiled/babel/core'
export default function NextPageDisallowReExportAllExports(): PluginObj<any> {
return {
visitor: {
ImportDeclaration(path: NodePath<types.ImportDeclaration>) {
if (
[
'@next/font/local',
'@next/font/google',
'next/font/local',
'next/font/google',
].includes(path.node.source.value)
) {
const err = new SyntaxError(
`"next/font" requires SWC although Babel is being used due to a custom babel config being present.\nRead more: path_to_url`
)
;(err as any).code = 'BABEL_PARSE_ERROR'
;(err as any).loc =
path.node.loc?.start ?? path.node.loc?.end ?? path.node.loc
throw err
}
},
},
}
}
``` | /content/code_sandbox/packages/next/src/build/babel/plugins/next-font-unsupported.ts | xml | 2016-10-05T23:32:51 | 2024-08-16T19:44:30 | next.js | vercel/next.js | 124,056 | 206 |
```xml
<LinearLayout xmlns:android="path_to_url"
xmlns:app="path_to_url"
xmlns:tools="path_to_url"
android:id="@+id/fragment"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
app:layout_behavior="@string/appbar_scrolling_view_behavior">
<Button
android:id="@+id/bt_music"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="32dp"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Music - ListView - FixedHeader"/>
<Button
android:id="@+id/bt_food"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Food - RecyclerView - PureScrollMode"/>
<Button
android:id="@+id/bt_science"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Science - GridView - SinaHeader"/>
<Button
android:id="@+id/bt_photo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Photo - RecyclerView - BezierLayout"/>
<Button
android:id="@+id/bt_story"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Story - ScrollView - GoogleDotView"/>
<Button
android:id="@+id/bt_enjoy"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Dribbble - WebView - FloatRefresh"/>
<Button
android:id="@+id/bt_coordinate"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Photo - CoordinateLayout-Beta - FloatRefresh"/>
<Button
android:id="@+id/bt_normalView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text="Story - NormalView - PureScrollMode"/>
<Button
android:id="@+id/bt_test"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:textAllCaps="false"
android:text=" T E S T "/>
<android.support.design.widget.TabLayout
android:id="@+id/tab_layout"
app:tabBackground="@android:color/white"
app:tabMode="scrollable"
app:tabGravity="fill"
app:tabIndicatorHeight="3dp"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<android.support.v4.view.ViewPager
android:id="@+id/pager"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</LinearLayout>
``` | /content/code_sandbox/app/src/main/res/layout/content_main.xml | xml | 2016-03-02T12:11:56 | 2024-08-07T06:56:48 | TwinklingRefreshLayout | lcodecorex/TwinklingRefreshLayout | 3,995 | 760 |
```xml
export { NoRepositoriesView } from './no-repositories-view'
``` | /content/code_sandbox/app/src/ui/no-repositories/index.ts | xml | 2016-05-11T15:59:00 | 2024-08-16T17:00:41 | desktop | desktop/desktop | 19,544 | 14 |
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
~
~
~ path_to_url
~
~ Unless required by applicable law or agreed to in writing, software
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-->
<beans xmlns="path_to_url"
xmlns:xsi="path_to_url" xmlns:motan="path_to_url"
xsi:schemaLocation="path_to_url path_to_url
path_to_url path_to_url">
<!-- jar-->
<motan:registry regProtocol="zk" name="registry" address="127.0.0.1:2181"/>
<!-- motan -->
<motan:protocol default="true" name="motan" haStrategy="failfast" loadbalance="configurableWeight"/>
<!-- referer -->
<motan:basicReferer id="clientBasicConfig" group="aaa" registry="registry"
protocol="motan" throwException="false"/>
<!-- refererbeanid -->
<motan:referer id="motanDemoReferer"
interface="com.weibo.motan.demo.service.MotanDemoService"
connectTimeout="1000" requestTimeout="1000" basicReferer="clientBasicConfig"/>
</beans>
``` | /content/code_sandbox/motan-demo/motan-demo-client/src/main/resources/motan_demo_client_commandRegistry.xml | xml | 2016-04-20T10:56:17 | 2024-08-16T01:20:43 | motan | weibocom/motan | 5,882 | 279 |
```xml
import { packageName } from '../../util/pkg-name';
import { confirmOption, yesOption } from '../../util/arg-common';
export const gitCommand = {
name: 'git',
description: 'Manage your Git provider connections.',
arguments: [
{
name: 'command',
required: true,
},
],
subcommands: [
{
name: 'connect',
description:
'Connect your Vercel Project to your Git repository or provide the remote URL to your Git repository',
arguments: [
{
name: 'git url',
required: false,
},
],
options: [],
examples: [],
},
{
name: 'disconnect',
description: 'Disconnect the Git provider repository from your project',
arguments: [],
options: [],
examples: [],
},
],
options: [yesOption, { ...confirmOption, deprecated: true }],
examples: [
{
name: 'Connect your Vercel Project to your Git repository defined in your local .git config',
value: `${packageName} git connect`,
},
{
name: 'Connect your Vercel Project to a Git repository using the remote URL',
value: `${packageName} git connect path_to_url`,
},
{
name: 'Disconnect the Git provider repository',
value: `${packageName} git disconnect`,
},
],
} as const;
``` | /content/code_sandbox/packages/cli/src/commands/git/command.ts | xml | 2016-09-09T01:12:08 | 2024-08-16T17:39:45 | vercel | vercel/vercel | 12,545 | 295 |
```xml
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="path_to_url"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:paddingLeft="16dp"
android:paddingStart="16dp"
android:paddingRight="16dp"
android:paddingEnd="16dp"
android:orientation="horizontal" >
<ImageView android:id="@+id/account_row_icon"
android:layout_width="wrap_content"
android:layout_height="fill_parent"
android:paddingRight="8dip"
android:paddingEnd="8dip" />
<TextView xmlns:android="path_to_url"
android:id="@+id/account_row_text"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:textAppearance="?android:attr/textAppearanceListItem"
android:gravity="center_vertical"
android:minHeight="?android:listPreferredItemHeight" />
</LinearLayout>
``` | /content/code_sandbox/VirtualApp/lib/src/main/res/layout/choose_account_row.xml | xml | 2016-07-06T13:25:23 | 2024-08-16T16:38:22 | VirtualApp | asLody/VirtualApp | 10,107 | 213 |
```xml
export { Cell } from './Cell';
``` | /content/code_sandbox/packages/erxes-ui/src/components/richTextEditor/extensions/Table/Cell/index.ts | xml | 2016-11-11T06:54:50 | 2024-08-16T10:26:06 | erxes | erxes/erxes | 3,479 | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.