text
stringlengths
1
22.8M
```shell Bash history reverse search Aliasing ssh connections Clear the terminal instantly Quick `bash` shortcuts Useful aliasing in bash ```
RIMS may refer to: Institutions Rajendra Institute of Medical Sciences, in Ranchi, Jharkhand, India Rajiv Gandhi Institute of Medical Sciences (disambiguation), various institutes in India Regional Institute of Medical Sciences, a medical college and hospital in Manipur, India Research Institute for Mathematical Sciences, attached to Kyoto University in Japan Rhein-Main International Montessori School, a private school in Friedrichsdorf, Germany Risk and Insurance Management Society, a nonprofit organization dedicated to advancing risk management Rourkela Institute of Management Studies, a business school in Rourkela, Orissa, India Other uses RIMS, abbreviation for Royal Indian Marine Ship Regional Input–Output Modeling System Resonance Isolation Mounting System, the original rim mount for kit drums See also Rim (disambiguation)
```csharp
using SixLabors.ImageSharp.Memory;
using SixLabors.ImageSharp.PixelFormats;

namespace SixLabors.ImageSharp.Formats.Tiff.PhotometricInterpretation;

/// <summary>
/// The base class for photometric interpretation decoders.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
internal abstract class TiffBaseColorDecoder<TPixel>
    where TPixel : unmanaged, IPixel<TPixel>
{
    /// <summary>
    /// Decodes source raw pixel data using the current photometric interpretation.
    /// </summary>
    /// <param name="data">The buffer to read image data from.</param>
    /// <param name="pixels">The image buffer to write pixels to.</param>
    /// <param name="left">The x-coordinate of the left-hand side of the image block.</param>
    /// <param name="top">The y-coordinate of the top of the image block.</param>
    /// <param name="width">The width of the image block.</param>
    /// <param name="height">The height of the image block.</param>
    public abstract void Decode(ReadOnlySpan<byte> data, Buffer2D<TPixel> pixels, int left, int top, int width, int height);
}
```
```typescript
import type { HtmlProps } from './html-context.shared-runtime'
import type { ComponentType, JSX } from 'react'
import type { DomainLocale } from '../../server/config'
import type { Env } from '@next/env'
import type { IncomingMessage, ServerResponse } from 'http'
import type { NextRouter } from './router/router'
import type { ParsedUrlQuery } from 'querystring'
import type { PreviewData } from '../../types'
import type { COMPILER_NAMES } from './constants'
import type fs from 'fs'

export type NextComponentType<
  Context extends BaseContext = NextPageContext,
  InitialProps = {},
  Props = {},
> = ComponentType<Props> & {
  /**
   * Used for initial page load data population. Data returned from `getInitialProps` is serialized when server rendered.
   * Make sure to return plain `Object` without using `Date`, `Map`, `Set`.
   * @param context Context of `page`
   */
  getInitialProps?(context: Context): InitialProps | Promise<InitialProps>
}

export type DocumentType = NextComponentType<
  DocumentContext,
  DocumentInitialProps,
  DocumentProps
>

export type AppType<P = {}> = NextComponentType<
  AppContextType,
  P,
  AppPropsType<any, P>
>

export type AppTreeType = ComponentType<
  AppInitialProps & { [name: string]: any }
>

/**
 * Web vitals provided to _app.reportWebVitals by Core Web Vitals plugin developed by Google Chrome team.
 * path_to_url#integrated-web-vitals-reporting
 */
export const WEB_VITALS = ['CLS', 'FCP', 'FID', 'INP', 'LCP', 'TTFB'] as const

export type NextWebVitalsMetric = {
  id: string
  startTime: number
  value: number
  attribution?: { [key: string]: unknown }
} & (
  | {
      label: 'web-vital'
      name: (typeof WEB_VITALS)[number]
    }
  | {
      label: 'custom'
      name:
        | 'Next.js-hydration'
        | 'Next.js-route-change-to-render'
        | 'Next.js-render'
    }
)

export type Enhancer<C> = (Component: C) => C

export type ComponentsEnhancer =
  | {
      enhanceApp?: Enhancer<AppType>
      enhanceComponent?: Enhancer<NextComponentType>
    }
  | Enhancer<NextComponentType>

export type RenderPageResult = {
  html: string
  head?: Array<JSX.Element | null>
}

export type RenderPage = (
  options?: ComponentsEnhancer
) => DocumentInitialProps | Promise<DocumentInitialProps>

export type BaseContext = {
  res?: ServerResponse
  [k: string]: any
}

export type NEXT_DATA = {
  props: Record<string, any>
  page: string
  query: ParsedUrlQuery
  buildId: string
  assetPrefix?: string
  runtimeConfig?: { [key: string]: any }
  nextExport?: boolean
  autoExport?: boolean
  isFallback?: boolean
  isExperimentalCompile?: boolean
  dynamicIds?: (string | number)[]
  err?: Error & {
    statusCode?: number
    source?: typeof COMPILER_NAMES.server | typeof COMPILER_NAMES.edgeServer
  }
  gsp?: boolean
  gssp?: boolean
  customServer?: boolean
  gip?: boolean
  appGip?: boolean
  locale?: string
  locales?: string[]
  defaultLocale?: string
  domainLocales?: DomainLocale[]
  scriptLoader?: any[]
  isPreview?: boolean
  notFoundSrcPage?: string
}

/**
 * `Next` context
 */
export interface NextPageContext {
  /**
   * Error object if encountered during rendering
   */
  err?: (Error & { statusCode?: number }) | null
  /**
   * `HTTP` request object.
   */
  req?: IncomingMessage
  /**
   * `HTTP` response object.
   */
  res?: ServerResponse
  /**
   * Path section of `URL`.
   */
  pathname: string
  /**
   * Query string section of `URL` parsed as an object.
   */
  query: ParsedUrlQuery
  /**
   * `String` of the actual path including query.
   */
  asPath?: string
  /**
   * The currently active locale
   */
  locale?: string
  /**
   * All configured locales
   */
  locales?: string[]
  /**
   * The configured default locale
   */
  defaultLocale?: string
  /**
   * `Component` the tree of the App to use if needing to render separately
   */
  AppTree: AppTreeType
}

export type AppContextType<Router extends NextRouter = NextRouter> = {
  Component: NextComponentType<NextPageContext>
  AppTree: AppTreeType
  ctx: NextPageContext
  router: Router
}

export type AppInitialProps<PageProps = any> = {
  pageProps: PageProps
}

export type AppPropsType<
  Router extends NextRouter = NextRouter,
  PageProps = {},
> = AppInitialProps<PageProps> & {
  Component: NextComponentType<NextPageContext, any, any>
  router: Router
  __N_SSG?: boolean
  __N_SSP?: boolean
}

export type DocumentContext = NextPageContext & {
  renderPage: RenderPage
  defaultGetInitialProps(
    ctx: DocumentContext,
    options?: { nonce?: string }
  ): Promise<DocumentInitialProps>
}

export type DocumentInitialProps = RenderPageResult & {
  styles?: React.ReactElement[] | Iterable<React.ReactNode> | JSX.Element
}

export type DocumentProps = DocumentInitialProps & HtmlProps

/**
 * Next `API` route request
 */
export interface NextApiRequest extends IncomingMessage {
  /**
   * Object of `query` values from url
   */
  query: Partial<{ [key: string]: string | string[] }>
  /**
   * Object of `cookies` from header
   */
  cookies: Partial<{ [key: string]: string }>
  body: any
  env: Env
  draftMode?: boolean
  preview?: boolean
  /**
   * Preview data set on the request, if any
   */
  previewData?: PreviewData
}

/**
 * Send body of response
 */
type Send<T> = (body: T) => void

/**
 * Next `API` route response
 */
export type NextApiResponse<Data = any> = ServerResponse & {
  /**
   * Send data `any` data in response
   */
  send: Send<Data>
  /**
   * Send data `json` data in response
   */
  json: Send<Data>
  status: (statusCode: number) => NextApiResponse<Data>
  redirect(url: string): NextApiResponse<Data>
  redirect(status: number, url: string): NextApiResponse<Data>
  /**
   * Set draft mode
   */
  setDraftMode: (options: { enable: boolean }) => NextApiResponse<Data>
  /**
   * Set preview data for Next.js' prerender mode
   */
  setPreviewData: (
    data: object | string,
    options?: {
      /**
       * Specifies the number (in seconds) for the preview session to last for.
       * The given number will be converted to an integer by rounding down.
       * By default, no maximum age is set and the preview session finishes
       * when the client shuts down (browser is closed).
       */
      maxAge?: number
      /**
       * Specifies the path for the preview session to work under. By default,
       * the path is considered the "default path", i.e., any pages under "/".
       */
      path?: string
    }
  ) => NextApiResponse<Data>
  /**
   * Clear preview data for Next.js' prerender mode
   */
  clearPreviewData: (options?: { path?: string }) => NextApiResponse<Data>
  /**
   * Revalidate a specific page and regenerate it using On-Demand Incremental
   * Static Regeneration.
   * The path should be an actual path, not a rewritten path. E.g. for
   * "/blog/[slug]" this should be "/blog/post-1".
   * @link path_to_url#on-demand-revalidation
   */
  revalidate: (
    urlPath: string,
    opts?: {
      unstable_onlyGenerated?: boolean
    }
  ) => Promise<void>
}

/**
 * Next `API` route handler
 */
export type NextApiHandler<T = any> = (
  req: NextApiRequest,
  res: NextApiResponse<T>
) => unknown | Promise<unknown>

/**
 * Utils
 */

// Wraps `fn` so that only the first call executes; later calls replay the
// memoized result of that first call (arguments of later calls are ignored).
export function execOnce<T extends (...args: any[]) => ReturnType<T>>(
  fn: T
): T {
  let used = false
  let result: ReturnType<T>
  return ((...args: any[]) => {
    if (!used) {
      used = true
      result = fn(...args)
    }
    return result
  }) as T
}

// Scheme: path_to_url#section-3.1
// Absolute URL: path_to_url#section-4.3
const ABSOLUTE_URL_REGEX = /^[a-zA-Z][a-zA-Z\d+\-.]*?:/
export const isAbsoluteUrl = (url: string) => ABSOLUTE_URL_REGEX.test(url)

// NOTE(review): browser-only — reads window.location; will throw on the server.
export function getLocationOrigin() {
  const { protocol, hostname, port } = window.location
  return `${protocol}//${hostname}${port ? ':' + port : ''}`
}

// Returns the path+query portion of the current location (href minus origin).
export function getURL() {
  const { href } = window.location
  const origin = getLocationOrigin()
  return href.substring(origin.length)
}

export function getDisplayName<P>(Component: ComponentType<P>) {
  return typeof Component === 'string'
    ? Component
    : Component.displayName || Component.name || 'Unknown'
}

export function isResSent(res: ServerResponse) {
  return res.finished || res.headersSent
}

export function normalizeRepeatedSlashes(url: string) {
  const urlParts = url.split('?')
  const urlNoQuery = urlParts[0]

  return (
    urlNoQuery
      // first we replace any non-encoded backslashes with forward
      // then normalize repeated forward slashes
      .replace(/\\/g, '/')
      .replace(/\/\/+/g, '/') +
    (urlParts[1] ? `?${urlParts.slice(1).join('?')}` : '')
  )
}

export async function loadGetInitialProps<
  C extends BaseContext,
  IP = {},
  P = {},
>(App: NextComponentType<C, IP, P>, ctx: C): Promise<IP> {
  if (process.env.NODE_ENV !== 'production') {
    if (App.prototype?.getInitialProps) {
      const message = `"${getDisplayName(
        App
      )}.getInitialProps()" is defined as an instance method - visit path_to_url for more information.`
      throw new Error(message)
    }
  }
  // when called from _app `ctx` is nested in `ctx`
  const res = ctx.res || (ctx.ctx && ctx.ctx.res)

  if (!App.getInitialProps) {
    if (ctx.ctx && ctx.Component) {
      // @ts-ignore pageProps default
      return {
        pageProps: await loadGetInitialProps(ctx.Component, ctx.ctx),
      }
    }
    return {} as IP
  }

  const props = await App.getInitialProps(ctx)

  // if the response was already sent, skip the sanity checks below
  if (res && isResSent(res)) {
    return props
  }

  if (!props) {
    const message = `"${getDisplayName(
      App
    )}.getInitialProps()" should resolve to an object. But found "${props}" instead.`
    throw new Error(message)
  }

  if (process.env.NODE_ENV !== 'production') {
    if (Object.keys(props).length === 0 && !ctx.ctx) {
      console.warn(
        `${getDisplayName(
          App
        )} returned an empty object from \`getInitialProps\`. This de-optimizes and prevents automatic static optimization. path_to_url`
      )
    }
  }

  return props
}

// SP: performance API available; ST: user-timing methods also available.
export const SP = typeof performance !== 'undefined'
export const ST =
  SP &&
  (['mark', 'measure', 'getEntriesByName'] as const).every(
    (method) => typeof performance[method] === 'function'
  )

export class DecodeError extends Error {}
export class NormalizeError extends Error {}

export class PageNotFoundError extends Error {
  code: string

  constructor(page: string) {
    super()
    this.code = 'ENOENT'
    this.name = 'PageNotFoundError'
    this.message = `Cannot find module for page: ${page}`
  }
}

export class MissingStaticPage extends Error {
  constructor(page: string, message: string) {
    super()
    this.message = `Failed to load static file for page: ${page} ${message}`
  }
}

export class MiddlewareNotFoundError extends Error {
  code: string
  constructor() {
    super()
    this.code = 'ENOENT'
    this.message = `Cannot find the middleware module`
  }
}

// Minimal filesystem surface used by the incremental cache.
export interface CacheFs {
  existsSync: typeof fs.existsSync
  readFile: typeof fs.promises.readFile
  readFileSync: typeof fs.readFileSync
  writeFile(f: string, d: any): Promise<void>
  mkdir(dir: string): Promise<void | string>
  stat(f: string): Promise<{ mtime: Date }>
}

export function stringifyError(error: Error) {
  return JSON.stringify({ message: error.message, stack: error.stack })
}
```
```python
#!/usr/local/bin/python3
# Network regression test: send a fragmented ICMPv6 echo request whose first
# (highest-offset) fragment would end beyond the maximum IPv6 packet size,
# and verify the target still answers the valid echo correctly.

print("send first ping6 fragment that ends behind ipv6 max packet size")

# Fragment layout relative to the 64k limit (first fragment sent lands
# past IPV6_MAXPACKET):
# IPV6_MAXPACKET |
#           |----|
#       |--------|
#           |----|

import os
from addr import *
from scapy.all import *

pid=os.getpid()
# derive the echo/fragment ids from the pid so parallel runs don't collide
eid=pid & 0xffff
payload=b"ABCDEFGHIJKLMNOP"
packet=IPv6(src=LOCAL_ADDR6, dst=REMOTE_ADDR6)/ \
    ICMPv6EchoRequest(id=eid, data=payload)
frag=[]
fid=pid & 0xffffffff
# offset=8191 (units of 8 bytes) pushes this 8-byte tail past the 65535-byte
# packet limit — the piece the target is expected to reject
frag.append(IPv6ExtHdrFragment(nh=58, id=fid, offset=8191)/bytes(packet)[56:64])
# head fragment (m=1: more fragments follow) and the valid tail at offset 2
frag.append(IPv6ExtHdrFragment(nh=58, id=fid, m=1)/bytes(packet)[40:56])
frag.append(IPv6ExtHdrFragment(nh=58, id=fid, offset=2)/bytes(packet)[56:64])
eth=[]
for f in frag:
    pkt=IPv6(src=LOCAL_ADDR6, dst=REMOTE_ADDR6)/f
    eth.append(Ether(src=LOCAL_MAC, dst=REMOTE_MAC)/pkt)

# child process sends after a short delay so the parent's sniff() below is
# already listening when the fragments go out
if os.fork() == 0:
    time.sleep(1)
    sendp(eth, iface=LOCAL_IF)
    os._exit(0)

ans=sniff(iface=LOCAL_IF, timeout=3, filter=
    "ip6 and src "+REMOTE_ADDR6+" and dst "+LOCAL_ADDR6+" and icmp6")
for a in ans:
    if a and a.type == ETH_P_IPV6 and \
        ipv6nh[a.payload.nh] == 'ICMPv6' and \
        icmp6types[a.payload.payload.type] == 'Echo Reply':
        id=a.payload.payload.id
        print("id=%#x" % (id))
        if id != eid:
            print("WRONG ECHO REPLY ID")
            exit(2)
        data=a.payload.payload.data
        print("payload=%s" % (data))
        # exit 0 only when the echoed payload matches what we sent
        if data == payload:
            exit(0)
        print("PAYLOAD!=%s" % (payload))
        exit(1)
print("NO ECHO REPLY")
exit(2)
```
```java
public class LoopSum {

    /** Returns the sum of every element of {@code arr}; 0 for an empty array. */
    private static int sum(int[] arr) {
        int total = 0;
        for (int value : arr) {
            total += value;
        }
        return total;
    }

    public static void main(String[] args) {
        System.out.println(sum(new int[]{1, 2, 3, 4})); // 10
    }
}
```
My Fair Brady is an American reality television series on VH1 that followed Christopher Knight, who played Peter Brady on The Brady Bunch, and Adrianne Curry, who won the first season of America's Next Top Model, a year after they met and fell in love on the reality show The Surreal Life. The show appeared to have originated from a Season 4 episode of The Surreal Life, during which each cast member pitched a TV show idea to network executives. Adrianne's concept, which she called Beauty and the Brady, was a show about her and Knight's then-fledgling romance, in which she would attempt to convince Knight to marry her. Within that episode, Adrianne's idea was passed over in favor of that of castmate Da Brat, but the latter's show never came to fruition. Season 1 The first season was filmed after Surreal Life Season 4 finished. In the last episode of that season, Knight had tentatively decided to start a relationship with Curry; nevertheless, during almost all the episodes he refused to formalize a relationship of any kind with her, acknowledging the difference in their ages. Some memorable moments are Florence Henderson visiting the couple, Curry partying with her best friend, and wearing a wedding dress without Knight's knowledge, who just watched her looking surprised. At the end of the first season, Christopher proposed to Adrianne. Season 2 Entitled My Fair Brady: We're Getting Married!, the second season covered Christopher Knight and Adrianne Curry's preparations for the wedding. My Fair Brady: We're Getting Married! premiered on-air May 28, 2006. The wedding episode aired on July 23, 2006. Some of the notable parts of the second season are Adrianne meeting her father-in-law, Christopher meeting Adrianne's divorced parents, Knight getting drunk in front of Adrianne's mother, Christopher and Adrianne's Bachelor's farewell party, and an incident that almost causes the cancellation of the wedding. Season 3 Titled My Fair Brady... 
Maybe Baby, the third season began on January 20, 2008. In the second episode, after Adrianne gave Chris a birthday present of nude photos of her with a female friend, he came to think Adrianne was a lesbian and left her, asking for a separation. The two were reconciled in the next episode. The season focused on Adrianne's plans to have plastic surgery to make her breasts symmetrical and Christopher's cross-interests decision that he wants to start a family with her. The couple argue about this and even close friends and family members frankly advise them not to have any kids at present. Adrianne does have the surgery and it goes well. But in the season finale, the couple go to Hawaii for a second honeymoon and Adrianne decides to throw out her birth control pills in front of Christopher. DVD releases The first two seasons have been released on DVD in Australia by Shock Records. References External links official website (archive link) My Fair Brady: We're Getting Married! on VH1.com VH1 original programming 2005 American television series debuts 2008 American television series endings 2000s American reality television series American dating and relationship reality television series American television spin-offs Reality television spin-offs The Brady Bunch The Surreal Life spinoffs English-language television shows Television series by Endemol Television series by Lionsgate Television
```css Use `box-sizing` to define an element's `width` and `height` properties Hide the scrollbar in webkit browser Determine the opacity of background-colors using the RGBA declaration Difference between `initial` and `inherit` Select items using negative `nth-child` ```
List of presidents of the American Medical Association (founded 1847): Nathaniel Chapman, 1847–48 Alexander Hodgdon Stevens, 1848–49 John Collins Warren, 1849–50 Reuben D. Mussey, 1850–51 James Moultrie, 1851–52 Beverly R. Wellford, 1852–53 Jonathan Knight, 1853–54 Charles A. Pope, 1854–55 George Bacon Wood, 1855–56 Zina Pitcher, 1856–57 Paul F. Eve, 1857–58 Harvey Lindsly, 1858–59 Henry Miller, 1859–60 Eli Ives, 1860–61 Alden March, 1863–64 Nathan Smith Davis, 1864–66 David Humphreys Storer, 1866–67 Henry F. Askew, 1867–68 Samuel D. Gross, 1868–69 William O. Baldwin, 1869–70 George Mendenhall, 1870–71 Alfred Stillé, 1871–72 D. W. Yandell, 1872–73 Thomas M. Logan, 1873–74 Joseph M. Toner, 1874–75 W. K. Bowling, 1875–76 J. Marion Sims, 1876–77 Henry I. Bowditch, 1877–78 T. G. Richardson, 1878–79 Theophilus Parvin, 1879–80 Lewis Sayre, 1880–81 John T. Hodgen, 1881–82 Joseph Janvier Woodward, 1882–83 John Light Atlee, 1883–84 Austin Flint I, 1884–85 H. F. Campbell, 1885–86 William Brodie, 1886–87 E. H. Gregory, 1887–88 Y. P. Garnett, 1888–89 W. W. Dawson, 1889–90 E. M. Moore, 1890–91 W. T. Briggs, 1891–92 H. O. Marcy, 1892–93 Hunter McGuire, 1893–94 James F. Hibberd, 1894–95 Donald MacLean, 1895–96 R. Beverly Cole, 1896–97 Nicholas Senn, 1897–98 George Miller Sternberg, 1898–99 J. M. Mathews, 1899–1900 W. W. Keen, 1900–01 C. A. L. Reed, 1901–02 John Allan Wyeth, 1902–03 Frank Billings, 1903–04 John Herr Musser, 1904–05 L. S. McMurtry, 1905–06 William James Mayo, 1906–07 Joseph D. Bryant, 1907–08 H. L. Burrell, 1908–09 William C. Gorgas, 1909–10 William H. Welch, 1910–11 John Benjamin Murphy, 1911–12 Abraham Jacobi, 1912–13 John A. Witherspoon, 1913–14 Victor C. Vaughan, 1914–15 William L. Rodman, 1915 Albert Vander Veer (vice president), 1915–16 Rupert Blue, 1916–17 Charles Horace Mayo, 1917–18 Arthur D. 
Bevan, 1918–19 Alexander Lambert, 1919–20 William Clarence Braisted, 1920–21 Hubert Work, 1921–22 George de Schweinitz, 1922–23 Ray Lyman Wilbur, 1923–24 William Allen Pusey, 1924–25 William D. Haggard, 1925–26 Wendell C. Phillips, 1926–27 Jabez N. Jackson, 1927–28 William W. Thayer, 1928–29 Malcolm L. Harris, 1929–30 William Gerry Morgan, 1930–31 E. Starr Judd, 1931–32 Edward H. Cary, 1932–33 Dean D. Lewis, 1933–34 Walter L. Bierring, 1934–35 James S. McLester, 1935–36 James Tate Mason, 1936 Charles Gordon Heyd, 1936–37 J. H. J. Upham, 1937–38 Irvin Abell, 1938–39 Rock Sleyster, 1939–40 Nathan B. Van Etten, 1940–41 Frank H. Lahey, 1941–42 Fred W. Rankin, 1942–43 James E. Paullin, 1943–44 Herman L. Kretschmer, 1944–45 Roger I. Lee, 1945–46 H. H. Shoulders, 1946–47 Edward L. Bortz, 1947–48 Roscoe L. Sensenich, 1948–49 Ernest E. Irons, 1949–50 Elmer L. Henderson, 1950–51 John W. Cline, 1951–52 Louis H. Bauer, 1952–53 Edward J. McCormick, 1953–54 Walter B. Martin, 1954–55 Elmer Hess, 1955–56 Dwight H. Murray, 1956–57 David B. Allman, 1957–58 Gunnar Gundersen, 1958–59 Louis M. Orr, 1959–60 E. Vincent Askey, 1960–61 Leonard W. Larson, 1961–62 George M. Fister, 1962–63 Edward R. Annis, 1963–64 Norman A. Welch, 1964 Donovan F. Ward (vice president), 1964–65 James Z. Appel, 1965–66 Charles L. Hudson, 1966–67 Milford O. Rouse, 1967–68 Dwight Locke Wilbur, 1968–69 Gerald D. Dorman, 1969–70 W. C Bornemeier, 1970–71 Wesley W. Hall, 1971–72 C. A. Hoffman, 1972–72 Russell B. Roth, 1973–74 Malcolm C. Todd, 1974–75 Max H. Parrott, 1975–76 Richard E. Palmer, 1976–77 John H. Budd, 1977–78 Thomas E. Nesbitt, 1978–79 Hoyt D. Gardner, 1979–80 Robert B. Hunter, 1980–81 Daniel T. Cloud, 1981–82 William Y. Rial, 1982–83 Frank J. Jirka Jr., 1983–84 Joseph F. Boyle, 1984–85 Harrison L. Rogers, Jr., 1985–86 John J. Coury, Jr., 1986–87 William S. Hotchkiss, 1987–88 James E. Davis, 1988–89 Alan R. Nelson, 1989–90 C. John Tupper, 1990–91 John J. Ring, 1991–92 John L. Clowe, 1992–93 Joseph T. 
Painter, 1993–94 Robert E. McAfee, 1994–95 Lonnie R. Bristow, 1995–96 Daniel H. Johnson, Jr., 1996–97 Percy Wootton, 1997–98 Nancy Dickey, 1998–99 Thomas Reardon, 1999–2000 Randolph D. Smoak, Jr., 2000–01 Richard F. Corlin, 2001–02 Yank D. Coble, Jr., 2002–03 Donald J. Palmisano, 2003–04 John C. Nelson, 2004–05 J. Edward Hill, 2005–06 William G. Plested, III, 2006–07 Ronald M. Davis, 2007–08 Nancy H. Nielsen, 2008–09 J. James Rohack, 2009–10 Cecil B. Wilson, 2010–11 Peter W. Carmel, 2011–12 Jeremy A. Lazarus, 2012–13 Ardis Dee Hoven, 2013–14 Robert M. Wah, 2014–15 Steven J. Stack, 2015–16 Andrew W. Gurman, 2016–17 David O. Barbe, 2017–18 Barbara L. McAneny, 2018–2019 Patrice Harris, 2019–2020 Susan R. Bailey, 2020-2021 Gerald E. Harmon, 2021-2022 Jack Resneck, Jr., 2022-2023 Jesse Ehrenfeld, 2023-present References American Medical Association Presidents of the American Medical Association Presidents of the American Medical Association
Epideira schoutanica is a species of sea snail, a marine gastropod mollusk in the family Horaiclavidae. Distribution This marine species is endemic to Australia and occurs off South Australia, Tasmania and Victoria. References Further reading May, W.L. 1910. New marine Mollusca. Proceedings of the Royal Society of Tasmania 48: 380–398 Hedley, C. 1922. A revision of the Australian Turridae. Records of the Australian Museum 13(6): 213–359, pls 42–56 May, W.L. 1923. An Illustrated Index of Tasmanian Shells: with 47 plates and 1052 species. Hobart : Government Printer 100 pp. Wilson, B. 1994. Australian Marine Shells. Prosobranch Gastropods. Kallaroo, WA : Odyssey Publishing Vol. 2 370 pp. Simon J. Grove, 2006 A Systematic List of the Marine Molluscs of Tasmania, Queen Victoria Museum and Art Gallery. External links Tucker, J.K. 2004 Catalog of recent and fossil turrids (Mollusca: Gastropoda). Zootaxa 682:1–1295 schoutanica Gastropods of Australia Gastropods described in 1911
```php
<?php
/**
 * Back-compat placeholder for the base embed template
 *
 * @package WordPress
 * @subpackage oEmbed
 * @since 4.4.0
 * @deprecated 4.5.0 Moved to wp-includes/theme-compat/embed.php
 */

// Emit the standard deprecation notice for anything still loading this file,
// then hand off to the relocated template so legacy references keep working.
_deprecated_file( basename( __FILE__ ), '4.5.0', WPINC . '/theme-compat/embed.php' );

require ABSPATH . WPINC . '/theme-compat/embed.php';
```
The 1941 Rollins Tars football team was an American football team that represented Rollins College as a member of the Southern Intercollegiate Athletic Association (SIAA) during the 1941 college football season. In their 13th season under head coach Jack McDowall, the Tars compiled a 5–2–1 record (2–2 against SIAA opponents), shut out five of eight opponents, and outscored all opponents by a total of 164 to 41. Rollins guard Frank Grundler was selected by the Associated Press as a first-team player on the 1941 All-Florida football team. End Scott and back Ray received second-team honors. Rollins was ranked at No. 83 (out of 681 teams) in the final rankings under the Litkenhous Difference by Score System for 1941. The team played its home games at Greater Orlando Stadium in Orlando, Florida. Schedule References Rollins Rollins Tars football seasons Rollins Tars football
Follansbee is a surname. Notable people with the surname include: Clyde Follansbee (1902–1948), American politician Edward Follansbee Noyes (1832–1890), American politician Elizabeth Follansbee (1839–1917), American physician and professor
Britain in a Day is a crowdsourced documentary film that consists of a series clips of footage shot by members of the public in Britain on 12 November 2011. Scott Free Films and the BBC produced the film, which was made in conjunction with YouTube. The film is 90 minutes long and includes shots from 314 different perspectives out of the 11,526 submitted onto the video sharing website YouTube. The film was premiered in cinemas and broadcast on BBC2 on 11 June 2012 for a general audience, as part of the Cultural Olympiad. Billed as a follow-up to the 2011 documentary film Life in a Day, the film was similarly executive produced by Ridley Scott and Kevin Macdonald. The film was directed by Morgan Matthews. Cast & Crew Saranne Bensusan Jonathan Berry Lee John Blackmore Jack Keane Lawrence Mallinson Robin Whittaker Graham Sutton Andrea Dalla Costa Jonathan Proud Jack Arbuthnott Billy Dosanjh Ann Lynch Martin Phipps Peter Christelis Yefri ZuΓ±iga Chris Hunter Ben Mills Hugo Adams Danny Freemantle Glenn Freemantle Eilam Hoffman Emilie O'Connor Adam Scrivener Ian Tapp Craig Brewin Archie Campbell Bec Cranswick Raquel Alvarez Alex Ash Corinne Borgeaud Sean James Thomas Carrell Matt Curtis Jemma Gander Caroline Gerard Iain Griffiths Ross Howieson Olivia Humphreys Amy Jackson Joseph Matthews Helen Mullane Mike Nicholls Rebecca Pearson Claire Salter Johannes Schaff Daniel Thomas Richard Thorburn Elliot Weaver Zander Weaver Chloe White Callum McPherson Babs van Gilst Ernest Hope Stephenson See also Time capsule References 2012 films British documentary films Crowdsourcing 2012 Cultural Olympiad 2010s English-language films 2010s British films
Matt Scott (born March 27, 1985) is an American wheelchair basketball player. Biography Matt Scott was born in Detroit, Michigan, where he began playing wheelchair basketball with the Sterling Heights Challengers in the NWBA Junior Division. As a high schooler, he was on the gold medal-winning US team at the 2007 Parapan American Games in Brazil, where he was coached by Tracy Chynoweth, head coach of University of Wisconsin-Whitewater's college wheelchair basketball team. Scott played five years for the UW-Whitewater Warhawks in the NWBA College Division, winning championships three out of five years in 2004, 2005, and 2007. During his time at Whitewater, he competed in the 2004 Summer Paralympics and 2008 Summer Paralympics. He also won a silver medal at IWBF World Championship in 2006 in Amsterdam. He was nominated for the Best Male Athlete with a Disability ESPY Award in 2008. After college, Scott played professionally for Galatasaray S.K. in Istanbul, Turkey for six seasons from 2008 to 2014. In 2012, Matt Scott helped Team USA to a bronze medal in their 61–46 victory over Great Britain. Scott continued his professional career with Comune Di Porto Torres in Italy for two seasons from 2014 to 2016, during which time Scott's game film of him making multiple three-point shots in front of a crowded arena went viral, prompting the sports blog The Undefeated to publish a write-up titled "Meet the Steph Curry of Wheelchair Basketball." Whistle Sports also shared the game film, which has garnered over 2.5M views on Facebook. Scott left Italy to play three seasons with the RSB Thuringia Bulls in Exleben, Germany. He competed in the 2016 Summer Paralympics, where Team USA defeated Spain 68–52 to win Team USA's first gold medal since 1988. Scott then returned to RSB Thuringia where he and Team USA teammate Jake Williams helped the Bulls win the IWBF Champions Cup in 2018 and 2019. 
In November 2018, Scott fell ill and contracted sepsis, leading to months-long hospitalization during which time he went into a coma for two weeks. He later recovered and returned to playing, but credits the episode for putting his life and basketball career in perspective. During the COVID-19 pandemic, when it was uncertain whether the German RBBL would execute a season, Scott signed with CP Mideba in Badajoz, Spain. In 2021, he competed in the Tokyo Paralympic Games and helped secure a repeat gold medal for the United States in a 64–60 victory over Japan. Matt now resides in the Bay Area. Scott is one of the most sought-after wheelchair basketball players by well-known brands. He was the first US Paralympian to star in a Nike, Inc. advertisement, "No Excuses", where the viewer is made unaware of his disability until the camera zooms out to show his basketball wheelchair at the end. Scott has also been a spokesperson for Travel Wisconsin, participated in a digital marketing campaign for the Apple Watch, and has been an athlete ambassador for Ralph Lauren. References External links 1985 births Living people American men's wheelchair basketball players Galatasaray S.K. (wheelchair basketball) players People with spina bifida Basketball players from Detroit University of Wisconsin–Whitewater alumni Paralympic wheelchair basketball players for the United States Paralympic gold medalists for the United States Paralympic bronze medalists for the United States Paralympic medalists in wheelchair basketball Wheelchair basketball players at the 2004 Summer Paralympics Wheelchair basketball players at the 2008 Summer Paralympics Wheelchair basketball players at the 2012 Summer Paralympics Wheelchair basketball players at the 2016 Summer Paralympics Wheelchair basketball players at the 2020 Summer Paralympics Medalists at the 2012 Summer Paralympics Medalists at the 2016 Summer Paralympics Medalists at the 2020 Summer Paralympics 21st-century American sportsmen
```yaml
---
# Minimal single-node Amazon Redshift cluster.
Resources:
  RedshiftCluster:
    Type: AWS::Redshift::Cluster
    Properties:
      ClusterType: single-node
      DBName: foobar
      # Dynamic reference: CloudFormation resolves the admin password from
      # Secrets Manager at deploy time, so no plaintext secret lives in the
      # template.
      MasterUserPassword: '{{resolve:secretsmanager:/redshift/cluster/masteruserpassword:SecretString:password}}'
      MasterUsername: admin
      NodeType: dc2.large
```
```javascript
// Latvian localization for the CKEditor "save" plugin.
// Fix: the toolbar label had lost its "ā" (encoding damage): "Saglabt" is not
// a Latvian word; the correct label is "Saglabāt" ("Save").
CKEDITOR.plugins.setLang("save", "lv", { toolbar: "Saglabāt" });
```
```php
<?php
/*
 * path_to_url
 *
 * Unless required by applicable law or agreed to in writing, software
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 */

namespace Google\Service\AndroidPublisher;

/**
 * Model wrapper for the AndroidPublisher ExternalSubscription resource —
 * holds a single `subscriptionType` string. Presumably generated by the
 * Google API client generator (note the legacy class alias below).
 */
class ExternalSubscription extends \Google\Model
{
  /**
   * @var string
   */
  public $subscriptionType;

  /**
   * @param string
   */
  public function setSubscriptionType($subscriptionType)
  {
    $this->subscriptionType = $subscriptionType;
  }
  /**
   * @return string
   */
  public function getSubscriptionType()
  {
    return $this->subscriptionType;
  }
}

// Adding a class alias for backwards compatibility with the previous class name.
class_alias(ExternalSubscription::class, 'Google_Service_AndroidPublisher_ExternalSubscription');
```
These are the official results of the Men's 200 metres event at the 2001 IAAF World Championships in Edmonton, Canada. There were a total number of 52 participating athletes, with seven qualifying heats, four quarter-finals, two semi-finals and the final held on Thursday 9 August 2001 at 21:40h. Medalists Records Final Semi-final Held on Wednesday 8 August 2001 Quarter-finals Held on Tuesday 7 August 2001 Heats Held on Tuesday 7 August 2001 References Finals Results Semi-finals results Quarter-finals results Heats results H 200 metres at the World Athletics Championships
Nelson Kyeremeh is a Ghanaian politician and an administrator. He has been the member of parliament for the Berekum East Constituency in the Bono Region since 7 January 2021. Early life and education Kyeremeh was born on 27 March 1985 and hails from Berekum in the Bono Region of Ghana. He obtained his basic education certificate in 2000 and his secondary school certificate in 2003. He was awarded his Bachelor of Science degree in Management Studies (Administration/Management) in 2012 succeeding a Diploma in Basic Education in 2009. Career Kyeremeh was an Assistant Headteacher with the Ghana Education Service and subsequently an Administrator at Agyengoplus Transport and Logistical Service Limited. Politics Kyeremeh won to contest as the NPP Parliamentary candidate for the Berekum East Constituency against the incumbent MP Kwabena Twum-Nuamah. He further won the 2020 general elections by 27,731 votes making 61.3% of total votes cast whilst the NDC parliamentary candidate Simon Ampaabeng Kyeremeh had 17,305 votes making 38.2% of the total votes cast and an Independent candidate Francis Manu-Gyan had 217 votes making 0.5% of the total votes cast. Committees Kyeremeh is a member of Members Holding Offices of Profit Committee and a member of Works and Housing Committee. Personal life Kyeremeh is a Christian. Philanthropy In November 2021, he presented some educational materials to about 41 public schools. References Ghanaian MPs 2021–2025 Living people New Patriotic Party politicians 1985 births
The Kiesinger cabinet was the 8th Government of the Federal Republic of Germany from 1 December 1966 to 22 October 1969 throughout the 5th legislative session of the Bundestag. It was led by the Christian Democratic Union's Kurt Georg Kiesinger, a former Nazi Party member. The Bundestag that had been chosen in the September 1965 election initially resulted in the Cabinet Erhard II, but when the Free Democratic Party resigned from the government, that led to the formation of the new cabinet. The cabinet was supported by the first grand coalition between the Christian Democratic Union (CDU), Christian Social Union of Bavaria (CSU) and the Social Democratic Party (SPD). The Vice-Chancellor was Willy Brandt (SPD) Composition |} References Kiesinger 1966 establishments in West Germany 1969 disestablishments in West Germany Cabinets established in 1966 Cabinets disestablished in 1969 Coalition governments of Germany Kurt Georg Kiesinger
```nu # Run nnn with dynamic changing directory to the environment. # # $env.XDG_CONFIG_HOME sets the home folder for `nnn` folder and its $env.NNN_TMPFILE variable. # See manual NNN(1) for more information. # # Import module using `use quitcd.nu n` to have `n` command in your context. export def --env n [ ...args : string # Extra flags to launch nnn with. --selective = false # Change directory only when exiting via ^G. ] -> nothing { # The behaviour is set to cd on quit (nnn checks if $env.NNN_TMPFILE is set). # Hard-coded to its respective behaviour in `nnn` source-code. let nnn_tmpfile = $env | default '~/.config/' 'XDG_CONFIG_HOME' | get 'XDG_CONFIG_HOME' | path join 'nnn/.lastd' | path expand # Launch nnn. Add desired flags after `^nnn`, ex: `^nnn -eda ...$args`, # or make an alias `alias n = n -eda`. if $selective { ^nnn ...$args } else { NNN_TMPFILE=$nnn_tmpfile ^nnn ...$args } if ($nnn_tmpfile | path exists) { # Remove <cd '> from the first part of the string and the last single quote <'>. # Fix post-processing of nnn's given path that escapes its single quotes with POSIX syntax. let path = open $nnn_tmpfile | str replace --all --regex `^cd '|'$` `` | str replace --all `'\''` `'` ^rm -- $nnn_tmpfile cd $path } } ```
```c++ #include "UIManager.h" #include "Layout/Children.h" #include "EngineUtils.h" #include "Components/Widget.h" #include "Blueprint/UserWidget.h" #include "Blueprint/WidgetTree.h" #include "Engine/UserInterfaceSettings.h" #include "Engine.h" #include "Common/Log.h" #include "XmlParser.h" #include "Common/GXmlJsonTools.h" #include "Engine/UserInterfaceSettings.h" #include "TextBlock.h" #include "RichTextBlock.h" #include "MultiLineEditableTextBox.h" #include "Handler/CommandHandler.h" #include "MultiLineEditableText.h" #include <ctime> #ifdef __ANDROID__ #include "Android/AndroidWindow.h" #endif namespace WeTestU3DAutomation { FXmlNode* TransformUmg2XmlElement(UWidget* Widget, FXmlNode* Parent) { FXmlNode* WidgetXmlNode = AddFXmlNode(Parent, "UWidget", FString()); TArray<FXmlAttribute>& Attributes = const_cast<TArray<FXmlAttribute>&>(WidgetXmlNode->GetAttributes()); UClass* WidgetClass = Widget->GetClass(); Attributes.Add(FXmlAttribute("name", Widget->GetName())); Attributes.Add(FXmlAttribute("components", WidgetClass->GetName())); Attributes.Add(FXmlAttribute("id", FString("0"))); const UTextBlock* TextBlock = Cast<UTextBlock>(Widget); if (TextBlock != nullptr) { Attributes.Add(FXmlAttribute("txt", TextBlock->GetText().ToString())); } return WidgetXmlNode; } void ForWidgetAndChildren(UWidget* Widget, FXmlNode* Parent) { if (Widget == nullptr || Parent == nullptr || !Widget->IsVisible()) { return; } FXmlNode* WidgetXmlNode = TransformUmg2XmlElement(Widget, Parent); // Search for any named slot with content that we need to dive into. if (INamedSlotInterface* NamedSlotHost = Cast<INamedSlotInterface>(Widget)) { TArray<FName> SlotNames; NamedSlotHost->GetSlotNames(SlotNames); for (FName SlotName : SlotNames) { if (UWidget* SlotContent = NamedSlotHost->GetContentForSlot(SlotName)) { ForWidgetAndChildren(SlotContent, WidgetXmlNode); } } } // Search standard children. 
if (UPanelWidget* PanelParent = Cast<UPanelWidget>(Widget)) { for (int32 ChildIndex = 0; ChildIndex < PanelParent->GetChildrenCount(); ChildIndex++) { if (UWidget* ChildWidget = PanelParent->GetChildAt(ChildIndex)) { ForWidgetAndChildren(ChildWidget, WidgetXmlNode); } } } } FString GetCurrentWidgetTree() { TSharedPtr<FXmlFile> xml = CreateFXmlFile(); FString XmlStr; FXmlNode* RootNode = xml->GetRootNode(); for (TObjectIterator<UUserWidget> Itr; Itr; ++Itr) { UUserWidget* UserWidget = *Itr; if (UserWidget == nullptr || !UserWidget->GetIsVisible() || UserWidget->WidgetTree == nullptr) { UE_LOG(GALog, Log, TEXT("UUserWidget Iterator get a null(unvisible) UUserWidget")); continue; } ForWidgetAndChildren(UserWidget->WidgetTree->RootWidget, RootNode); } WriteNodeHierarchy(*RootNode, FString(), XmlStr); return MoveTemp(XmlStr); } const UWidget* FindUWidgetObject(const FString& name) { for (TObjectIterator<UUserWidget> Itr; Itr; ++Itr) { UUserWidget* UserWidget = *Itr; if (UserWidget == nullptr || !UserWidget->GetIsVisible() || UserWidget->WidgetTree == nullptr) { UE_LOG(GALog, Log, TEXT("UUserWidget Iterator get a null(unvisible) UUserWidget")); continue; } UWidget* Widget = UserWidget->GetWidgetFromName(FName(*name)); if (Widget != nullptr) { return Widget; } } return nullptr; } void GetElementBound(const FString& name, FBoundInfo& BoundInfo) { const UWidget* Widget = FindUWidgetObject(name); } bool GetCurrentLevelName(FString& LevelName) { for (TObjectIterator<UUserWidget> Itr; Itr; ++Itr) { UUserWidget* UserWidget = *Itr; if (UserWidget == nullptr || !UserWidget->GetIsVisible() || UserWidget->WidgetTree == nullptr) { continue; } UWorld* World = Itr->GetWorld(); if (World != nullptr) { LevelName = World->GetMapName(); return true; } } return false; } FString GetUWidgetLabelText(const UWidget* Widget) { if (Widget == nullptr) { return FString(); } const UMultiLineEditableText* MultiLineEditableText = Cast<UMultiLineEditableText>(Widget); if (MultiLineEditableText != 
nullptr) { return MultiLineEditableText->GetText().ToString(); } const UTextBlock* TextBlock = Cast<UTextBlock>(Widget); if (TextBlock != nullptr) { return TextBlock->GetText().ToString(); } const UMultiLineEditableTextBox* MultiLineEditableTextBox = Cast<UMultiLineEditableTextBox>(Widget); if (MultiLineEditableTextBox != nullptr) { return MultiLineEditableTextBox->GetText().ToString(); } return FString(); } int32 FUWidgetHelper::SurfaceViewWidth = 0; int32 FUWidgetHelper::SurfaceViewHeight = 0; float FUWidgetHelper::WidthScale = -1.0f; float FUWidgetHelper::HeightScale = -1.0f; float FUWidgetHelper::ViewportScale = -1.0f; bool FUWidgetHelper::Initialize() { if (!CheckGEngine()) { UE_LOG(GALog, Error, TEXT("FUWidgetHelper Initialize failed")); Inited = false; return false; } #ifdef __ANDROID__ bool AndroidInitResult = InitScaleByAndroid(); if (!AndroidInitResult) { InitViewPortScale(); } #else //Not Anroid,use general,DPI method. InitViewPortScale(); #endif Inited = true; return true; } bool FUWidgetHelper::InitViewPortScale() { if (!CheckGEngine()) { UE_LOG(GALog, Error, TEXT("FUWidgetHelper Initialize failed")); Inited = false; return false; } const FVector2D ViewportSize = FVector2D(GEngine->GameViewport->Viewport->GetSizeXY()); const UUserInterfaceSettings* setting = GetDefault<UUserInterfaceSettings>(UUserInterfaceSettings::StaticClass()); if (setting != nullptr) { ViewportScale = setting->GetDPIScaleBasedOnSize(FIntPoint(ViewportSize.X, ViewportSize.Y)); if (ViewportScale <= 0.0) { UE_LOG(GALog, Error, TEXT("ViewportScale = %f,invaild"), ViewportScale); Inited = false; return false; } WidthScale = ViewportScale; HeightScale = ViewportScale; SurfaceViewWidth = GSystemResolution.ResX / ViewportScale; SurfaceViewHeight = GSystemResolution.ResY / ViewportScale; UE_LOG(GALog, Log, TEXT("Screen(GSystemResolution) with scale %f, size width= %f,height=%f"), ViewportScale, GSystemResolution.ResX / ViewportScale, GSystemResolution.ResY / ViewportScale); } return true; 
} bool FUWidgetHelper::InitScaleByAndroid() { #ifdef __ANDROID__ if (!CheckGEngine()) { UE_LOG(GALog, Error, TEXT("FUWidgetHelper Initialize failed")); Inited = false; return false; } const FVector2D ViewportSize = FVector2D(GEngine->GameViewport->Viewport->GetSizeXY()); if (SurfaceViewWidth != 0.0f&&SurfaceViewHeight != 0.0f) { return true; } void* NativeWindow = FAndroidWindow::GetHardwareWindow(); FAndroidWindow::CalculateSurfaceSize(NativeWindow, SurfaceViewWidth, SurfaceViewHeight); if (SurfaceViewWidth == 0.0f) { UE_LOG(GALog, Error, TEXT("SurfaceWidth error = 0.0 ")); SurfaceViewWidth = ViewportSize.X; } if (SurfaceViewHeight == 0.0f) { UE_LOG(GALog, Error, TEXT("SurfaceViewHeight error = 0.0 ")); SurfaceViewHeight = ViewportSize.Y; } WidthScale = ViewportSize.X / SurfaceViewWidth; HeightScale = ViewportSize.Y / SurfaceViewHeight; UE_LOG(LogTemp, Log, TEXT("Surfaceview WidthScale=%f ,HeightScale=%f,SurfaceViewWidth = %d,SurfaceViewHeight=%d"), WidthScale, HeightScale, SurfaceViewWidth, SurfaceViewHeight); return true; #else return false; #endif } bool FUWidgetHelper::CheckGEngine() { if (GEngine == nullptr || GEngine->GameViewport == nullptr || GEngine->GameViewport->Viewport == nullptr) { UE_LOG(GALog, Error, TEXT("Global GEngine(GameViewPort) is null")); return false; } return true; } FVector FUWidgetHelper::GetMobileinfo() { FVector vector; vector.X = ViewportScale; vector.Y = SurfaceViewWidth; vector.Z = SurfaceViewHeight; return vector; } bool FUWidgetHelper::GetElementBound(const FString& name, FBoundInfo& BoundInfo) { if (!Inited&&Initialize()) { UE_LOG(GALog, Error, TEXT("Mobile Screen size get error")); BoundInfo.existed = false; BoundInfo.instance = -1; BoundInfo.visible = false; return false; } const UWidget* WidgetPtr = FindUWidgetObject(name); if (WidgetPtr == nullptr || !WidgetPtr->IsVisible()) { UE_LOG(GALog, Log, TEXT("UObject %s can't find"), *name); BoundInfo.existed = false; BoundInfo.instance = -1; BoundInfo.visible = false; return true; 
} const FGeometry geometry = WidgetPtr->GetCachedGeometry(); FVector2D Position = geometry.GetAbsolutePosition(); FVector2D Size = geometry.GetAbsoluteSize(); BoundInfo.x = Position.X / WidthScale; BoundInfo.y = Position.Y / HeightScale; BoundInfo.width = Size.X / WidthScale; BoundInfo.height = Size.Y / HeightScale; return true; } bool FUWidgetHelper::PositionInRect(const FGeometry& geometry, float x, float y) { FVector2D Position = geometry.GetAbsolutePosition(); FVector2D Size = geometry.GetAbsoluteSize(); if (x >= Position.X&&y >= Position.Y&&x <= (Position.X + Size.X) && y <= (Position.Y + Size.Y)) { return true; } return false; } const UWidget* FUWidgetHelper::FindUWidgetObjectByPos(float x, float y) { if (!Inited&&Initialize()) { UE_LOG(GALog, Error, TEXT("Mobile Screen size get error")); return nullptr; } float GeometryX = x*WidthScale; float GeometryY = y*HeightScale; UWidget* ContainPosWidget = nullptr; for (TObjectIterator<UUserWidget> Itr; Itr; ++Itr) { UUserWidget* UserWidget = *Itr; if (UserWidget == nullptr || !UserWidget->GetIsVisible() || UserWidget->WidgetTree == nullptr) { UE_LOG(GALog, Log, TEXT("UUserWidget Iterator get a null(unvisible) UUserWidget")); continue; } UserWidget->WidgetTree->ForEachWidgetAndDescendants([&ContainPosWidget, this, GeometryX, GeometryY](UWidget* WidgetPtr) { if (WidgetPtr == nullptr || !WidgetPtr->IsVisible()) { return; } const FGeometry geometry = WidgetPtr->GetCachedGeometry(); if (this->PositionInRect(geometry, GeometryX, GeometryY)) { ContainPosWidget = WidgetPtr; } }); } return ContainPosWidget; } bool TimeTemp::SetTimerHandle() { handle = new FTimerHandle(); UGameInstance* gameInstance = nullptr; for (TObjectIterator<UGameInstance> Itr; Itr; ++Itr) { gameInstance = *Itr; if (gameInstance == nullptr) { continue; } timerDel.BindLambda([this]() {TimerHandleFunc(); }); gameInstance->GetWorld()->GetTimerManager().SetTimer(*handle, timerDel, tickTime, loop); return true; } return false; } static TArray<FCharacterPos> 
characterposs; void TimeTemp::TimerHandleFunc() { UGameInstance* gameInstance = nullptr; for (TObjectIterator<UGameInstance> Itr; Itr; ++Itr) { gameInstance = *Itr; if (gameInstance == nullptr) { continue; } FHitResult Hit, Hit2; UE_LOG(GALog, Log, TEXT("Timer Start")); // ACharacter* character = gameInstance->GetWorld()->GetFirstPlayerController()->GetCharacter(); FVector vectorStart = character->GetActorLocation(); vectorStart.Z = 0.0f; FVector actorRotator = character->GetActorForwardVector(); FVector vectorEnd = vectorStart + actorRotator * scales; vectorEnd.Z = character->GetDefaultHalfHeight() * 2; DrawDebugLine(gameInstance->GetWorld(), vectorStart + FVector(0.0f, 0.0f, 25.0f), vectorEnd, FColor(255, 0, 0), false, 0, 0, 10); FCollisionObjectQueryParams checkTrace(ECollisionChannel::ECC_WorldStatic); checkTrace.AddObjectTypesToQuery(ECollisionChannel::ECC_PhysicsBody); // gameInstance->GetWorld()->LineTraceSingleByObjectType(Hit, vectorStart + FVector(0.0f, 0.0f, 25.0f) , vectorEnd, FCollisionObjectQueryParams(checkTrace)); AActor* actor = Hit.GetActor(); if (actor) { gameInstance->GetWorld()->GetTimerManager().ClearTimer(*handle); handle = nullptr; UE_LOG(GALog, Log, TEXT("Disable monitor")); FCharacterPos characterpos; auto i = reinterpret_cast<std::uintptr_t>(actor); characterpos.instance = i; characterpos.x = character->GetActorLocation().X; characterpos.y = character->GetActorLocation().Y; characterpos.z = character->GetActorLocation().Z; command.ReponseJsonType = ResponseDataType::OBJECT; characterposs.Push(characterpos); command.ResponseJson = ArrayToJson<FCharacterPos>(characterposs); FCommandHandler::cond_var->notify_one(); } } } // const bool ChangeRotator(const FString& str) { UGameInstance* gameinstance = nullptr; for (TObjectIterator<UGameInstance> Itr; Itr; ++Itr) { gameinstance = *Itr; if (gameinstance == nullptr) { continue; } gameinstance->GetWorld()->GetFirstPlayerController()->GetPawn()->AddControllerYawInput(FCString::Atof(*str)); return 
true; } return false; } // const FRotator getRotation() { FRotator rotator = FRotator(90.0f, 90.0f, 90.0f); UGameInstance* gameinstance = nullptr; for (TObjectIterator<UGameInstance> Itr; Itr; ++Itr) { gameinstance = *Itr; if (gameinstance == nullptr) { continue; } rotator = gameinstance->GetWorld()->GetFirstPlayerController()->GetPawn()->GetActorRotation(); break; } return rotator; } // const float getScale() { UGameInstance* gameinstance = nullptr; for (TObjectIterator<UGameInstance> Itr; Itr; ++Itr) { gameinstance = *Itr; if (gameinstance == nullptr) { continue; } return gameinstance->GetWorld()->GetFirstPlayerController()->InputYawScale; } return 0.0f; } // const FVector getLevelBound(const FString& str) { FVector origin = FVector(0, 0, 0); FVector boxextent = FVector(0, 0, 0); for (TObjectIterator<AActor> Itr; Itr; ++Itr) { AActor* actor = *Itr; if (actor == nullptr) continue; if (actor->GetName() == FString(str)) { actor->GetActorBounds(false, origin, boxextent); return boxextent; } } return boxextent; } // const bool setLocation(const FString& str) { ACharacter* character = nullptr; FVector vec = FVector(0, 0, 0); UGameInstance* gameinstance = nullptr; for (TObjectIterator<UGameInstance> Itr; Itr; ++Itr) { gameinstance = *Itr; if (gameinstance == nullptr) continue; if (!character) { character = gameinstance->GetWorld()->GetFirstPlayerController()->GetCharacter(); vec = gameinstance->GetWorld()->GetFirstPlayerController()->GetPawn()->GetActorForwardVector() * FCString::Atof(*str); vec += character->GetTargetLocation(); UE_LOG(GALog, Log, TEXT("%f,%f,%f"), vec.X, vec.Y, vec.Z); if (character->SetActorLocation(vec)) { return true; } else return false; } } return false; } // const bool setCharacter(float& posx,float& posy) { ACharacter* character = nullptr; FVector vec = FVector(0, 0, 0); UGameInstance* gameinstance = nullptr; for (TObjectIterator<UGameInstance> Itr; Itr; ++Itr) { gameinstance = *Itr; if (gameinstance == nullptr) continue; if (!character) { 
character = gameinstance->GetWorld()->GetFirstPlayerController()->GetCharacter(); vec = character->GetTargetLocation(); vec.X = posx; vec.Y = posy; UE_LOG(GALog, Log, TEXT("%f,%f,%f"), vec.X, vec.Y, vec.Z); if (character->SetActorLocation(vec)) return true; break; } } return false; } struct FParam { FString par; FString outcome; }; //Api const FString callRegisterHandler(FName& funcname, FString& funcparams) { FParam par; par.par = funcparams; UClass* ActorRef = FindObject<UClass>((UObject*)ANY_PACKAGE, *FString("MyObject")); if (ActorRef) { UFunction* func = ActorRef->FindFunctionByName(funcname); if (func) { try { ActorRef->ProcessEvent(func, &par); UE_LOG(GALog, Log, TEXT("ProcessEvent Success!")) return par.outcome; } catch (const std::exception& ex) { UE_LOG(GALog, Log, TEXT("%s"), ex.what()) return "false"; } } } return "Null"; } } ```
```python #!/usr/bin/env python # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # from autothreadharness.harness_case import HarnessCase import unittest class Router_5_5_3(HarnessCase): role = HarnessCase.ROLE_ROUTER case = '5 5 3' golden_devices_required = 4 def on_dialog(self, dialog, title): pass if __name__ == '__main__': unittest.main() ```
```asciidoc //// This file is generated by DocsTest, so don't change it! //// = apoc.load.driver :description: This section contains reference documentation for the apoc.load.driver procedure. label:procedure[] label:apoc-full[] [.emphasis] apoc.load.driver('org.apache.derby.jdbc.EmbeddedDriver') register JDBC driver of source database == Signature [source] ---- apoc.load.driver(driverClass :: STRING?) :: VOID ---- == Input parameters [.procedures, opts=header] |=== | Name | Type | Default |driverClass|STRING?|null |=== [[usage-apoc.load.driver]] == Usage Examples include::partial$usage/apoc.load.driver.adoc[] ```
```c++ /* * FinderPatternInfo.cpp * zxing * * Created by Christian Brunschen on 13/05/2008. * * * path_to_url * * Unless required by applicable law or agreed to in writing, software * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ #include <zxing/qrcode/detector/FinderPatternInfo.h> namespace zxing { namespace qrcode { FinderPatternInfo::FinderPatternInfo(std::vector<Ref<FinderPattern> > patternCenters) : bottomLeft_(patternCenters[0]), topLeft_(patternCenters[1]), topRight_(patternCenters[2]) { } Ref<FinderPattern> FinderPatternInfo::getBottomLeft() { return bottomLeft_; } Ref<FinderPattern> FinderPatternInfo::getTopLeft() { return topLeft_; } Ref<FinderPattern> FinderPatternInfo::getTopRight() { return topRight_; } } } ```
```css /*! fancyBox v2.1.5 fancyapps.com | fancyapps.com/fancybox/#license */ .fancybox-wrap, .fancybox-skin, .fancybox-outer, .fancybox-inner, .fancybox-image, .fancybox-wrap iframe, .fancybox-wrap object, .fancybox-nav, .fancybox-nav span, .fancybox-tmp { padding: 0; margin: 0; border: 0; outline: none; vertical-align: top; } .fancybox-wrap { position: absolute; top: 0; left: 0; z-index: 8020; } .fancybox-skin { position: relative; background: #f9f9f9; color: #444; text-shadow: none; /*-webkit-border-radius: 4px; -moz-border-radius: 4px; border-radius: 4px;*/ } .fancybox-opened { z-index: 8030; } .fancybox-opened .fancybox-skin { -webkit-box-shadow: 0 10px 25px rgba(0, 0, 0, 0.5); -moz-box-shadow: 0 10px 25px rgba(0, 0, 0, 0.5); box-shadow: 0 10px 25px rgba(0, 0, 0, 0.5); } .fancybox-outer, .fancybox-inner { position: relative; } .fancybox-inner { overflow: hidden; } .fancybox-type-iframe .fancybox-inner { -webkit-overflow-scrolling: touch; } .fancybox-error { color: #444; font: 14px/20px "Helvetica Neue",Helvetica,Arial,sans-serif; margin: 0; padding: 15px; white-space: nowrap; } .fancybox-image, .fancybox-iframe { display: block; width: 100%; height: 100%; } .fancybox-image { max-width: 100%; max-height: 100%; } #fancybox-loading, .fancybox-close, .fancybox-prev span, .fancybox-next span { background-image: url(../img/gallery/fancybox_sprite.png); } #fancybox-loading { position: fixed; top: 50%; left: 50%; margin-top: -22px; margin-left: -22px; background-position: 0 -108px; opacity: 0.8; cursor: pointer; z-index: 8060; } #fancybox-loading div { width: 44px; height: 44px; background: url(../img/gallery/fancybox_loading.gif) center center no-repeat; } .fancybox-close { position: absolute; top: -18px; right: -18px; width: 36px; height: 36px; cursor: pointer; z-index: 8040; } .fancybox-nav { position: absolute; top: 0; width: 40%; height: 100%; cursor: pointer; text-decoration: none; background: transparent url(../img/gallery/blank.gif); /* helps IE */ 
-webkit-tap-highlight-color: rgba(0,0,0,0); z-index: 8040; } .fancybox-prev { left: 0; } .fancybox-next { right: 0; } .fancybox-nav span { position: absolute; top: 50%; width: 36px; height: 34px; margin-top: -18px; cursor: pointer; z-index: 8040; visibility: hidden; } .fancybox-prev span { left: 10px; background-position: 0 -36px; } .fancybox-next span { right: 10px; background-position: 0 -72px; } .fancybox-nav:hover span { visibility: visible; } .fancybox-tmp { position: absolute; top: -99999px; left: -99999px; max-width: 99999px; max-height: 99999px; overflow: visible !important; } /* Overlay helper */ .fancybox-lock { overflow: visible !important; width: auto; } .fancybox-lock body { overflow: hidden !important; } .fancybox-lock-test { overflow-y: hidden !important; } .fancybox-overlay { position: absolute; top: 0; left: 0; overflow: hidden; display: none; z-index: 8010; background: url(../img/gallery/fancybox_overlay.png); } .fancybox-overlay-fixed { position: fixed; bottom: 0; right: 0; } .fancybox-lock .fancybox-overlay { overflow: auto; overflow-y: scroll; } /* Title helper */ .fancybox-title { visibility: hidden; font: normal 13px/20px "Helvetica Neue",Helvetica,Arial,sans-serif; position: relative; text-shadow: none; z-index: 8050; } .fancybox-opened .fancybox-title { visibility: visible; } .fancybox-title-float-wrap { position: absolute; bottom: 0; right: 50%; margin-bottom: -35px; z-index: 8050; text-align: center; } .fancybox-title-float-wrap .child { display: inline-block; margin-right: -100%; padding: 2px 20px; background: transparent; /* Fallback for web browsers that doesn't support RGBa */ background: rgba(0, 0, 0, 0.8); -webkit-border-radius: 15px; -moz-border-radius: 15px; border-radius: 15px; text-shadow: 0 1px 2px #222; color: #FFF; font-weight: bold; line-height: 24px; white-space: nowrap; } .fancybox-title-outside-wrap { position: relative; margin-top: 10px; color: #fff; } .fancybox-title-inside-wrap { padding-top: 10px; } 
.fancybox-title-over-wrap { position: absolute; bottom: 0; left: 0; color: #fff; padding: 10px; background: #000; background: rgba(0, 0, 0, .8); } /*Retina graphics!*/ @media only screen and (-webkit-min-device-pixel-ratio: 1.5), only screen and (min--moz-device-pixel-ratio: 1.5), only screen and (min-device-pixel-ratio: 1.5){ #fancybox-loading, .fancybox-close, .fancybox-prev span, .fancybox-next span { background-image: url(../img/gallery/fancybox_sprite@2x.png); background-size: 44px 152px; /*The size of the normal image, half the size of the hi-res image*/ } #fancybox-loading div { background-image: url(../img/gallery/fancybox_loading@2x.gif); background-size: 24px 24px; /*The size of the normal image, half the size of the hi-res image*/ } } ```
```python import http.server import multiprocessing import os import socket import ssl from typing import Callable import pexpect import pytest from common_test_methods import get_host_ip4_by_dest_ip from pytest_embedded import Dut from RangeHTTPServer import RangeRequestHandler server_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'server_certs/ca_cert.pem') key_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'server_certs/server_key.pem') enc_bin_name = 'pre_encrypted_ota_secure.bin' def https_request_handler() -> Callable[...,http.server.BaseHTTPRequestHandler]: """ Returns a request handler class that handles broken pipe exception """ class RequestHandler(RangeRequestHandler): def finish(self) -> None: try: if not self.wfile.closed: self.wfile.flush() self.wfile.close() except socket.error: pass self.rfile.close() def handle(self) -> None: try: RangeRequestHandler.handle(self) except socket.error: pass return RequestHandler def start_https_server(ota_image_dir: str, server_ip: str, server_port: int) -> None: os.chdir(ota_image_dir) requestHandler = https_request_handler() httpd = http.server.HTTPServer((server_ip, server_port), requestHandler) ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) ssl_context.load_cert_chain(certfile=server_file, keyfile=key_file) httpd.socket = ssl_context.wrap_socket(httpd.socket, server_side=True) httpd.serve_forever() @pytest.mark.esp32 @pytest.mark.ethernet_ota def test_examples_protocol_pre_encrypted_ota_example(dut: Dut) -> None: server_port = 8001 # Start server thread1 = multiprocessing.Process(target=start_https_server, args=(dut.app.binary_path, '0.0.0.0', server_port)) thread1.daemon = True thread1.start() try: dut.expect('Loaded app from partition at offset', timeout=30) try: ip_address = dut.expect(r'IPv4 address: (\d+\.\d+\.\d+\.\d+)[^\d]', timeout=30)[1].decode() print('Connected to AP/Ethernet with IP: {}'.format(ip_address)) except pexpect.exceptions.TIMEOUT: raise 
ValueError('ENV_TEST_FAILURE: Cannot connect to AP/Ethernet') host_ip = get_host_ip4_by_dest_ip(ip_address) dut.expect('Starting Pre Encrypted OTA example', timeout=30) print('writing to device: {}'.format('path_to_url + host_ip + ':' + str(server_port) + '/' + enc_bin_name)) dut.write('path_to_url + host_ip + ':' + str(server_port) + '/' + enc_bin_name) dut.expect('Magic Verified', timeout=30) dut.expect('Reading RSA private key', timeout=30) dut.expect('upgrade successful. Rebooting', timeout=60) # after reboot dut.expect('Loaded app from partition at offset', timeout=30) finally: thread1.terminate() @pytest.mark.esp32 @pytest.mark.ethernet_ota @pytest.mark.parametrize('config', ['partial_download',], indirect=True) def your_sha256_hash(dut: Dut) -> None: server_port = 8001 # Size of partial HTTP request request_size = int(dut.app.sdkconfig.get('EXAMPLE_HTTP_REQUEST_SIZE')) # File to be downloaded. This file is generated after compilation binary_file = os.path.join(dut.app.binary_path, enc_bin_name) bin_size = os.path.getsize(binary_file) http_requests = int((bin_size / request_size) - 1) assert http_requests > 1 # Start server thread1 = multiprocessing.Process(target=start_https_server, args=(dut.app.binary_path, '0.0.0.0', server_port)) thread1.daemon = True thread1.start() try: dut.expect('Loaded app from partition at offset', timeout=30) try: ip_address = dut.expect(r'IPv4 address: (\d+\.\d+\.\d+\.\d+)[^\d]', timeout=30)[1].decode() print('Connected to AP/Ethernet with IP: {}'.format(ip_address)) except pexpect.exceptions.TIMEOUT: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP/Ethernet') host_ip = get_host_ip4_by_dest_ip(ip_address) dut.expect('Starting Pre Encrypted OTA example', timeout=30) print('writing to device: {}'.format('path_to_url + host_ip + ':' + str(server_port) + '/' + enc_bin_name)) dut.write('path_to_url + host_ip + ':' + str(server_port) + '/' + enc_bin_name) dut.expect('Magic Verified', timeout=30) dut.expect('Reading RSA private 
key', timeout=30) for _ in range(http_requests): dut.expect('Connection closed', timeout=60) dut.expect('upgrade successful. Rebooting', timeout=60) # after reboot dut.expect('Loaded app from partition at offset', timeout=30) finally: thread1.terminate() ```
```rust
// NOTE(review): this file appears to be machine-generated bindgen output
// (layout-test style); do not hand-edit the declarations themselves.
#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]

// Generated alias: in these bindings RefPtr<T> is represented as T by value.
pub type RefPtr<T> = T;

// Opaque one-byte stand-in for a C++ type.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct A {
    pub _address: u8,
}

pub type A_a = b;

// Compile-time layout assertions: indexing a one-element array with
// `size_of - 1` only compiles when the size is exactly 1 byte.
#[allow(clippy::unnecessary_operation, clippy::identity_op)]
const _: () = {
    ["Size of A"][::std::mem::size_of::<A>() - 1usize];
    ["Alignment of A"][::std::mem::align_of::<A>() - 1usize];
};

// Generic wrapper holding a RefPtr<c>; PhantomData records ownership of c.
#[repr(C)]
pub struct e<c> {
    pub _phantom_0: ::std::marker::PhantomData<::std::cell::UnsafeCell<c>>,
    pub d: RefPtr<c>,
}

impl<c> Default for e<c> {
    fn default() -> Self {
        // Zero-initialize the whole struct, matching C++ zero-construction.
        let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
        unsafe {
            ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
            s.assume_init()
        }
    }
}

#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct f {
    pub _address: u8,
}

#[repr(C)]
pub struct g {
    pub h: f,
}

#[allow(clippy::unnecessary_operation, clippy::identity_op)]
const _: () = {
    ["Size of g"][::std::mem::size_of::<g>() - 1usize];
    ["Alignment of g"][::std::mem::align_of::<g>() - 1usize];
    ["Offset of field: g::h"][::std::mem::offset_of!(g, h) - 0usize];
};

impl Default for g {
    fn default() -> Self {
        let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
        unsafe {
            ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
            s.assume_init()
        }
    }
}

// b inherits from g in the original C++; bindgen flattens the base as _base.
#[repr(C)]
pub struct b {
    pub _base: g,
}

#[allow(clippy::unnecessary_operation, clippy::identity_op)]
const _: () = {
    ["Size of b"][::std::mem::size_of::<b>() - 1usize];
    ["Alignment of b"][::std::mem::align_of::<b>() - 1usize];
};

impl Default for b {
    fn default() -> Self {
        let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
        unsafe {
            ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
            s.assume_init()
        }
    }
}

extern "C" {
    // Mangled C++ symbol for Servo_Element_GetSnapshot().
    #[link_name = "\u{1}_Z25Servo_Element_GetSnapshotv"]
    pub fn Servo_Element_GetSnapshot() -> A;
}

// Layout assertion for the template specialization f<e<int>>.
#[allow(clippy::unnecessary_operation, clippy::identity_op)]
const _: () = {
    [
        "Size of template specialization: f_open0_e_open1_int_close1_close0",
    ][::std::mem::size_of::<f>() - 1usize];
    [
        "Align of template specialization: f_open0_e_open1_int_close1_close0",
    ][::std::mem::align_of::<f>() - 1usize];
};
```
The Grammy Award for Best Orchestral Performance has been awarded since 1959. There have been several minor changes to the name of the award over this time: From 1959 to 1964 it was awarded as Best Classical Performance - Orchestra. In 1965 it was Best Performance - Orchestra. From 1966 to 1975 it returned to Best Classical Performance - Orchestra. From 1977 to 1978 it was awarded as Best Classical Orchestral Performance. From 1980 to 1981 it was awarded as Best Classical Orchestral Recording. In 1983 it was awarded as Best Orchestral Performance. In 1984 it was awarded as Best Orchestral Recording. From 1985 to 1987 it returned to being called Best Classical Orchestral Recording. From 1988 to 1989 it was once again called Best Orchestral Recording. From 1990 to the present it has returned to being called Best Orchestral Performance. Years reflect the year in which the Grammy Awards were presented, for works released in the previous year. Until 1989, the Grammy Award went to the conductor only, but since then, the Orchestra has also been given an award (although the orchestras are not always mentioned as a nominee). Recipients Multiple Wins and Nominations The following individuals received two or more Best Orchestral Performance awards: The following individuals received four or more Best Orchestral Performance nominations: References Orchestral Performance Orchestral music Awards established in 1959
Eladio Romero Santos was a Dominican musician. Originally from Cenoví, a town outside of San Francisco de Macorís, Santos' career spans over forty years. Santos started recording bachata in 1966 with his first song "Tomando En Tu Mesa". Since he performed mostly in country social clubs and for patron saints' festivals, he was not marginalized as were many of his fellow bachateros. Santos' style was much simpler and more straightforward than that of other guitarists such as Edilio Paredes; it was also rhythmic and danceable. He contracted arthritis in 1995 and was forced to stop playing the guitar. After 1995, he only performed as a singer. Romero Santos retired in 1998. He died three years later, in 2001, of lung cancer. His sister Leonilda Alejo moved to the United States and made some very popular songs such as "Mamita" and "Será Porque Soy Pobre". She now lives in the United States and has quit her music career. Discography El Creador (1970) El Zumbador (1970) La Muerte de Mi Hermano (1970) La Muñeca (1970) Las Bailadoras (1970) La Madrugadora (1978) La Viuda (1979) La Mujer Policía (1980) El Sabor de Mi Guitarra (1980) Eladio Romero Santos (1981) Eladio Romero Santos Presenta a Francisco Ulloa y el Conjunto San Rafael (with Francisco Ulloa) (1981) Muchacha Dominicana (1988) 15 Éxitos (1990) Éxitos Vol. 2 (1990) Relevance Santos is especially remembered because he developed an innovative way to perform the merengue with guitars, in opposition to the traditional way of the conjunto tipico or the combos (another popular urban style of orchestra). External links Eladio Romero Santos: Pioneer of bachata and guitar merengue Profile at Bachata Republic 1937 births 2001 deaths Bachata musicians Merengue musicians Dominican Republic guitarists 20th-century Dominican Republic male singers 20th-century guitarists
Legend of the Amazon Women is a beat 'em up video game developed by SilverTime and published by U.S. Gold and Mastertronic for Amstrad CPC, Commodore 64, and ZX Spectrum in 1986. Plot Stranded in the middle of the jungle after a terrible plane crash, your child has been stolen by a tribe of Amazon Warriors, who want to raise her as one of their own. You must fight your way through the jungle past Amazons armed with clubs, swords and axes avoiding the many arrows, in order to rescue your daughter. You [sic] goal is to fight your way through the ten zones and rescue the stolen child. The Amazons will do their best to stop you. They will fight you one at a time, though they constantly fire arrows at you which you need to avoid by jumping or ducking. As you progress through the game the Amazons will get more intelligent and harder to defeat. You have a limited time to complete each zone, if you fail to reach the end of the zone before your time runs out you lose a life. Gameplay Reception See also Flight of the Amazon Queen References External links Legend of the Amazon Women resources at World of Spectrum Legend of the Amazon Women at Your Sinclair 1986 video games Amstrad CPC games Beat 'em ups Commodore 64 games Video games developed in the United Kingdom Video games featuring female protagonists ZX Spectrum games Mastertronic games U.S. Gold games
```go // // // path_to_url // // Unless required by applicable law or agreed to in writing, software // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. package operator import ( "reflect" "testing" v1 "k8s.io/api/core/v1" monitoringv1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1" ) func TestMakeHostAliases(t *testing.T) { cases := []struct { input []monitoringv1.HostAlias expected []v1.HostAlias }{ { input: nil, expected: nil, }, { input: []monitoringv1.HostAlias{}, expected: nil, }, { input: []monitoringv1.HostAlias{ { IP: "1.1.1.1", Hostnames: []string{"foo.com"}, }, }, expected: []v1.HostAlias{ { IP: "1.1.1.1", Hostnames: []string{"foo.com"}, }, }, }, } for i, c := range cases { result := MakeHostAliases(c.input) if !reflect.DeepEqual(result, c.expected) { t.Errorf("expected test case %d to be %s but got %s", i, c.expected, result) } } } ```
```go package captain import ( "bytes" "encoding/binary" "errors" "io" "io/ioutil" "os" "path/filepath" "sync/atomic" "testing" "time" ) func TestAppend(t *testing.T) { dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(nil) if err != nil { t.Fatalf("Open Appender err=%s", err) } tests := [][]byte{ []byte("1"), []byte("2"), []byte("3"), } startTime := time.Now().UTC() for _, v := range tests { err = a.Append(v) if err != nil { t.Fatalf("Append err=%s", err) } } endTime := time.Now().UTC() c, err := s.OpenCursor() if err != nil { t.Fatalf("Cursor err=%s", err) } for i, v := range tests { r, err := c.Next() if err != nil || (r == nil && err == nil) { t.Fatalf("ursor.Next() mismatch, r=%+v, err=%s, index=%d", r, err, i) } if !bytes.Equal(r.Payload, v) { t.Fatalf("Payload mismatch, act=%+v, exp=%+v", r.Payload, v) } if r.Time.Before(startTime) || r.Time.After(endTime) { t.Fatalf("Record time out of range, act=%s, expected between %s - %s", r.Time, startTime, endTime) } } } func TestAppendRotate(t *testing.T) { dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) // 71 bytes is the size of magic header + 3 single char records. 
s := NewStream(dir, testMagicHeader) options := &AppendOptions{SegmentSize: 71} a, err := s.OpenAppender(options) if err != nil { t.Fatalf("Open Appender err=%s", err) } oneSeg := []*segmentInfo{&segmentInfo{name: filepath.Clean(dir + "/000000001.log"), seq: 1}} twoSeg := append(oneSeg, &segmentInfo{name: filepath.Clean(dir + "/000000002.log"), seq: 2}) tests := []struct { payload []byte expSize int expSegs []*segmentInfo }{ {payload: []byte("1"), expSize: 29, expSegs: oneSeg}, {payload: []byte("2"), expSize: 50, expSegs: oneSeg}, {payload: []byte("3"), expSize: 71, expSegs: oneSeg}, {payload: []byte("4"), expSize: 29, expSegs: twoSeg}, } for _, tt := range tests { err := a.Append(tt.payload) if err != nil { t.Fatalf("Append err=%s", err) } segs := scanSegments(dir) if len(segs) != len(tt.expSegs) { t.Fatalf("Segment length mismatch, act=%d, exp=%d", len(segs), len(tt.expSegs)) } for i, s := range tt.expSegs { if *segs[i] != *s { t.Fatalf("Segment mismatch, act=%+v, exp=%+v", segs[i], s) } } var stat os.FileInfo if len(tt.expSegs) == len(oneSeg) { stat, err = os.Stat(oneSeg[0].name) } else if len(tt.expSegs) == len(twoSeg) { stat, err = os.Stat(twoSeg[1].name) } if err != nil { t.Fatalf("Unable to stat, err=%s", err) } if stat.Size() != int64(tt.expSize) { t.Fatalf("Size mismatch, act=%d, exp=%d", stat.Size(), tt.expSize) } } } func TestAppendInvalidDir(t *testing.T) { dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(nil) if err != nil { t.Fatalf("Open Appender err=%s", err) } // Set dir to invalid. a.path = dir + "/does-not-exist" err = a.Append([]byte("a")) if err == nil { t.Fatalf("Expected err on invalid dir") } } // Test failure handling for an unlikely record marshaling error. 
func TestAppendRecordMarshalFailure(t *testing.T) { testErr := errors.New("invalid file descriptor") copy := binaryWrite expData := []byte("a") binaryWrite = func(w io.Writer, order binary.ByteOrder, data interface{}) error { b, ok := data.([]byte) if ok && bytes.Equal(expData, b) { return testErr } return copy(w, order, data) } defer func() { binaryWrite = copy }() dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(nil) if err != nil { t.Fatalf("Open Appender err=%s", err) } err = a.Append(expData) if err != testErr { t.Fatalf("Append err, act=%s, exp=%s", err, testErr) } } // Test internal writer failure handling. // e.g., Disk full, closed fd. func TestAppendWriteFailure(t *testing.T) { expData := []byte("deadbeef") testErr := errors.New("invalid file descriptor") copy := binaryWrite binaryWrite = func(w io.Writer, order binary.ByteOrder, data interface{}) error { b, ok := data.([]byte) // Look for expData within marshaled record. 
if ok && len(b) == 28 && bytes.Equal(expData, b[16:24]) { return testErr } return copy(w, order, data) } defer func() { binaryWrite = copy }() dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(nil) if err != nil { t.Fatalf("Open Appender err=%s", err) } err = a.Append(expData) if err != testErr { t.Fatalf("Append err, act=%s, exp=%s", err, testErr) } } func TestAppendEmptyDir(t *testing.T) { dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) segPath := dir + "/000000001.log" _, err = os.Stat(segPath) if os.IsExist(err) { t.Fatalf("Expected empty dir") } s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(nil) if err != nil { t.Fatalf("New Appender err=%s", err) } a.Append([]byte("a")) f, err := os.Open(segPath) if err != nil { t.Fatalf("Expected file path to exist") } if err = validateSegmentHeader(f, testMagicHeader); err != nil { t.Fatalf("Expected valid segment header, err=%s", err) } } func TestAppendInvalidHeader(t *testing.T) { s := NewStream("./test/invalid-header", testMagicHeader) a, err := s.OpenAppender(nil) if err != nil { t.Fatalf("New Appender err=%s", err) } err = a.Append([]byte("a")) if err == nil { t.Fatalf("Append err, act=nil, exp=err") } } func TestAppenderNewWithInvalidDir(t *testing.T) { s := NewStream("./test/does-not-exist", testMagicHeader) _, err := s.OpenAppender(nil) if err == nil { t.Fatalf("Expected not found err") } } // Opening a new appender on a directory with the last segment file already at // the SegmentSize limit, should rotate it immediately. func TestAppenderLastActiveFileAtLimit(t *testing.T) { dir := "./test/appender-rotate-limit" expFile := dir + "/000000003.log" defer os.Remove(expFile) // 71 bytes is the size of magic header + 3 single char records. 
s := NewStream(dir, testMagicHeader) options := &AppendOptions{SegmentSize: 71} a, err := s.OpenAppender(options) if err != nil { t.Fatalf("New appender err=%s", err) } f, err := a.activeSegment() if err != nil { t.Fatalf("Unexpected Appender.activeFile() err=%s", err) } if f.Name() != expFile { t.Fatalf("Rotated file mismatch, act=%s, exp=%s", f.Name(), expFile) } } func TestAppendProcessLock(t *testing.T) { dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) s := NewStream(dir, testMagicHeader) a1, err := s.OpenAppender(nil) if err != nil { t.Fatalf("Open Appender err=%s", err) } a2, err := s.OpenAppender(nil) if err != nil { t.Fatalf("Open Appender err=%s", err) } if err := a1.Lock(); err != nil { t.Fatalf("Appender Lock err=%s", err) } defer a1.Unlock() done := make(chan struct{}) go func() { if err := a2.Lock(); err != nil { t.Fatalf("Appender Lock err=%s", err) } defer a2.Unlock() close(done) }() timer := time.NewTimer(100 * time.Millisecond) select { case <-done: t.Fatalf("Unexpected second append lock") case <-timer.C: } a1.Unlock() timer = time.NewTimer(100 * time.Millisecond) select { case <-done: case <-timer.C: t.Fatalf("Expected successful a2 lock") } } func TestAppenderActiveFileExistingSegments(t *testing.T) { dir := "./test/appender-existing-segments" s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(nil) if err != nil { t.Fatalf("Open Appender err=%s", err) } f, err := a.activeSegment() if err != nil { t.Fatalf("Active segment err=%s", err) } expFile := dir + "/000000002.log" if f.Name() != expFile { t.Fatalf("Active file mismatch, act=%s, exp=%s", f.Name(), expFile) } } type testSegmentWriter struct { sync func() error write func(b []byte) (int, error) } func (w *testSegmentWriter) Sync() error { return w.sync() } func (w *testSegmentWriter) Write(b []byte) (int, error) { return w.write(b) } func (w *testSegmentWriter) Close() error { return nil } func TestAppenderSyncInterval(t 
*testing.T) { dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(&AppendOptions{SyncPolicy: SyncInterval, SyncInterval: 10}) if err != nil { t.Fatalf("Open Appender err=%s", err) } // Ensure sync is proper when there is no segment file to sync. // This will show up in coverage report. time.Sleep(15 * time.Millisecond) var n uint32 w := &testSegmentWriter{ sync: func() error { atomic.AddUint32(&n, 1) return nil }, } a.rwlock.Lock() a.seg = &segmentWriter{writer: w} a.rwlock.Unlock() time.Sleep(40 * time.Millisecond) actN := atomic.LoadUint32(&n) if actN < 3 || actN > 5 { t.Fatalf("Sync count act=%d, exp=3 - 5", actN) } } func TestAppenderSyncAlways(t *testing.T) { dir, err := ioutil.TempDir("", "test-captain") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) s := NewStream(dir, testMagicHeader) a, err := s.OpenAppender(&AppendOptions{SyncPolicy: SyncAlways}) if err != nil { t.Fatalf("Open Appender err=%s", err) } var n int w := &testSegmentWriter{ sync: func() error { n++ return nil }, write: func(b []byte) (int, error) { return len(b), nil }, } a.rwlock.Lock() a.seg = &segmentWriter{writer: w} a.rwlock.Unlock() err = a.Append([]byte("a")) if err != nil { t.Fatalf("Append err=%s", err) } if n != 1 { t.Fatalf("Sync count act=%d, exp=1", n) } } ```
Morchella palazonii is a species of morel found in Spain. Morels are edible mushrooms in the family Morchellaceae (Ascomycota). Morchella palazonii was described as new to science in 2015 by Philippe Clowez and colleagues, from collections under holly oak (Quercus ilex) and narrow-leafed ash trees (Fraxinus angustifolia) in Spain. This edible species is characterised by an elongated cap, a rufescent fruiting body, and small spores. References External links palazonii
```html <!DOCTYPE html> <html lang="en"> <head> <title>Ring Structure Reference</title> <link rel="stylesheet" type="text/css" href="../css/jazzy.css" /> <link rel="stylesheet" type="text/css" href="../css/highlight.css" /> <meta charset='utf-8'> <script src="../js/jquery.min.js" defer></script> <script src="../js/jazzy.js" defer></script> </head> <body> <a name="//apple_ref/swift/Struct/Ring" class="dashAnchor"></a> <a title="Ring Structure Reference"></a> <header> <div class="content-wrapper"> <p><a href="../index.html">UICircularProgressRing 7.0.0 Docs</a> (100% documented)</p> </div> </header> <div class="content-wrapper"> <p id="breadcrumbs"> <a href="../index.html">UICircularProgressRing Reference</a> <img id="carat" src="../img/carat.png" /> Ring Structure Reference </p> </div> <div class="content-wrapper"> <nav class="sidebar"> <ul class="nav-groups"> <li class="nav-group-name"> <a href="../Enums.html">Enumerations</a> <ul class="nav-group-tasks"> <li class="nav-group-task"> <a href="../Enums/RingAxis.html">RingAxis</a> </li> <li class="nav-group-task"> <a href="../Enums/RingColor.html">RingColor</a> </li> <li class="nav-group-task"> <a href="../Enums/RingProgress.html">RingProgress</a> </li> <li class="nav-group-task"> <a href="../Enums/TimerRingTimeUnit.html">TimerRingTimeUnit</a> </li> </ul> </li> <li class="nav-group-name"> <a href="../Structs.html">Structures</a> <ul class="nav-group-tasks"> <li class="nav-group-task"> <a href="../Structs/IndeterminateRing.html">IndeterminateRing</a> </li> <li class="nav-group-task"> <a href="../Structs/PercentFormattedText.html">PercentFormattedText</a> </li> <li class="nav-group-task"> <a href="../Structs/ProgressRing.html">ProgressRing</a> </li> <li class="nav-group-task"> <a href="../Structs/Ring.html">Ring</a> </li> <li class="nav-group-task"> <a href="../Structs/RingStyle.html">RingStyle</a> </li> <li class="nav-group-task"> <a href="../Structs/TimerRing.html">TimerRing</a> </li> </ul> </li> </ul> </nav> <article 
class="main-content"> <section> <section class="section"> <h1>Ring</h1> <div class="declaration"> <div class="language"> <pre class="highlight swift"><code><span class="kd">public</span> <span class="kd">struct</span> <span class="kt">Ring</span><span class="o">&lt;</span><span class="kt">Content</span><span class="o">&gt;</span> <span class="k">where</span> <span class="kt">Content</span> <span class="p">:</span> <span class="kt">View</span></code></pre> <pre class="highlight swift"><code><span class="kd">extension</span> <span class="kt">Ring</span><span class="p">:</span> <span class="kt">View</span></code></pre> </div> </div> <h1 id='ring' class='heading'>Ring</h1> <p>A view which represents a ring (a circle with a stroke). The <code>percent</code> determines how much of the ring is drawn starting from the <code>axis</code>.</p> <p>Example: A ring with an axis of <code><a href="../Enums/RingAxis.html#/s:22UICircularProgressRing0C4AxisO3topyA2CmF">RingAxis.top</a></code>, a percent of <code>0.5</code>, and <code>clockwise == true</code> will draw a stroked circle from <code>90</code> degrees (on a unit circle) to <code>270</code> degrees.</p> </section> <section class="section task-group-section"> <div class="task-group"> <ul> <li class="item"> <div> <code> <a name="/s:your_sha256_hashyour_sha256_hashc"></a> <a name="//apple_ref/swift/Method/init(percent:axis:clockwise:color:strokeStyle:_:)" class="dashAnchor"></a> <a class="token" href="#/s:your_sha256_hashyour_sha256_hashc">init(percent:<wbr>axis:<wbr>clockwise:<wbr>color:<wbr>strokeStyle:<wbr>_:<wbr>)</a> </code> </div> <div class="height-container"> <div class="pointer-container"></div> <section class="section"> <div class="pointer"></div> <div class="abstract"> <p>Creates a <code>Ring</code>.</p> </div> <div class="declaration"> <h4>Declaration</h4> <div class="language"> <p class="aside-title">Swift</p> <pre class="highlight swift"><code><span class="kd">public</span> <span class="nf">init</span><span 
class="p">(</span> <span class="nv">percent</span><span class="p">:</span> <span class="kt">Double</span><span class="p">,</span> <span class="nv">axis</span><span class="p">:</span> <span class="kt"><a href="../Enums/RingAxis.html">RingAxis</a></span><span class="p">,</span> <span class="nv">clockwise</span><span class="p">:</span> <span class="kt">Bool</span><span class="p">,</span> <span class="nv">color</span><span class="p">:</span> <span class="kt"><a href="../Enums/RingColor.html">RingColor</a></span><span class="p">,</span> <span class="nv">strokeStyle</span><span class="p">:</span> <span class="kt">StrokeStyle</span><span class="p">,</span> <span class="kd">@ViewBuilder</span> <span class="n">_</span> <span class="nv">content</span><span class="p">:</span> <span class="kd">@escaping</span> <span class="p">(</span><span class="kt">Double</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kt">Content</span> <span class="p">)</span></code></pre> </div> </div> <div> <h4>Parameters</h4> <table class="graybox"> <tbody> <tr> <td> <code> <em>percent</em> </code> </td> <td> <div> <p>The starting completion percent of the ring.</p> </div> </td> </tr> <tr> <td> <code> <em>axis</em> </code> </td> <td> <div> <p>The axis to begin drawing the ring.</p> </div> </td> </tr> <tr> <td> <code> <em>clockwise</em> </code> </td> <td> <div> <p>Whether the ring is drawn in a clockwise manner.</p> </div> </td> </tr> <tr> <td> <code> <em>color</em> </code> </td> <td> <div> <p>The stroke color for the ring.</p> </div> </td> </tr> <tr> <td> <code> <em>strokeStyle</em> </code> </td> <td> <div> <p>The <code>StrokeStyle</code> for the ring.</p> </div> </td> </tr> <tr> <td> <code> <em>content</em> </code> </td> <td> <div> <p>An optional content view placed within the center of the ring.</p> </div> </td> </tr> </tbody> </table> </div> </section> </div> </li> <li class="item"> <div> <code> <a name="/s:7SwiftUI4ViewP4body4BodyQzvp"></a> <a 
name="//apple_ref/swift/Property/body" class="dashAnchor"></a> <a class="token" href="#/s:7SwiftUI4ViewP4body4BodyQzvp">body</a> </code> </div> <div class="height-container"> <div class="pointer-container"></div> <section class="section"> <div class="pointer"></div> <div class="abstract"> </div> <div class="declaration"> <h4>Declaration</h4> <div class="language"> <p class="aside-title">Swift</p> <pre class="highlight swift"><code><span class="kd">public</span> <span class="k">var</span> <span class="nv">body</span><span class="p">:</span> <span class="n">some</span> <span class="kt">View</span> <span class="p">{</span> <span class="k">get</span> <span class="p">}</span></code></pre> </div> </div> </section> </div> </li> </ul> </div> <div class="task-group"> <div class="task-name-container"> <a name="/Available%20where%20%60Content%60%20%3D%3D%20%60EmptyView%60"></a> <a name="//apple_ref/swift/Section/Available where `Content` == `EmptyView`" class="dashAnchor"></a> <div class="section-name-container"> <a class="section-name-link" href="#/Available%20where%20%60Content%60%20%3D%3D%20%60EmptyView%60"></a> <h3 class="section-name"><p>Available where <code>Content</code> == <code>EmptyView</code></p> </h3> </div> </div> <ul> <li class="item"> <div> <code> <a name="/s:your_sha256_hashyour_sha256_hashOAD06StrokeM0Vtcfc"></a> <a name="//apple_ref/swift/Method/init(percent:axis:clockwise:color:strokeStyle:)" class="dashAnchor"></a> <a class="token" href="#/s:your_sha256_hashyour_sha256_hashOAD06StrokeM0Vtcfc">init(percent:<wbr>axis:<wbr>clockwise:<wbr>color:<wbr>strokeStyle:<wbr>)</a> </code> </div> <div class="height-container"> <div class="pointer-container"></div> <section class="section"> <div class="pointer"></div> <div class="abstract"> <p>Default init which returns a ring with no label.</p> </div> <div class="declaration"> <h4>Declaration</h4> <div class="language"> <p class="aside-title">Swift</p> <pre class="highlight swift"><code><span class="kd">public</span> 
<span class="nf">init</span><span class="p">(</span> <span class="nv">percent</span><span class="p">:</span> <span class="kt">Double</span><span class="p">,</span> <span class="nv">axis</span><span class="p">:</span> <span class="kt"><a href="../Enums/RingAxis.html">RingAxis</a></span><span class="p">,</span> <span class="nv">clockwise</span><span class="p">:</span> <span class="kt">Bool</span><span class="p">,</span> <span class="nv">color</span><span class="p">:</span> <span class="kt"><a href="../Enums/RingColor.html">RingColor</a></span><span class="p">,</span> <span class="nv">strokeStyle</span><span class="p">:</span> <span class="kt">StrokeStyle</span> <span class="p">)</span></code></pre> </div> </div> </section> </div> </li> </ul> </div> </section> </section> <section id="footer"> <p>&copy; 2020 <a class="link" href="path_to_url" target="_blank" rel="external">Luis Padron</a>. All rights reserved. (Last updated: 2020-07-12)</p> <p>Generated by <a class="link" href="path_to_url" target="_blank" rel="external">jazzy v0.13.4</a>, a <a class="link" href="path_to_url" target="_blank" rel="external">Realm</a> project.</p> </section> </article> </div> </body> </div> </html> ```
Tai'an Village () is the smallest village under the jurisdiction of Beiwan Town (), Jingyuan County, Gansu. It has a total area of , with of roads. , the village had 3,832 people in 987 households. Among them 82 are Communist Party members, 17 (20.73%) of those being women. Out of the 3,534 mǔ of land in the village, 157 were devoted to orchards and 955 to vegetable greenhouses; livestock in the village included 5,100 pigs, 1,760 sheep, and 196,400 chickens. Total annual grain production was 2,565 tonnes, while vegetable production was 5,727 tonnes. The per capita net income of farmers had reached 5,200 yuan by that year. References Jingyuan County, Gansu Villages in China
```python
import demistomock as demisto  # noqa: F401
from CommonServerPython import *  # noqa: F401


def print_to_parent_incident(alert_id: str, value: str, parent_incident_id: str) -> None:
    """Prints a value to the alert's parent incident.

    Posts a markdown entry to the parent incident via the ``addEntries``
    command and reports success/failure as the script's result.

    Args:
        alert_id (str): The alert ID running the script.
        value (str): The value to print.
        parent_incident_id (str): The parent incident's ID of the alert.
    """
    # addEntries expects a JSON-encoded list of entry objects; Type 1 is a note.
    entry_note = json.dumps(
        [{"Type": 1, "ContentsFormat": EntryFormat.MARKDOWN, "Contents": f"Entry from alert #{alert_id}:\n{value}"}]
    )
    entry_tags_res: list[dict[str, Any]] = demisto.executeCommand(
        "addEntries", {"entries": entry_note, "id": parent_incident_id, "reputationCalcAsync": True}
    )
    if isError(entry_tags_res[0]):
        # return_error reports the failure and halts script execution.
        return_error(get_error(entry_tags_res))
    else:
        return_results(CommandResults(readable_output=f"Successfully printed to parent incident {parent_incident_id}."))


def validate_parent_incident_id(parent_incident_id: str, alert_id: str) -> str:
    """Validates if the parent incident ID of the alert is not empty, and return it.

    Args:
        parent_incident_id (str): The parent incident ID of the alert.
        alert_id (str): The alert ID running the script.

    Raises:
        DemistoException: If the parent incident ID is an empty string, meaning it couldn't be found.

    Returns:
        str: The parent incident ID if not empty.
    """
    if not parent_incident_id:
        raise DemistoException(f"No parent incident was found for {alert_id =}")
    return parent_incident_id


def main():  # pragma: no cover
    try:
        args = demisto.args()
        value: str = args["value"]
        current_alert: dict[str, Any] = demisto.incident()
        alert_id: str = current_alert["id"]
        # "parentXDRIncident" links the alert to its parent incident;
        # defaults to "" so validation raises when the link is absent.
        parent_incident_id: str = validate_parent_incident_id(
            parent_incident_id=current_alert.get("parentXDRIncident", ""),
            alert_id=alert_id,
        )
        print_to_parent_incident(
            alert_id=alert_id,
            value=value,
            parent_incident_id=parent_incident_id,
        )
    except Exception as ex:
        return_error(f"Failed to execute PrintToParentIncident. Error: {str(ex)}")


if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
```
Vallières () is a former commune in the Haute-Savoie department in the Auvergne-Rhône-Alpes region in south-eastern France. On 1 January 2019, it was merged into the new commune Vallières-sur-Fier. Geography The Fier forms the commune's southern border. See also Communes of the Haute-Savoie department References Former communes of Haute-Savoie Populated places disestablished in 2019
This is a list of awards and nominations received by D'banj, a Nigerian recording artist and harmonica player widely known for his contributions to the African Music Industry. Born and raised in Zaria, D'banj came to prominence following the release of his debut album No Long Thing (2005). He won the Best Newcomer award at the 2006 Channel O Music Video Awards for "Tongolo", a song off his aforementioned album. In 2005, D'banj won the Most Promising Male Artist award at the 2005 Kora Awards. His follow-up album, RunDown Funk U Up (2006), included the hit single "Why Me" whose music video included the receipt of 1 Channel O Music Video Awards out of 3 nominations in 2007. On May 23, 2010, and February 9, 2011, the Kokomaster released "Mr Endowed" and "Mr Endowed (Remix)" respectively. The former was nominated for Hottest Single of the Year at the 2011 Nigeria Entertainment Awards. The music video for the former was nominated in the Best Afro Pop Video and Video of the Year categories at the 2010 Nigeria Music Video Awards (NMWA). Moreover, D'banj was nominated for the Best Use of Effects at the aforementioned awards for the music video. On the other hand, the music video for the latter won the Most Gifted Male Video award, and was nominated for Most Gifted Video of The Year at the 2011 Channel O Music Video Awards. It was also nominated for Best Afro Pop Video award at the 2011 Nigeria Music Video Awards (NMWA). D'Kings Men, his 2013 compilation album, included the hit single "Oliver Twist". The music video for "Oliver Twist" won the Most Gifted Male Video and Most Gifted Video of the Year awards at the 2012 Channel O Music Video Awards. "Oliver Twist" won the Song of the Year award, and was nominated for Best Pop Single at The Headies 2012. D'banj received the Best Male West Africa nomination at the 2012 Kora Awards for "Oliver Twist". Furthermore, the song was nominated for Hottest Single of the Year at the 2012 Nigeria Entertainment Awards. 
BET Awards |- |rowspan="1"|2011 |rowspan="1"|D'banj |"Best International Act (Africa)" | Channel O Music Video Awards |- |rowspan="1"|2006 |rowspan="1"|D'banj for "Tongolo" |"Best Newcomer" | |- |rowspan="3"|2007 |rowspan="3"|"Why Me" |"Best Male Video" | |- |"Best African West Video" | |- |"Best Special Effects Video" | |- |rowspan="1"|2008 |rowspan="1"|"Move Your Body" |"Best African West Video" | |- |rowspan="1"|2010 |rowspan="1"|"Fall in Love" |"Most Gifted Afro Pop" | |- |rowspan="2"|2011 |rowspan="2"|"Mr Endowed (Remix)" |"Most Gifted Male Video" | |- |"Most Gifted Video Of The Year" | |- |rowspan="2"|2012 |rowspan="2"|"Oliver Twist" |"Most Gifted Male Video" | |- |"Most Gifted Video of the Year" | |- |rowspan="1"|2013 |"Tony Montana (Bad Pass) Remix" (Naeto C featuring D'banj) |Most Gifted Duo/Group/Featuring Video | Fizz Awards |- |rowspan="1"|2006 |rowspan="1"|D'banj for "Tongolo" |"Best Newcomer" | Ghana Music Awards |- |rowspan="1"|2007 |rowspan="4"|D'banj |rowspan="4"|"African Artiste of the Year" | |- |rowspan="1"|2009 | |- |rowspan="1"|2012 | |- |rowspan="1"|2013 | The Headies |- |rowspan="1"|2006 |rowspan="1"|D'banj |"Revelation of the Year" | |- |rowspan= "1"|2007 |rowspan="1"|Why Me" |"Song of the Year" | |- |rowspan="1"|2008 |rowspan="1"|D'banj |"Artiste of the Year" | |- |rowspan="4"|2009 |rowspan="1"|The Entertainer |"Album of the Year" | |- |rowspan="1"|D'banj |"Artiste of the Year" | |- |rowspan="1"|"Fall in Love" |"Song of the Year" | |- |rowspan="1"|The Entertainer |"Best R&B/Pop Album" | |- |rowspan="1"|2010 |rowspan="1"|"You Bad" (Wande Coal featuring D'banj) |"Song of the Year" | |- |rowspan="3"|2011 |rowspan="1"|D'banj |"Artiste Of The Year" | |- |rowspan="1"|"Pop Something" (Dr SID featuring D'banj) |"Song of the Year" | |- |rowspan="1"|"Entertainer" |"Best Pop Single" | |- |rowspan="3"|2012 |rowspan="1"|D'banj |"Artiste Of The Year" | |- |rowspan="3"|"Oliver Twist" |"Song of the Year" | |- |"Best Pop Single" | |- |rowspan="2"|2013 
|"Best Music Video" | |- |"Tony Montana (Bad Pass) Remix" (Naeto C featuring D'banj) |Best Collabo | |- |rowspan="2"|2016 |"Emergency |"Best Pop Single" | |- | |"Best Recording of the year" | Kora Awards |- |rowspan="1"|2005 |rowspan="1"|D'banj |"Most Promising Male Artist" | |- |rowspan="1"|2012 |rowspan="1"|D'banj for "Oliver Twist" |"Best Male West Africa" | MOBO Awards |- |rowspan="1"|2007 |rowspan="4"|D'banj |rowspan="4"|"Best African Act" | |- |rowspan="1"|2008 | |- |rowspan="1"|2011 | |- |rowspan="1"|2012 | MTV Africa Music Awards |- |rowspan="4"|2008 |rowspan="3"|D'banj |"Best Artist of the Year" | |- |"Best Male Artist" | |- |"Best Live Performer" | |- |rowspan="1"|D'banj for "Why Me" |"Listener's Choice Award" | |- |rowspan="3"|2009 |rowspan="4"|D'banj |"Best Artist of the Year" | |- |"Best Male Artist" | |- |"Best Performer" | |- |rowspan="1"|2010 |"Song Of The Year" | MTV Europe Music Awards |- |rowspan="1"|2007 |rowspan="2"|D'banj |rowspan="3"|"Best African Act" | |- |rowspan="1"|2012 | Nigeria Entertainment Awards |- |rowspan="2"|2007 |rowspan="1"|"Why Me" |"Hottest Single of the Year" | |- |rowspan="1"|D'banj |"Best Afro Pop Act of the Year" | |- |rowspan="2"|2009 |rowspan="1"|The Entertainer |rowspan="1"|"Best Album of the Year" | |- |rowspan="1"|"Suddenly" |rowspan="1"|"Best Music Video of the Year" | |- |rowspan="2"|2010 |rowspan="1"|"You Bad" (Wande Coal featuring D'banj) |"Hottest Single of the Year" | |- |rowspan="1"|"Fall in Love" (D'banj and Sesan) |"Best Male Music Video of the Year (Artist & Director)" | |- |rowspan="2"|2011 |rowspan="1"|"Mr Endowed" |"Hottest Single of the Year" | |- |rowspan="1"|D'banj |"Best Pop/R&B Artist of the Year" | |- |rowspan="2"|2012 |rowspan="1"|"Oliver Twist" |"Hottest Single of the Year" | |- |rowspan="1"|D'banj |"Best Entertainment Personality" | Nigeria Music Video Awards (NMVA) |- |rowspan="1"|2009 |rowspan="1|"Gbono Feli Feli" |"Best Afro Pop Video" | |- |rowspan="3"|2010 |rowspan="1"|"Mr Endowed" |"Best 
Afro Pop Video" | |- |rowspan="1"| D'banj for "Mr Endowed" |"Best Use of Effects" | |- |rowspan="1"|"Mr Endowed" |"Video of the Year" | |- |rowspan="1"|2011 |rowspan="1"|"Mr Endowed (Remix)" |"Best Afro Pop Video" | |- |rowspan="1"|2012 |"Tony Montana (Bad Pass) Remix" (Naeto C featuring D'banj) |"Best Afro Hip Hop" | |- |rowspan="3"|2013 |rowspan="3"|"Don't Tell Me Nonsense" |"Video of the Year" | |- |"Best Afro Pop Video" | |- |"Best Use of Choreography" |N/A Sound City Music Video Awards |- |rowspan="6"|2008 |rowspan="5"|"Move Your Body" |"Best Male Video" | |- |"Best R&B/Pop Video" | |- |"Best Special Effect/Editing" | |- |"Best Video" | |- |"Best Cinematography" | |- |rowspan="1"|"Booty Call" (Mo' Hits All Stars) |"Soundcity Fresh Video" | |- |rowspan="5"|2009 |"Suddenly" |"Soundcity Viewers Choice" | |- |"Ten Ten" (Mo' Hits All Stars) |"Best Special Effect Editing" | |- |"Pere" (Mo' Hits All Stars) |"Best Duo/Group Video" | |- |"Wind am Well" (Ikechukwu featuring D'banj) |"Best Collaboration in a Music Video" | |- |"Pere" (Mo' Hits All Stars) |"Best Video" | |- |rowspan="1"|2010 |"Mr Endowed" |"Soundcity Fresh Video" | |} World Music Awards |- |rowspan="5"|2014 |rowspan="3"|D'banj |"World’s Best Male Artist" | |- |"World's Best Entertainer of the Year" | |- |"Best-selling African Artist" | |- |rowspan="2"|"Oliver Twist" |"World’s Best Song" | |- |"World’s Best Video" | 4Syte TV Music Video Awards |- |rowspan="1"|2013 |"Don't Tell Me Nonsense" |rowspan="3"|"Best African Act Video" | |- |2012 |"Oliver Twist" | |- |2011 |"Mr Endowed Remix" | References D'banj D'banj
Usually, a large force is employed on a relatively small portion of the front to achieve this.
```php
<?php
# Load the configured captcha implementation, if enabled.
#
# Expects the including scope to define:
#   $use_captcha   - bool flag enabling captcha support
#   $captcha_class - basename of the class file under ../lib/captcha/
# plus any configuration variables the captcha constructor declares as
# parameters (matched by name via reflection).
#
# On success, $captchaInstance holds the constructed captcha object.
# On failure (missing class file or missing constructor parameter), the
# error is logged and the process exits with status 1.
if(isset($use_captcha) && $use_captcha == true) {
    $captcha_file = __DIR__ . "/../lib/captcha/" . $captcha_class . ".php";
    if(file_exists($captcha_file)) {
        $captcha_fullclass = "captcha\\$captcha_class";
        require_once($captcha_file);
        error_log("Captcha module $captcha_class successfully loaded");

        # Inspect parameters of constructor. getConstructor() returns null
        # when the class declares no constructor; guard against that so a
        # parameterless captcha class does not trigger a fatal error.
        $reflection = new ReflectionClass($captcha_fullclass);
        $constructor = $reflection->getConstructor();
        $constructorParams = ($constructor !== null) ? $constructor->getParameters() : [];

        # Gather parameters to pass to the class: all config params to pass
        $definedVariables = get_defined_vars(); # get all variables, including configuration
        $params = [];
        foreach ($constructorParams AS $param) {
            if(!isset($definedVariables[$param->name])) {
                error_log("Error: Missing param $param->name for $captcha_class");
                exit(1);
            }
            array_push($params, $definedVariables[$param->name]);
        }
        $captchaInstance = new $captcha_fullclass(...$params);
    } else {
        error_log("Error: unable to load captcha class $captcha_class in " . $captcha_file);
        exit(1);
    }
}
?>
```
Knights Must Fall is a 1949 Warner Bros. Merrie Melodies cartoon directed by Friz Freleng. The short was released on July 16, 1949, and stars Bugs Bunny. A spoof of the King Arthur mythology, the title is a pun on the 1937 film Night Must Fall. Plot Bugs (as a knave) stands in line with several knights, chewing a carrot. As Bugs finishes eating, he disposes of the carrot in the suit of "Sir Pantsalot of Drop Seat Manor" (a pun on Sir Lancelot), angering Pantsalot. After they exchange glove blows to each other (with Bugs using one of Pantsalot's gauntlets), the two agree to settle their feud with a joust. The joust begins with Pantsalot introduced to great fanfare, and Bugs being booed. Pantsalot beats Bugs back twice, and destroys Bugs' lance on his third attempt with his shield, earning Bugs the derision of the crowd ("Hey! That cast-iron palooka's making a chump outta me!"). Half-time is signaled with the entertainment consisting of a band playing music (used prior in Porky in Wackyland). The second half begins with Bugs and Pantsalot trading head blows until Bugs tickles Pantsalot using a pneumatic drill on Pantsalot's armor. Pantsalot responds by attempting to swing a cast-iron ball at Bugs, who uses a spring to cause the ball to recoil and slam Pantsalot in the head. Bugs then says, "It is to laugh!", and laughs. Then, Bugs tricks Pantsalot into opening his helmet and peeking out so that he can punch Pantsalot's head back in. Pantsalot chases Bugs into a rabbit hole on the field. Bugs comes up from an adjacent hole while Pantsalot looks for him. Bugs hits Pantsalot on the head again, angering Pantsalot, who smashes what he thinks is Bugs in armor. He finds Bugs hiding in his armor as Bugs evades yet another bat to the head (making Pantsalot strike his own head). Bugs applies a needle to Pantsalot 's posterior, causing him to jump and smash into an arch before smashing back into his armor. 
Bugs unscrews the helmet and remarks "Look at the new Dick Tracy character, Accordion Head!". Bugs is then chased into a manhole, and before Pantsalot can dive in, Bugs puts the lid on, causing yet another head blow to Pantsalot. Bugs, thinking he has won, prepares to leave ("I guess I'd better go phone Lady Windermere not to expect her spouse home for dinner"), but the knights, led by Pantsalot, reappear in formation to joust together against Bugs. Bugs calls a timeout and builds a glass and cast-iron steam case resembling a tank and a bomber in a nearby blacksmith shop to house himself, his pony, and lance. After emerging, Bugs and the knights charge to each other and end up (off-screen) crashing into each other, rattling the crowd. The cartoon ends with Bugs as "The Smiling Rabbit", selling all of the defeated knights' suits of armor and disposing of another carrot in what was Pantsalot's suit ("Ehh, So it shouldn't be a total loss."). Analysis While a parody of the Arthurian legend, the short avoids using familiar names. The ending, however, is clearly based on the final fight in A Connecticut Yankee in King Arthur's Court (1889), where the knights of England attack the protagonist en masse and fall to him. It is a battle between modern American technology and old English ways. The narrative of the film never explains the arrival of Bugs in this time period. He is simply there. Yet his American ways have a calamitous effect, as did those of Hank Morgan in the original novel, giving this short a dark side. The film also evokes images of the post-war era. For example, the pavilion of Bugs is a military surplus tent with the markings of the United States Army. There are references to Errol Flynn, popular films, Dick Tracy, speakeasies, and bombers. The jousting field is depicted as a typical sports field transferred to the Middle Ages. There is an announcer, a vendor selling programs, a football field, and references to baseball, boxing, and pool. 
There is also an appearance by a kazoo-using marching band at half-time. The first half of the joust consists of three passes each ending in defeat for Bugs. In the first two, he is sent flying into a wall. In the third and last, he shatters his lance. The second half of the joust is a free-for-all, making use of multiple weapons. In an opening sequence, the two combatants smash each other with clubs to the tune of I've Been Working on the Railroad. Sources References External links 1949 films 1949 short films 1949 animated films Short films directed by Friz Freleng Films set in the Middle Ages Films set in England Merrie Melodies short films Warner Bros. Cartoons animated short films Films scored by Carl Stalling Bugs Bunny films Films based on A Connecticut Yankee in King Arthur's Court 1940s Warner Bros. animated short films 1940s English-language films
```javascript /* ======================================================================== * Bootstrap: modal.js v3.2.0 * path_to_url#modals * ======================================================================== * ======================================================================== */ +function ($) { 'use strict'; // MODAL CLASS DEFINITION // ====================== var Modal = function (element, options) { this.options = options this.$body = $(document.body) this.$element = $(element) this.$backdrop = this.isShown = null this.scrollbarWidth = 0 if (this.options.remote) { this.$element .find('.modal-content') .load(this.options.remote, $.proxy(function () { this.$element.trigger('loaded.bs.modal') }, this)) } } Modal.VERSION = '3.2.0' Modal.DEFAULTS = { backdrop: true, keyboard: true, show: true } Modal.prototype.toggle = function (_relatedTarget) { return this.isShown ? this.hide() : this.show(_relatedTarget) } Modal.prototype.show = function (_relatedTarget) { var that = this var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) this.$element.trigger(e) if (this.isShown || e.isDefaultPrevented()) return this.isShown = true this.checkScrollbar() this.$body.addClass('modal-open') this.setScrollbar() this.escape() this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this)) this.backdrop(function () { var transition = $.support.transition && that.$element.hasClass('fade') if (!that.$element.parent().length) { that.$element.appendTo(that.$body) // don't move modals dom position } that.$element .show() .scrollTop(0) if (transition) { that.$element[0].offsetWidth // force reflow } that.$element .addClass('in') .attr('aria-hidden', false) that.enforceFocus() var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget }) transition ? 
that.$element.find('.modal-dialog') // wait for modal to slide in .one('bsTransitionEnd', function () { that.$element.trigger('focus').trigger(e) }) .emulateTransitionEnd(300) : that.$element.trigger('focus').trigger(e) }) } Modal.prototype.hide = function (e) { if (e) e.preventDefault() e = $.Event('hide.bs.modal') this.$element.trigger(e) if (!this.isShown || e.isDefaultPrevented()) return this.isShown = false this.$body.removeClass('modal-open') this.resetScrollbar() this.escape() $(document).off('focusin.bs.modal') this.$element .removeClass('in') .attr('aria-hidden', true) .off('click.dismiss.bs.modal') $.support.transition && this.$element.hasClass('fade') ? this.$element .one('bsTransitionEnd', $.proxy(this.hideModal, this)) .emulateTransitionEnd(300) : this.hideModal() } Modal.prototype.enforceFocus = function () { $(document) .off('focusin.bs.modal') // guard against infinite focus loop .on('focusin.bs.modal', $.proxy(function (e) { if (this.$element[0] !== e.target && !this.$element.has(e.target).length) { this.$element.trigger('focus') } }, this)) } Modal.prototype.escape = function () { if (this.isShown && this.options.keyboard) { this.$element.on('keyup.dismiss.bs.modal', $.proxy(function (e) { e.which == 27 && this.hide() }, this)) } else if (!this.isShown) { this.$element.off('keyup.dismiss.bs.modal') } } Modal.prototype.hideModal = function () { var that = this this.$element.hide() this.backdrop(function () { that.$element.trigger('hidden.bs.modal') }) } Modal.prototype.removeBackdrop = function () { this.$backdrop && this.$backdrop.remove() this.$backdrop = null } Modal.prototype.backdrop = function (callback) { var that = this var animate = this.$element.hasClass('fade') ? 
'fade' : '' if (this.isShown && this.options.backdrop) { var doAnimate = $.support.transition && animate this.$backdrop = $('<div class="modal-backdrop ' + animate + '" />') .appendTo(this.$body) this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) { if (e.target !== e.currentTarget) return this.options.backdrop == 'static' ? this.$element[0].focus.call(this.$element[0]) : this.hide.call(this) }, this)) if (doAnimate) this.$backdrop[0].offsetWidth // force reflow this.$backdrop.addClass('in') if (!callback) return doAnimate ? this.$backdrop .one('bsTransitionEnd', callback) .emulateTransitionEnd(150) : callback() } else if (!this.isShown && this.$backdrop) { this.$backdrop.removeClass('in') var callbackRemove = function () { that.removeBackdrop() callback && callback() } $.support.transition && this.$element.hasClass('fade') ? this.$backdrop .one('bsTransitionEnd', callbackRemove) .emulateTransitionEnd(150) : callbackRemove() } else if (callback) { callback() } } Modal.prototype.checkScrollbar = function () { if (document.body.clientWidth >= window.innerWidth) return this.scrollbarWidth = this.scrollbarWidth || this.measureScrollbar() } Modal.prototype.setScrollbar = function () { var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10) if (this.scrollbarWidth) this.$body.css('padding-right', bodyPad + this.scrollbarWidth) } Modal.prototype.resetScrollbar = function () { this.$body.css('padding-right', '') } Modal.prototype.measureScrollbar = function () { // thx walsh var scrollDiv = document.createElement('div') scrollDiv.className = 'modal-scrollbar-measure' this.$body.append(scrollDiv) var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth this.$body[0].removeChild(scrollDiv) return scrollbarWidth } // MODAL PLUGIN DEFINITION // ======================= function Plugin(option, _relatedTarget) { return this.each(function () { var $this = $(this) var data = $this.data('bs.modal') var options = $.extend({}, Modal.DEFAULTS, 
$this.data(), typeof option == 'object' && option) if (!data) $this.data('bs.modal', (data = new Modal(this, options))) if (typeof option == 'string') data[option](_relatedTarget) else if (options.show) data.show(_relatedTarget) }) } var old = $.fn.modal $.fn.modal = Plugin $.fn.modal.Constructor = Modal // MODAL NO CONFLICT // ================= $.fn.modal.noConflict = function () { $.fn.modal = old return this } // MODAL DATA-API // ============== $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) { var $this = $(this) var href = $this.attr('href') var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7 var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) if ($this.is('a')) e.preventDefault() $target.one('show.bs.modal', function (showEvent) { if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown $target.one('hidden.bs.modal', function () { $this.is(':visible') && $this.trigger('focus') }) }) Plugin.call($target, option, this) }) }(jQuery); ```
```yaml name: angel_validate version: 3.0.0-alpha.1 description: Strongly-typed form handlers and validators for the Angel framework. author: Tobe O <thosakwe@gmail.com> homepage: path_to_url environment: sdk: ">=2.0.0 <3.0.0" dependencies: angel_framework: ^2.0.0 duration: ^2.0.0 html_builder: ^1.0.0 http_parser: ^3.0.0 image: ^2.0.0 matcher: ^0.12.5 dev_dependencies: angel_orm: ^2.1.0-beta angel_orm_generator: ^2.1.0-beta angel_serialize: ^2.0.0 angel_serialize_generator: ^2.0.0 build_runner: ^1.0.0 pedantic: ^1.0.0 pretty_logging: ^1.0.0 test: ^1.0.0 ```
The Royal Order of King George Tupou I is a knighthood order of the Kingdom of Tonga. History The Order was established between 1876 and 1890 by King George Tupou I as a general reward for meritorious services to the kingdom. Classes The Order consists of three classes: Knight Grand Cross - Star & badge from a sash Knight Commander - Star, badge from a necklet ribbon Commander - Breast badge from a ribbon The post-nominal letters of the three classes (from highest to lowest) are respectively K.G.C.G.T., K.C.G.T. and C.G.T. In 2008 King George Tupou V declared the classes of Knight Commander and Commander obsolete. Insignia Knight Grand Cross The Star is a 90mm 8-pointed silver, silver-gilt & enamel faceted Star with four silver Tongan crowns on the main points, (in the North, South, East & West). The white enamel central medallion has the national coat of arms in the centre, the red riband has the gold capital letters KOE 'OTUA MO TOGA KO HOKU TOFi'A. (the wrongly spelt motto due to a manufacturing error – "God and Tonga Are My Inheritance"), there is a small 5-pointed gold star in the base of the riband (reversed, i.e. one point downwards). The ribbon is a 102mm red moirΓ© sash is with two white stripes (18/22.5/21/22.5/18mm) Knight Commander The star (approx. 90mm) is a 7-pointed silver, silver-gilt & enamel faceted Star (with one point downwards), on top of the riband is a gilt crown. The central medallion & riband are as above. The necklet badge (approx 60mm without the crown) is a silver, silver-gilt & enamel 6-pointed faceted star with two points upwards, suspended by a silver-gilt Tongan crown. The central medallion & riband are as above. The necklet ribbon is approx. 41mm wide, red with three white stripes (2.5/6/9/6/9/6/2.5mm) Companion The breast badge is a similar faceted star as the Knight Commander (above) except that the white central medallion has the shield from the coat of arms in the centre. 
The breast ribbon is approx 38mm wide, red with two white stripes (6/8/10/8/6mm) References Orders, decorations, and medals of Tonga
The following article presents a summary of the 1909 football (soccer) season in Brazil, which was the 8th season of competitive football in the country. Campeonato Paulista Final Standings Championship Playoff AA das Palmeiras declared as the Campeonato Paulista champions. State championship champions References Brazilian competitions at RSSSF Seasons in Brazilian football Brazil
```java // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // // path_to_url // // Unless required by applicable law or agreed to in writing, // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // specific language governing permissions and limitations package org.apache.kudu.client; import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.kudu.test.ClientTestUtil.createDefaultTable; import static org.apache.kudu.test.ClientTestUtil.loadDefaultTable; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.Security; import java.util.List; import java.util.Set; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.CharStreams; import org.junit.Rule; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.kudu.test.KuduTestHarness; import org.apache.kudu.test.KuduTestHarness.MasterServerConfig; import org.apache.kudu.test.KuduTestHarness.TabletServerConfig; import org.apache.kudu.test.TempDirUtils; import org.apache.kudu.test.cluster.KuduBinaryLocator; import org.apache.kudu.test.cluster.MiniKuduCluster; import org.apache.kudu.test.cluster.MiniKuduCluster.MiniKuduClusterBuilder; // This is a class for Kudu RPC connection negotiation test scenarios targeting // TLSv1.3. See TestNegotiator for pre-TLSv1.3 test scenarios. 
public class TestNegotiationTLSv13 { static final String[] TLS13_CIPHERS = new String[]{ "TLS_AES_128_GCM_SHA256", "TLS_AES_256_GCM_SHA384", "TLS_CHACHA20_POLY1305_SHA256", }; private static final Logger LOG = LoggerFactory.getLogger(TestNegotiation.class); private static final String TABLE_NAME = "tls_v_1_3_test_table"; private static final int NUM_ROWS = 10; private final MiniKuduClusterBuilder clusterBuilder; @Rule public KuduTestHarness harness; // Whether TLSv1.3 supported by both server and client side. private boolean isTLSv13Supported = false; // Check if TLSv1.3 is supported by the JVM. private static boolean isTLSv13SupportedByJVM() { // It seems some policy-related globals are initialized due to the // SSLContext.getInstance("TLSv1.3") call below, so server certificates // signed by 768-bit RSA keys aren't accepted later on when running test // scenarios due to default security policies. To work around that, override // the default security constraints the same way it's done // in the MiniKuduCluster's constructor. 
Security.setProperty("jdk.certpath.disabledAlgorithms", "MD2, RC4, MD5"); Security.setProperty("jdk.tls.disabledAlgorithms", "SSLv3, RC4, MD5"); try { SSLContext ctx = SSLContext.getInstance("TLS"); ctx.init(null, null, null); SSLEngine engine = ctx.createSSLEngine(); engine.setUseClientMode(true); { Set<String> supported = Sets.newHashSet(engine.getSupportedCipherSuites()); List<String> common = Lists.newArrayList(); for (String c : TLS13_CIPHERS) { if (supported.contains(c)) { common.add(c); } } if (common.isEmpty()) { LOG.info("client side doesn't support TLSv1.3: no common ciphers"); return false; } } { String[] enabled = engine.getEnabledProtocols(); LOG.debug("enabled TLS protocols: {}", Joiner.on(' ').join(enabled)); Set<String> supported = Sets.newHashSet(engine.getSupportedProtocols()); LOG.debug("supported TLS protocols: {}", Joiner.on(' ').join(supported)); if (!supported.contains("TLSv1.3")) { LOG.info("client side doesn't support TLSv1.3: unsupported protocol"); return false; } } } catch (KeyManagementException | NoSuchAlgorithmException e) { LOG.info("client side doesn't support TLSv1.3", e); return false; } return true; } // Check if TLSv1.3 is supported by the Kudu server side. private static boolean isTLSv13SupportedByServerSide() { // Try to start kudu-master requiring TLSv1.3. It will fail to start if // TLSv1.3 isn't supported either by the node's OpenSSL library or // by the build environment where the kudu-master binary was built. MiniKuduClusterBuilder b = new MiniKuduClusterBuilder() .numMasterServers(1) .numTabletServers(0) .addMasterServerFlag("--time_source=system_unsync") .addMasterServerFlag("--rpc_tls_min_protocol=TLSv1.3"); try (MiniKuduCluster c = b.build()) { try { // A sanity check: make sure the started processes haven't crashed. // MiniKuduCluster does neither detect nor report properly on such // events otherwise. 
c.killAllMasterServers(); } catch (IOException e) { LOG.error("unexpected exception:", e); fail("kudu-master didn't actually start"); return false; // unreachable } } catch (IOException e) { LOG.info("server side doesn't support TLSv1.3", e); return false; } return true; } public TestNegotiationTLSv13() { clusterBuilder = new MiniKuduClusterBuilder() .numMasterServers(1) .numTabletServers(3) .enableKerberos(); isTLSv13Supported = isTLSv13SupportedByJVM() && isTLSv13SupportedByServerSide(); if (isTLSv13Supported) { // By the virtue of excluding all other protocols but TLSv1.3 // from the list of available TLS protocols at the server side, // client and server will use TLSv1.3 to negotiate a connection. clusterBuilder.addMasterServerFlag("--rpc_tls_min_protocol=TLSv1.3"); clusterBuilder.addTabletServerFlag("--rpc_tls_min_protocol=TLSv1.3"); } harness = new KuduTestHarness(clusterBuilder); } /** * Make sure that Kudu Java client is able to negotiate RPC connections * protected by TLSv1.3 with Kudu servers. By the virtue of excluding all * other protocols but TLSv1.3 from the list of available TLS protocols * at the server side, this scenario verifies that Kudu Java client is able to * work with a secure Kudu cluster using TLSv1.3. * * Using the JUnit's terminology, this test scenario is conditionally run only * if both the client and the server sides support TLSv1.3. */ @Test @MasterServerConfig(flags = { "--rpc-encryption=required", "--rpc_encrypt_loopback_connections", "--rpc-trace-negotiation", }) @TabletServerConfig(flags = { "--rpc-encryption=required", "--rpc_encrypt_loopback_connections", "--rpc-trace-negotiation", }) public void connectionNegotiation() throws Exception { assumeTrue("TLSv1.3 isn't supported by both sides", isTLSv13Supported); // Make sure Java client is able to communicate with Kudu masters and tablet // servers: create a table and write several rows into the table. 
{ KuduClient c = harness.getClient(); createDefaultTable(c, TABLE_NAME); loadDefaultTable(c, TABLE_NAME, NUM_ROWS); } // An extra sanity check: on successful negotiation the connection should be // considered 'private' once it's protected by TLS, so Kudu master must send // the client an authn token. { AsyncKuduClient c = harness.getAsyncClient(); SecurityContext ctx = c.securityContext; assertNotNull(ctx.getAuthenticationToken()); } } } ```
Šaškinovci () is a village in the municipality of Gradiška, Republika Srpska, Bosnia and Herzegovina. References Populated places in Gradiška, Bosnia and Herzegovina
```javascript Infix operators are left-associative Filtering items out of an array Avoid using `with` Extra function arguments are undefined by default Detect an error type ```
Ambivalent prejudice is a social psychological theory that states that, when people become aware that they have conflicting beliefs about an outgroup (a group of people that do not belong to an individual's own group), they experience an unpleasant mental feeling generally referred to as cognitive dissonance. These feelings are brought about because the individual on one hand believes in humanitarian virtues such as helping those in need, but on the other hand also believes in individualistic virtues such as working hard to improve one's life. Bernard Whitley and Mary Kite contend that this dissonance motivates people to alter their thoughts in an attempt to reduce their discomfort. Depending on the situation or context that has primed them, people will give priority to either the positive beliefs or the negative beliefs, leading to a corresponding behavioral shift known as response amplification. Theoretical framework According to Susan Fiske, there are two underlying characteristics of stigmatized groups around the world: the ideas that status predicts perceived competence and that cooperation predicts perceived warmth. Two combinations of competence and warmth produce ambivalent prejudices. The combined perception of groups as warm but incompetent leads to pitied groups, such as traditional women or older people. The combined perception of groups as competent but cold leads to envied groups, such as nontraditional women or minority entrepreneurs. Fiske uses this conception of prejudice to explain ambivalent sexism, heterosexism, racism, anti-immigrant biases, ageism, and classism. Views According to Whitley and Kite, ambivalent prejudice comes from one person having both good and bad thoughts about an outgroup. The example in their book The Psychology of Prejudice and Discrimination talks about race and how some people often have ambivalent attitudes towards people of other races. 
This means that their behavior is also ambivalent: "sometimes it is positive, sometimes negative." Irwin Katz said that ambivalent prejudice occurs when only the individual becomes aware of the conflicting attitudes, which can be caused for most people simply by coming face to face with someone from the outgroup. According to Katz, that conflict of attitudes can cause problems with one's self-image because it seems as if one is not living up to all important values that one holds. The conflict can cause negative emotions, which are expressed in negative behavior. Irwin Katz and Glen Hass (1988) believed that contradicting American values are to blame for ambivalent prejudice. The first value is that hard work will always pay off and people get what they deserve, but the other value is that all people are equal and that people should help the less fortunate. When that is applied to race, many people are torn. They see disadvantaged people of other races as not working hard enough to be worth as much as people of their own race, but they also understand that people of other race have a harder time financially and socially. Those mixed emotions lead to ambivalence. Tara MacDonald and Mark Zanna suggested that stereotypes were to blame for ambivalent prejudice. According to MacDonald and Zanna, people can like others and respect others, and both emotions work independently of each other. When a person feels those things towards an entire group, it is because of stereotypes. Therefore, a person can like and disrespect people of other races because of certain stereotypes, or they can dislike but respect the same group of people for other stereotypes. In a study testing the nature of ambivalent prejudice, Hisako Matsuo and Kevin McIntyre (2005) studied American attitudes toward immigrant groups. He proposed that ambivalent prejudice stems from two views. 
There is the individualistic attitude that values the Protestant work ethic, an attitude that is associated with more negative attitudes toward outgroups. The other view is an egalitarian or humanitarian one, which is associated with more positive attitudes toward outgroups. Measures Researchers use a variety of methods to measure ambivalent prejudice. The most widely used method is the Ambivalent Sexism Inventory (ASI) for sexism created by Glick and Fiske in 1996. Typical of all ingroup-outgroup relations, one group (men) has a much greater societal status because to male ambivalence has three sources: paternalism, gender differentiation, and heterosexuality. The assessment measures an individual's endorsement of ambivalent sexism, a theory of that postulates that male ambivalence has three sources: paternalism, gender differentiation, and heterosexuality. Women who resist traditional gender roles are punished by hostile sexism which resembles old-fashioned sexism. The theory predicts resentment of nontraditional women along each dimension: dominative paternalism, competitive gender differentiation, and heterosexual hostility. Conversely, women who co-operate with traditional gender roles and relationships evoke benevolent sexism, which comprises protective paternalism, complementary gender differentiation, and heterosexual intimacy. The ASI measures sexism along all of the six dimensions that compose hostile sexism and benevolent sexism. The ASI is a self-report measure composed of 22 items, 11 for each subscale: hostile sexism and benevolent sexism. Both subscales can be either calculated separately or averaged together to get an overall measure of sexism. The assessment consist of a series of statements with which respondents indicate their level of agreement on a 6-point Likert scale in which 0 means disagree strongly and 5 means agree strongly. 
Certain items are reversed coded so that agreement with the statement indicates lower levels of sexism and disagreement with the statement indicates higher levels of sexism. Example items from the ASI include: Below is a series of statements concerning men and women and their relationships in contemporary society that this study wrote for their subjects to evaluate. Benevolent sexism subset: People are often truly happy in life without being romantically involved with a member of the other sex. No matter how accomplished he is, a man is not truly complete as a person unless he has the love of a woman. Men are complete without women. Every man ought to have a woman whom he adores. Women should be cherished and protected by men. Women, as compared to men, tend to have a more refined sense of culture and good taste. Women, compared to men, tend to have superior moral sensibility. Many women have a quality of purity that few men possess. Hostile sexism subset: Women exaggerate problems they have at work. Most women interpret innocent remarks or acts as being sexist. Women are too easily offended. Most women fail to appreciate fully all that men do for them. Feminists are not seeking for women to have more power than men. There are actually very few women who get a kick out of teasing men by seeming sexually available and then refusing male advances. Researchers use various other methods to measure different types of ambivalent prejudices. For example, the Modern Racism Scale measures aspects of ambivalent racism. Applications Sexism Ambivalent sexism reflects the duality of hostility towards women and the tendency for women to be rated more positively than men in surveys. Hostile sexism impacts those perceived as nontraditional women who threaten male power, for example, female professionals and intellectuals, feminists, and political lesbians. 
Conversely, benevolent sexism protects women who are perceived as adhering to traditional gender roles, such as housewives and secretaries. Fiske asserts that these two forms of sexism comprise ambivalence. On the one hand, women are viewed as competent but not warm, while on the other hand, they are viewed as warm but incompetent. In the workplace, nontraditional women tend to suffer from hostile sexism since they are viewed as competitors. As benevolent sexism includes perceived obligations of protection and help, it leads to women being viewed as less worthy of hiring, training, and promoting due to the concern of them being less able to effectively manage both personal and professional responsibilities. Fiske contends that when addressing bias against women, both demeaning benevolence and dangerous hostility must be accounted for. Racism Ambivalent racism depicts two contrasting reactions by whites toward blacks. These competing evaluations include hostile (antiblack) sentiments and subjectively sympathetic but paternalistic (problack) sentiments. Problack attitudes attribute black disadvantage to larger social structures and factors including discrimination, segregation, and lack of opportunities. In contrast, hostile antiblack racism, like old-fashioned racism, asserts that "black people are unambitious, disorganized, free-riding, and do not value education." Fiske states that "black Americans are viewed ambivalently mainly to the extent that white Americans simultaneously harbor a more subjectively positive and a more hostile attitude, which can flip from one polarity to the other, depending on individual differences in beliefs and on situational cues." Ableism Söder suggests that people do not have fixed cognitive assumptions or emotions about people with disabilities. Rather, people are ambivalent, so their behavior in any given situation will depend on the context. 
People have two contrasting ideas about people with disabilities; people devalue disabilities while maintaining a benevolent sympathy towards disabled people. This leads to a conflict between basic values held by wider society and moral dilemmas in concrete daily interactions with people with disabilities. Söder proposes an ambivalence model as a better method for evaluating interactions with and attitudes about disabled people as it better captures the totality of people's sentiments. Xenophobia Matsuo and McIntyre applied the concept of ambivalent prejudice to immigrants and refugees. They described attitudes toward immigrants and refugees as ambivalent since on the one hand they are perceived "sympathetically, as disadvantaged, and deserving of justice", but on the other hand, they are seen as "more likely to be involved in crime and a burden on the public system." Matsuo and McIntyre used a sample survey of college students to test egalitarianism and the Protestant work ethic (PWE) and how it relates to perceptions of refugees. Participants completed survey questions regarding social contact, attitudes toward specific ethnic groups, general attitudes toward refugees, and the Humanitarianism/Protestant Work Ethic Scale. They found that the ambivalent attitudes toward refugees are based on the "dual maintenance of American values", egalitarianism and PWE. In testing the contact theory, they found that only when contact is personal and cooperative does prejudice decrease. Response amplification In order to reduce the negative feelings brought about by cognitive dissonance, people may engage in response amplification. Response amplification is defined by engaging in a more extreme response to a stigmatized individual in comparison to a similar but non-stigmatized individual than the situation calls for. This can include overdoing both positive responses and negative responses depending on whether the situation calls for a positive or negative response. 
For example, whites' evaluations of blacks who are presented positively or negatively tend to be more extreme than evaluations of similar white individuals. Hass et al. (1991) had white students participate in an experiment in which each of them worked with either a white or black confederate to complete a task. The confederate, as instructed by the experimenter, caused the failure or the successful achievement of the task. After the task, the white students rated the confederate's performance. Those who scored higher in ambivalence rated the black confederate more positively in the success condition but more negatively in the failure condition than the white confederate. David Bell and Victoria Esses (2002) conducted a study indicating that response amplification occurs only when one believes that the ambivalent response is problematic. When ambivalent white Canadian students were given essays that emphasized the positivity or negativity of ambivalence (considering both the good and bad in a situation or person), only those in the negative condition engaged in response amplification. In addition to racial contexts, response amplification has been found in multiple contexts including in cases of able-bodied people interacting with disabled individuals, women and men rating members of the opposite sex, and ratings of female feminists. Mitigation Leippe and Eisenstadt found that dissonance-mediated change may be more successful when an internal conflict already exists, that is, when individuals possess cognitive dissonance that can be a result of ambivalence. In three experiments, whites were encouraged to write essays regarding scholarship policies that would favor blacks. Writing the essay led to a more positive perception of the policy, as well as, in some cases, more positive attitudes towards blacks in general. Ambivalent people were more likely to comply with writing a positive essay than non-ambivalent people. 
As a result of writing the essay, participants felt cognitive dissonance which led them to engage in a sort of cognitive restructuring to further reduce the dissonance. This meant engaging in more extended thinking that led to more positive beliefs about Blacks in general as well as about the specific policy. By inducing compliance in writing, they were able to induce a change in attitudes toward the target group. Fiske suggests several methods to mitigate ambivalent prejudice particularly in the context of business management. These methods mainly involve an increased awareness and recognition of the different types of prejudice. She states that not all prejudices are alike, but they do create predictable groups of stereotypes, emotional prejudices, and discriminatory tendencies. When working to counteract prejudice, the focus should be on the most stereotypically negative aspect for a group, for example, competence for older people. In addition, constructive contact, that involving cooperation and equal status in the setting, for example, between groups improves emotional intelligence. See also Benevolent prejudice Hostile prejudice Ingroups and outgroups Role congruity theory Women are wonderful Aversive racism Tokenism Allosemitism References Prejudices
Hermann Kriebel (20 January 1876 in Germersheim – 16 February 1941 in Munich) was a lieutenant colonel and former Bavarian staff officer. Life He fought with the Freikorps during the German Revolution of 1918–19. As a member of the German 1919 Armistice delegation, his parting words were "See you again in 20 years." In 1923 he became the military leader of the Kampfbund, the league of nationalist and fighting societies that included Adolf Hitler's Nazi party and SA; the Oberland League; and Ernst Röhm's Reichskriegflagge. Kriebel was, with Hitler and Erich Ludendorff, a key figure in the 8–9 November 1923 Beer Hall Putsch and was convicted with Hitler in 1924, serving his sentence at Landsberg Prison. In 1929, he arrived in China to work as an arms dealer and an adviser to the Kuomintang government of Chiang Kai-shek. Besides fighting the Chinese Communists, the Kuomintang regime was at the time fighting the armies of Chinese warlords, namely General Feng Yuxiang in the north and the Guangxi clique of General Bai Chongxi and Li Zongren in the south. Accordingly, as China had hardly any arms manufacturing factories of its own at the time, arms had to be imported. Kriebel found that the demand for arms in China was enormous, making the work of an arms dealer very profitable. After his release from prison, he maintained his ties with the Nazi Party and the Oberland League. He became the German consul general in Shanghai. References Books and articles External links 1876 births 1941 deaths 20th-century Freikorps personnel People from Germersheim National Socialist Freedom Movement politicians Collaborators who participated in the Beer Hall Putsch German diplomats German Army personnel of World War I Military personnel of Bavaria Nazi Party officials Members of the Reichstag of the Weimar Republic Members of the Reichstag of Nazi Germany People convicted of treason against Germany Consuls in Shanghai
```go /* path_to_url Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package v1beta1 import ( "k8s.io/api/extensions/v1beta1" "k8s.io/apimachinery/pkg/api/meta" "k8s.io/apimachinery/pkg/runtime/schema" ) // The ScaleExpansion interface allows manually adding extra methods to the ScaleInterface. type ScaleExpansion interface { Get(kind string, name string) (*v1beta1.Scale, error) Update(kind string, scale *v1beta1.Scale) (*v1beta1.Scale, error) } // Get takes the reference to scale subresource and returns the subresource or error, if one occurs. func (c *scales) Get(kind string, name string) (result *v1beta1.Scale, err error) { result = &v1beta1.Scale{} // TODO this method needs to take a proper unambiguous kind fullyQualifiedKind := schema.GroupVersionKind{Kind: kind} resource, _ := meta.UnsafeGuessKindToResource(fullyQualifiedKind) err = c.client.Get(). Namespace(c.ns). Resource(resource.Resource). Name(name). SubResource("scale"). Do(). Into(result) return } func (c *scales) Update(kind string, scale *v1beta1.Scale) (result *v1beta1.Scale, err error) { result = &v1beta1.Scale{} // TODO this method needs to take a proper unambiguous kind fullyQualifiedKind := schema.GroupVersionKind{Kind: kind} resource, _ := meta.UnsafeGuessKindToResource(fullyQualifiedKind) err = c.client.Put(). Namespace(scale.Namespace). Resource(resource.Resource). Name(scale.Name). SubResource("scale"). Body(scale). Do(). Into(result) return } ```
The Obelisk at Slottsbacken is an obelisk monument adjacent to the Royal Palace on Slottsbacken in Old Town, Stockholm, Sweden and is considered to be the very centre point of the Swedish capital city. Unveiled in 1800, it commemorates the deeds of Stockholm's citizenry during the Russo-Swedish War. In 2017, the original obelisk was dismantled due to age- and weather-related damage and was rebuilt, using newly quarried stone, in spring 2020. Physical description The original stone obelisk was nearly high, including the pedestal of . It weighed 150 tons and was made up of 17 different pieces of granite, believed to have been quarried in nearby Ulfsunda. The new obelisk is also high, but weighs 280 tons, having been constructed of solid stone rather than stone drums like the original. From the Obelisk all street numbers in Stockholm have their common origin - there are only a handful of exceptions, with some small streets originating from the street Birger Jarlsgatan. History The obelisk was commissioned by King Gustav III to show his gratitude to the burghers of Stockholm who guarded the city while the king was leading the Swedish Navy and Army in the Russian War in 1788-1790. The neo-Egyptian design of the obelisk was made by the artist Louis Jean Desprez and it was erected by the inventor and colonel-mecanicus Jonas LidstrΓΆmer in 1800. The construction was at the time considered to be complicated, since the obelisk is made of many heavy stone boulders, and not cut from one piece as was typically done in classical antiquity. Gustav III died before the monument was finished, and in October 1800, King Gustav IV Adolf unveiled the obelisk. 2017–2020 replacement By 2012, the obelisk was showing signs of significant deterioration and the area around the monument was fenced off to protect the public from possible falling stones. In 2017, the obelisk was dismantled and removed for repair and restoration work. 
The repairs were initially expected to take about a year, but it was later determined that the stones which make up the monument were so damaged they could not be repaired. It was decided to quarry new Bohus granite stones with similar technical and aesthetic properties from an area near Hunnebostrand. The shaft of the obelisk was rebuilt beginning in April 2020, with the final top piece installed 17 June 2020. The new obelisk was processed by the stonemason Ted Zaar at the company Zaarstone in Vilshult. The granite - Tossene GrΓ₯ Bohus - was delivered from one of Hallindens Granit's quarries. References and notes See also List of streets and squares in Gamla stan History of Stockholm Buildings and structures in Stockholm Landmarks in Sweden Obelisks
Daniels v Campbell NO and Others, an important case in South African law, was heard in the Constitutional Court on 6 November 2003, with judgment handed down on 11 March 2004. The applicant was a woman married in terms of Muslim rites, whose husband had died intestate. The court noted that Muslim marriages were not recognised in South African law. It concluded that this violated section 9 of the Constitution. Accordingly, it was held that the applicant could inherit. The ambit of this judgment was restricted to de facto monogamous Muslim marriages; it was extended to polygamous Muslim marriages in Hassam v Jacobs. In this context, the meaning of the word "spouses" was called into question. Facts An application was made for confirmation of an order of the Cape High Court which declared invalid and unconstitutional certain provisions of the Maintenance Act and the Intestate Succession Act for their failure to recognise as "spouses" persons married according to Muslim rites, and therefore to allow partners in Muslim marriages to benefit from their protections, which include the provision of relief to widows to ensure that they receive at least a child's share of their husbands' estates. The Muslim wife in this case was going to lose a house she owned which was registered in her deceased husband's name. In terms of the Maintenance of Surviving Spouses Act, certain benefits are conferred on "spouses", a term which did not include spouses in a de facto monogamous Muslim marriage. It is important to note that the question before the court was not whether Muslim marriage is lawful under the Marriage Act. Muslim marriages have not yet been expressly recognised in South African law, although there is a Draft Muslim Marriages Bill. 
Judgment Albie Sachs held that the word "spouse," in its ordinary meaning, should include parties to a Muslim marriage, because this corresponds to the way the word is generally understood and used, and because it would be far more awkward from a linguistic point of view to exclude Muslim partners than to include them. The historic exclusion in South Africa flowed not from the courts' giving the word its ordinary meaning but from a linguistically-strained usage and from cultural and racial prejudices. Both the intent and the impact of the restrictive interpretation were discriminatory. The words "spouse" and "survivor" as used in the Acts would henceforth apply to partners to monogamous Muslim marriages. The court intentionally did not deal with the question of polygamous Muslim marriages. See also Amod v MMVF Hassam v Jacobs Ismail v Ismail Kahn v Kahn South African family law Women's Legal Centre Trust v President of the Republic of South Africa References Cases Daniels v Campbell NO and Others 2004 (5) SA 331 (CC). Statutes Intestate Succession Act 81 of 1987. Maintenance of Surviving Spouses Act 27 of 1990. Marriage Act 25 of 1961. Notes Constitutional Court of South Africa cases 2004 in South African case law South African family case law Law of succession in South Africa
Protogamasellopsis leptosomae is a species of mite in the family Rhodacaridae. References Rhodacaridae Articles created by Qbugbot Animals described in 1994
```swift
//
//  ViewControllerProgressProcess.swift
//  RsyncOSXver30
//
//  Created by Thomas Evensen on 24/08/2016.
//

import Cocoa

// Protocol for progress indicator.
// The delegate supplies the total number of work items (maxCount) and the
// number completed so far (inprogressCount) so this view can drive its bar.
protocol Count: AnyObject {
    func maxCount() -> Int
    func inprogressCount() -> Int
}

// Modal-style progress sheet shown while a task (main sync, restore, or
// snapshot operation) runs. The presenting view controller determines which
// Count delegate feeds the progress bar and which view is returned to on
// dismiss.
class ViewControllerProgressProcess: NSViewController, SetConfigurations, SetDismisser, Abort {
    var count: Double = 0
    var maxcount: Double = 0
    // Supplier of progress numbers; set in viewDidAppear() from the presenter.
    weak var countDelegate: Count?

    @IBOutlet var abort: NSButton!
    @IBOutlet var progress: NSProgressIndicator!

    // User pressed the abort button: dismiss back to the appropriate view,
    // then abort the running task.
    @IBAction func abort(_: NSButton) {
        switch countDelegate {
        case is ViewControllerSnapshots:
            dismissview(viewcontroller: self, vcontroller: .vcsnapshot)
        case is ViewControllerRestore:
            dismissview(viewcontroller: self, vcontroller: .vcrestore)
        default:
            // Delegate for the main view is the singletask object (not
            // ViewControllerMain itself), so it falls through to here.
            dismissview(viewcontroller: self, vcontroller: .vctabmain)
        }
        // NOTE(review): `abort` is both the NSButton outlet and a method from
        // the Abort protocol; this call presumably resolves to the protocol
        // method that aborts the task — confirm.
        abort()
    }

    override func viewDidAppear() {
        super.viewDidAppear()
        // Register self so other controllers can reach this progress view.
        SharedReference.shared.setvcref(viewcontroller: .vcprogressview, nsviewcontroller: self)
        // Wire the Count delegate based on who presented this sheet.
        if (presentingViewController as? ViewControllerMain) != nil {
            if let pvc = (presentingViewController as? ViewControllerMain)?.singletask {
                countDelegate = pvc
            }
        } else if (presentingViewController as? ViewControllerRestore) != nil {
            countDelegate = SharedReference.shared.getvcref(viewcontroller: .vcrestore) as? ViewControllerRestore
        } else if (presentingViewController as? ViewControllerSnapshots) != nil {
            countDelegate = SharedReference.shared.getvcref(viewcontroller: .vcsnapshot) as? ViewControllerSnapshots
        }
        initiateProgressbar()
        abort.isEnabled = true
    }

    override func viewWillDisappear() {
        super.viewWillDisappear()
        stopProgressbar()
        // Deregister so stale references do not outlive the view.
        SharedReference.shared.setvcref(viewcontroller: .vcprogressview, nsviewcontroller: nil)
    }

    private func stopProgressbar() {
        progress.stopAnimation(self)
    }

    // Progress bars
    // Configure min/max and start animating. For non-snapshot presenters the
    // max is padded with extralines (presumably rsync's trailing summary
    // output lines — confirm against the output parser).
    private func initiateProgressbar() {
        if (presentingViewController as? ViewControllerSnapshots) != nil {
            progress.maxValue = Double(countDelegate?.maxCount() ?? 0)
        } else {
            progress.maxValue = Double((countDelegate?.maxCount() ?? 0) + SharedReference.shared.extralines)
        }
        progress.minValue = 0
        progress.doubleValue = 0
        progress.startAnimation(self)
    }

    private func updateProgressbar(_ value: Double) {
        progress.doubleValue = value
    }
}

// Callbacks from the running process: termination dismisses the sheet,
// per-file output ticks the bar forward.
extension ViewControllerProgressProcess: UpdateProgress {
    func processTermination() {
        stopProgressbar()
        // NOTE(review): the ViewControllerMain case looks unreachable because
        // countDelegate is set to its singletask, not the controller itself;
        // the default branch covers that path anyway — confirm.
        switch countDelegate {
        case is ViewControllerMain:
            dismissview(viewcontroller: self, vcontroller: .vctabmain)
        case is ViewControllerSnapshots:
            dismissview(viewcontroller: self, vcontroller: .vcsnapshot)
        case is ViewControllerRestore:
            dismissview(viewcontroller: self, vcontroller: .vcrestore)
        default:
            dismissview(viewcontroller: self, vcontroller: .vctabmain)
        }
    }

    func fileHandler() {
        updateProgressbar(Double(countDelegate?.inprogressCount() ?? 0))
    }
}
```
Opportunity Village is an organized homeless community in Eugene, Oregon. It is supported by the non-profit organization Opportunity Village Eugene (OVE). History The village began in the wake of the Occupy movement in Eugene. After the Occupy encampments had been disassembled, mayor Kitty Piercy assembled a task force to address the housing crisis in the city. In 2013, the non-profit organization Opportunity Village Eugene (OVE) was founded with the goal of addressing the issue of affordable housing. That August, the Eugene City Council voted to donate land to OVE and approved the construction of a village. Opportunity Village opened in May 2014. It was funded by $100,000 in private donations, and labor and materials were also donated. Each of the 30 houses was constructed for $3,300. Housing consists of small bungalows and Conestoga huts. Organization The village is located on a gated, city-owned lot. It has a capacity for over 30 residents and residents share maintenance and cleaning duties. There is no time limit for how long residents can stay in the village, but they are expected to transition to permanent housing. Each structure is 80 square feet in size. Individual homes are not hooked up to electricity or plumbing, and residents share communal cooking, gathering, and sanitary spaces. References Further reading Homelessness in Oregon Populated places established in 2014
```go
//
//
// path_to_url
//
// Unless required by applicable law or agreed to in writing, software
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

package server

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"path"
	"runtime"
	"runtime/trace"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"github.com/pingcap/errors"
	"github.com/pingcap/failpoint"
	"github.com/pingcap/kvproto/pkg/metapb"
	"github.com/pingcap/kvproto/pkg/pdpb"
	"github.com/pingcap/kvproto/pkg/schedulingpb"
	"github.com/pingcap/kvproto/pkg/tsopb"
	"github.com/pingcap/log"
	"github.com/tikv/pd/pkg/core"
	"github.com/tikv/pd/pkg/errs"
	"github.com/tikv/pd/pkg/mcs/utils/constant"
	"github.com/tikv/pd/pkg/storage/endpoint"
	"github.com/tikv/pd/pkg/storage/kv"
	"github.com/tikv/pd/pkg/tso"
	"github.com/tikv/pd/pkg/utils/grpcutil"
	"github.com/tikv/pd/pkg/utils/logutil"
	"github.com/tikv/pd/pkg/utils/syncutil"
	"github.com/tikv/pd/pkg/utils/tsoutil"
	"github.com/tikv/pd/pkg/versioninfo"
	"github.com/tikv/pd/server/cluster"
	clientv3 "go.etcd.io/etcd/client/v3"
	"go.uber.org/multierr"
	"go.uber.org/zap"
	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// Timeouts, retry parameters and service identifiers shared by the gRPC
// handlers in this file.
const (
	// Upper bound for a single region-heartbeat response send (see heartbeatServer.Send).
	heartbeatSendTimeout = 5 * time.Second
	// Retry budget when forwarding requests to a TSO server.
	maxRetryTimesRequestTSOServer = 3
	retryIntervalRequestTSOServer = 500 * time.Millisecond
	// Per-server deadline used by getMinTSFromSingleServer.
	getMinTSFromTSOServerTimeout = 1 * time.Second
	defaultGRPCDialTimeout       = 3 * time.Second
	gRPCServiceName              = "pdpb.PD"
)

// gRPC errors
// Pre-built status errors returned to clients; each carries the gRPC code
// clients are expected to switch on.
var (
	// ErrNotLeader is returned when current server is not the leader and not possible to process request.
	// TODO: work as proxy.
	ErrNotLeader                        = status.Errorf(codes.Unavailable, "not leader")
	ErrNotStarted                       = status.Errorf(codes.Unavailable, "server not started")
	ErrSendHeartbeatTimeout             = status.Errorf(codes.DeadlineExceeded, "send heartbeat timeout")
	ErrNotFoundTSOAddr                  = status.Errorf(codes.NotFound, "not found tso address")
	ErrNotFoundSchedulingAddr           = status.Errorf(codes.NotFound, "not found scheduling address")
	ErrNotFoundService                  = status.Errorf(codes.NotFound, "not found service")
	ErrForwardTSOTimeout                = status.Errorf(codes.DeadlineExceeded, "forward tso request timeout")
	ErrMaxCountTSOProxyRoutinesExceeded = status.Errorf(codes.ResourceExhausted, "max count of concurrent tso proxy routines exceeded")
	ErrTSOProxyRecvFromClientTimeout    = status.Errorf(codes.DeadlineExceeded, "tso proxy timeout when receiving from client; stream closed by server")
	ErrEtcdNotStarted                   = status.Errorf(codes.Unavailable, "server is started, but etcd not started")
	ErrFollowerHandlingNotAllowed       = status.Errorf(codes.Unavailable, "not leader and follower handling not allowed")
)

// Pre-bound failure counters (metric: forwardFailCounter), one per
// RPC-name/stage pair, so hot paths avoid a label lookup on every error.
var (
	errRegionHeartbeatSend   = forwardFailCounter.WithLabelValues("region_heartbeat", "send")
	errRegionHeartbeatClient = forwardFailCounter.WithLabelValues("region_heartbeat", "client")
	errRegionHeartbeatStream = forwardFailCounter.WithLabelValues("region_heartbeat", "stream")
	errRegionHeartbeatRecv   = forwardFailCounter.WithLabelValues("region_heartbeat", "recv")
	errScatterRegionSend     = forwardFailCounter.WithLabelValues("scatter_region", "send")
	errSplitRegionsSend      = forwardFailCounter.WithLabelValues("split_regions", "send")
	errStoreHeartbeatSend    = forwardFailCounter.WithLabelValues("store_heartbeat", "send")
	errGetOperatorSend       = forwardFailCounter.WithLabelValues("get_operator", "send")
)

// GrpcServer wraps Server to provide grpc service.
type GrpcServer struct {
	*Server
	// Cached client to the scheduling service; stored atomically because it
	// is swapped when the scheduling primary changes.
	schedulingClient atomic.Value
	// Number of TSO proxy streams currently running, checked against a limit.
	concurrentTSOProxyStreamings atomic.Int32
}

// tsoServer wraps PD_TsoServer to ensure when any error
// occurs on Send() or Recv(), both endpoints will be closed.
type tsoServer struct {
	stream pdpb.PD_TsoServer
	// 0 = open, 1 = closed; flipped atomically on first error/timeout so
	// subsequent calls short-circuit with io.EOF.
	closed int32
}

// pdpbTSORequest pairs a received request with its receive error so both can
// travel over one channel from the reader goroutine.
type pdpbTSORequest struct {
	request *pdpb.TsoRequest
	err     error
}

// send pushes one TSO response to the client, bounded by
// tsoutil.DefaultTSOProxyTimeout. The actual Send runs in a goroutine so the
// timeout can fire independently; any error or timeout marks the stream closed.
func (s *tsoServer) send(m *pdpb.TsoResponse) error {
	if atomic.LoadInt32(&s.closed) == 1 {
		return io.EOF
	}
	done := make(chan error, 1)
	go func() {
		defer logutil.LogPanic()
		// Test hook: simulate a send failure.
		failpoint.Inject("tsoProxyFailToSendToClient", func() {
			done <- errors.New("injected error")
			failpoint.Return()
		})
		done <- s.stream.Send(m)
	}()
	timer := time.NewTimer(tsoutil.DefaultTSOProxyTimeout)
	defer timer.Stop()
	select {
	case err := <-done:
		if err != nil {
			atomic.StoreInt32(&s.closed, 1)
		}
		return errors.WithStack(err)
	case <-timer.C:
		// Note: the Send goroutine may still complete later; the buffered
		// channel (cap 1) lets it finish without leaking.
		atomic.StoreInt32(&s.closed, 1)
		return ErrForwardTSOTimeout
	}
}

// recv waits up to `timeout` for the next TSO request from the client.
// A receive error or timeout marks the stream closed for future calls.
func (s *tsoServer) recv(timeout time.Duration) (*pdpb.TsoRequest, error) {
	if atomic.LoadInt32(&s.closed) == 1 {
		return nil, io.EOF
	}
	// Test hook: override the receive timeout.
	failpoint.Inject("tsoProxyRecvFromClientTimeout", func(val failpoint.Value) {
		if customTimeoutInSeconds, ok := val.(int); ok {
			timeout = time.Duration(customTimeoutInSeconds) * time.Second
		}
	})
	requestCh := make(chan *pdpbTSORequest, 1)
	go func() {
		defer logutil.LogPanic()
		request, err := s.stream.Recv()
		requestCh <- &pdpbTSORequest{request: request, err: err}
	}()
	timer := time.NewTimer(timeout)
	defer timer.Stop()
	select {
	case req := <-requestCh:
		if req.err != nil {
			atomic.StoreInt32(&s.closed, 1)
			return nil, errors.WithStack(req.err)
		}
		return req.request, nil
	case <-timer.C:
		atomic.StoreInt32(&s.closed, 1)
		return nil, ErrTSOProxyRecvFromClientTimeout
	}
}

// heartbeatServer wraps PD_RegionHeartbeatServer to ensure when any error
// occurs on Send() or Recv(), both endpoints will be closed.
type heartbeatServer struct {
	stream pdpb.PD_RegionHeartbeatServer
	// 0 = open, 1 = closed; set atomically on first error/timeout so later
	// calls short-circuit with io.EOF.
	closed int32
}

// Send wraps Send() of PD_RegionHeartbeatServer.
// The underlying Send runs in a goroutine so the call is bounded by
// heartbeatSendTimeout; any error or timeout marks the stream closed.
func (s *heartbeatServer) Send(m core.RegionHeartbeatResponse) error {
	if atomic.LoadInt32(&s.closed) == 1 {
		return io.EOF
	}
	done := make(chan error, 1)
	go func() {
		defer logutil.LogPanic()
		done <- s.stream.Send(m.(*pdpb.RegionHeartbeatResponse))
	}()
	timer := time.NewTimer(heartbeatSendTimeout)
	defer timer.Stop()
	select {
	case err := <-done:
		if err != nil {
			atomic.StoreInt32(&s.closed, 1)
		}
		return errors.WithStack(err)
	case <-timer.C:
		atomic.StoreInt32(&s.closed, 1)
		return ErrSendHeartbeatTimeout
	}
}

// Recv wraps Recv() of PD_RegionHeartbeatServer.
// A receive error closes the wrapper for all subsequent calls.
func (s *heartbeatServer) Recv() (*pdpb.RegionHeartbeatRequest, error) {
	if atomic.LoadInt32(&s.closed) == 1 {
		return nil, io.EOF
	}
	req, err := s.stream.Recv()
	if err != nil {
		atomic.StoreInt32(&s.closed, 1)
		return nil, errors.WithStack(err)
	}
	return req, nil
}

// schedulingClient bundles a scheduling-service client with the address of
// the primary it is connected to. Accessors are nil-safe because the value is
// loaded from an atomic.Value that may be unset.
type schedulingClient struct {
	client  schedulingpb.SchedulingClient
	primary string
}

func (s *schedulingClient) getClient() schedulingpb.SchedulingClient {
	if s == nil {
		return nil
	}
	return s.client
}

func (s *schedulingClient) getPrimaryAddr() string {
	if s == nil {
		return ""
	}
	return s.primary
}

// request is the minimal interface every forwardable pdpb request satisfies.
type request interface {
	GetHeader() *pdpb.RequestHeader
}

// forwardFn re-issues a request against a delegate connection when the
// request must be forwarded to another member.
type forwardFn func(ctx context.Context, client *grpc.ClientConn) (any, error)

// unaryMiddleware is unaryFollowerMiddleware with follower handling disabled.
func (s *GrpcServer) unaryMiddleware(ctx context.Context, req request, fn forwardFn) (rsp any, err error) {
	return s.unaryFollowerMiddleware(ctx, req, fn, nil)
}

// unaryFollowerMiddleware adds the check of followers enable compared to unaryMiddleware.
// unaryFollowerMiddleware performs the common pre-processing for unary RPCs:
// if the request carries a forwarded host that is not this server, it is
// re-issued via fn against a delegate connection; otherwise the caller's role
// is validated. A (nil, nil) result means "handle locally".
func (s *GrpcServer) unaryFollowerMiddleware(ctx context.Context, req request, fn forwardFn, allowFollower *bool) (rsp any, err error) {
	// Test hook: inject latency.
	failpoint.Inject("customTimeout", func() {
		time.Sleep(5 * time.Second)
	})
	forwardedHost := grpcutil.GetForwardedHost(ctx)
	if !s.isLocalRequest(forwardedHost) {
		client, err := s.getDelegateClient(ctx, forwardedHost)
		if err != nil {
			return nil, err
		}
		// Strip the forwarding metadata before re-sending to avoid loops.
		ctx = grpcutil.ResetForwardContext(ctx)
		return fn(ctx, client)
	}
	if err := s.validateRoleInRequest(ctx, req.GetHeader(), allowFollower); err != nil {
		return nil, err
	}
	return nil, nil
}

// GetClusterInfo implements gRPC PDServer.
func (s *GrpcServer) GetClusterInfo(context.Context, *pdpb.GetClusterInfoRequest) (*pdpb.GetClusterInfoResponse, error) {
	// Here we purposely do not check the cluster ID because the client does not know the correct cluster ID
	// at startup and needs to get the cluster ID with the first request (i.e. GetMembers).
	if s.IsClosed() {
		return &pdpb.GetClusterInfoResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, errs.ErrServerNotStarted.FastGenByArgs().Error()),
		}, nil
	}
	// Report which service modes this server runs; TSO addresses are only
	// meaningful in API service mode, where TSO is a separate service.
	var tsoServiceAddrs []string
	svcModes := make([]pdpb.ServiceMode, 0)
	if s.IsAPIServiceMode() {
		svcModes = append(svcModes, pdpb.ServiceMode_API_SVC_MODE)
		tsoServiceAddrs = s.keyspaceGroupManager.GetTSOServiceAddrs()
	} else {
		svcModes = append(svcModes, pdpb.ServiceMode_PD_SVC_MODE)
	}
	return &pdpb.GetClusterInfoResponse{
		Header:       s.header(),
		ServiceModes: svcModes,
		TsoUrls:      tsoServiceAddrs,
	}, nil
}

// GetMinTS implements gRPC PDServer. In PD service mode, it simply returns a timestamp.
// In API service mode, it queries all tso servers and gets the minimum timestamp across
// all keyspace groups.
func (s *GrpcServer) GetMinTS( ctx context.Context, request *pdpb.GetMinTSRequest, ) (*pdpb.GetMinTSResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.GetMinTSResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).GetMinTS(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.GetMinTSResponse), nil } var ( minTS *pdpb.Timestamp err error ) if s.IsAPIServiceMode() { minTS, err = s.GetMinTSFromTSOService(tso.GlobalDCLocation) } else { start := time.Now() ts, internalErr := s.tsoAllocatorManager.HandleRequest(ctx, tso.GlobalDCLocation, 1) if internalErr == nil { tsoHandleDuration.Observe(time.Since(start).Seconds()) } minTS = &ts } if err != nil { return &pdpb.GetMinTSResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), Timestamp: minTS, }, nil } return &pdpb.GetMinTSResponse{ Header: s.header(), Timestamp: minTS, }, nil } // GetMinTSFromTSOService queries all tso servers and gets the minimum timestamp across // all keyspace groups. 
// It fans out one request per discovered TSO server concurrently and only
// accepts the result when every keyspace group has been covered.
func (s *GrpcServer) GetMinTSFromTSOService(dcLocation string) (*pdpb.Timestamp, error) {
	addrs := s.keyspaceGroupManager.GetTSOServiceAddrs()
	if len(addrs) == 0 {
		return &pdpb.Timestamp{}, errs.ErrGetMinTS.FastGenByArgs("no tso servers/pods discovered")
	}

	// Get the minimal timestamp from the TSO servers/pods
	var mutex syncutil.Mutex // guards resps, appended to from multiple goroutines
	resps := make([]*tsopb.GetMinTSResponse, 0)
	wg := sync.WaitGroup{}
	wg.Add(len(addrs))
	for _, addr := range addrs {
		go func(addr string) {
			defer wg.Done()
			resp, err := s.getMinTSFromSingleServer(s.ctx, dcLocation, addr)
			if err != nil || resp == nil {
				// A failed server is tolerated here; completeness is checked
				// below via the keyspace-group counts.
				log.Warn("failed to get min ts from tso server", zap.String("address", addr), zap.Error(err))
				return
			}
			mutex.Lock()
			defer mutex.Unlock()
			resps = append(resps, resp)
		}(addr)
	}
	wg.Wait()

	// Check the results. The returned minimal timestamp is valid if all the conditions are met:
	// 1. The number of responses is equal to the number of TSO servers/pods.
	// 2. The number of keyspace groups asked is equal to the number of TSO servers/pods.
	// 3. The minimal timestamp is not zero.
	var (
		minTS               *pdpb.Timestamp
		keyspaceGroupsAsked uint32
	)
	if len(resps) == 0 {
		return &pdpb.Timestamp{}, errs.ErrGetMinTS.FastGenByArgs("none of tso server/pod responded")
	}
	emptyTS := &pdpb.Timestamp{}
	keyspaceGroupsTotal := resps[0].KeyspaceGroupsTotal
	for _, resp := range resps {
		if resp.KeyspaceGroupsTotal == 0 {
			return &pdpb.Timestamp{}, errs.ErrGetMinTS.FastGenByArgs("the tso service has no keyspace group")
		}
		if resp.KeyspaceGroupsTotal != keyspaceGroupsTotal {
			// All servers must agree on how many keyspace groups exist.
			return &pdpb.Timestamp{}, errs.ErrGetMinTS.FastGenByArgs(
				"the tso service has inconsistent keyspace group total count")
		}
		keyspaceGroupsAsked += resp.KeyspaceGroupsServing
		// Track the smallest non-zero timestamp seen so far.
		if tsoutil.CompareTimestamp(resp.Timestamp, emptyTS) > 0 &&
			(minTS == nil || tsoutil.CompareTimestamp(resp.Timestamp, minTS) < 0) {
			minTS = resp.Timestamp
		}
	}
	if keyspaceGroupsAsked != keyspaceGroupsTotal {
		return &pdpb.Timestamp{}, errs.ErrGetMinTS.FastGenByArgs(
			fmt.Sprintf("can't query all the tso keyspace groups. Asked %d, expected %d",
				keyspaceGroupsAsked, keyspaceGroupsTotal))
	}
	if minTS == nil {
		return &pdpb.Timestamp{}, errs.ErrGetMinTS.FastGenByArgs("the tso service is not ready")
	}
	return minTS, nil
}

// getMinTSFromSingleServer asks one TSO server for its minimal timestamp,
// bounded by getMinTSFromTSOServerTimeout.
func (s *GrpcServer) getMinTSFromSingleServer(
	ctx context.Context, dcLocation, tsoSrvAddr string,
) (*tsopb.GetMinTSResponse, error) {
	cc, err := s.getDelegateClient(s.ctx, tsoSrvAddr)
	if err != nil {
		return nil, errs.ErrClientGetMinTSO.FastGenByArgs(
			fmt.Sprintf("can't connect to tso server %s", tsoSrvAddr))
	}

	cctx, cancel := context.WithTimeout(ctx, getMinTSFromTSOServerTimeout)
	defer cancel()

	resp, err := tsopb.NewTSOClient(cc).GetMinTS(
		cctx, &tsopb.GetMinTSRequest{
			Header: &tsopb.RequestHeader{
				ClusterId: s.ClusterID(),
			},
			DcLocation: dcLocation,
		})
	if err != nil {
		attachErr := errors.Errorf("error:%s target:%s status:%s",
			err, cc.Target(), cc.GetState().String())
		return nil, errs.ErrClientGetMinTSO.Wrap(attachErr).GenWithStackByCause()
	}
	if resp == nil {
		attachErr := errors.Errorf("error:%s target:%s status:%s",
			"no min ts info collected", cc.Target(), cc.GetState().String())
		return nil, errs.ErrClientGetMinTSO.Wrap(attachErr).GenWithStackByCause()
	}
	if resp.GetHeader().GetError() != nil {
		attachErr := errors.Errorf("error:%s target:%s status:%s",
			resp.GetHeader().GetError().String(), cc.Target(), cc.GetState().String())
		return nil, errs.ErrClientGetMinTSO.Wrap(attachErr).GenWithStackByCause()
	}
	return resp, nil
}

// GetMembers implements gRPC PDServer.
func (s *GrpcServer) GetMembers(context.Context, *pdpb.GetMembersRequest) (*pdpb.GetMembersResponse, error) {
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.GetMembersResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	// Here we purposely do not check the cluster ID because the client does not know the correct cluster ID
	// at startup and needs to get the cluster ID with the first request (i.e. GetMembers).
	if s.IsClosed() {
		return &pdpb.GetMembersResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, errs.ErrServerNotStarted.FastGenByArgs().Error()),
		}, nil
	}
	members, err := cluster.GetMembers(s.GetClient())
	if err != nil {
		return &pdpb.GetMembersResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}

	// Resolve the etcd leader by matching the member ID reported by etcd.
	var etcdLeader, pdLeader *pdpb.Member
	leaderID := s.member.GetEtcdLeader()
	for _, m := range members {
		if m.MemberId == leaderID {
			etcdLeader = m
			break
		}
	}

	tsoAllocatorLeaders := make(map[string]*pdpb.Member)
	if !s.IsAPIServiceMode() {
		tsoAllocatorManager := s.GetTSOAllocatorManager()
		tsoAllocatorLeaders, err = tsoAllocatorManager.GetLocalAllocatorLeaders()
	}
	if err != nil {
		return &pdpb.GetMembersResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}

	// Resolve the PD leader by matching against the elected leader's ID.
	leader := s.member.GetLeader()
	for _, m := range members {
		if m.MemberId == leader.GetMemberId() {
			pdLeader = m
			break
		}
	}

	return &pdpb.GetMembersResponse{
		Header:              s.header(),
		Members:             members,
		Leader:              pdLeader,
		EtcdLeader:          etcdLeader,
		TsoAllocatorLeaders: tsoAllocatorLeaders,
	}, nil
}

// Tso implements gRPC PDServer.
func (s *GrpcServer) Tso(stream pdpb.PD_TsoServer) error {
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return err
		}
	}
	if s.IsAPIServiceMode() {
		// TSO is served by the separate TSO service in API service mode.
		return s.forwardTSO(stream)
	}

	var (
		doneCh chan struct{}
		errCh  chan error
	)
	ctx, cancel := context.WithCancel(stream.Context())
	defer cancel()
	for {
		// Prevent unnecessary performance overhead of the channel.
		if errCh != nil {
			select {
			case err := <-errCh:
				return errors.WithStack(err)
			default:
			}
		}
		request, err := stream.Recv()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return errors.WithStack(err)
		}

		forwardedHost := grpcutil.GetForwardedHost(stream.Context())
		if !s.isLocalRequest(forwardedHost) {
			clientConn, err := s.getDelegateClient(s.ctx, forwardedHost)
			if err != nil {
				return errors.WithStack(err)
			}

			// Channels are created lazily on the first forwarded request.
			if errCh == nil {
				doneCh = make(chan struct{})
				defer close(doneCh) // nolint
				errCh = make(chan error)
			}

			tsoRequest := tsoutil.NewPDProtoRequest(forwardedHost, clientConn, request, stream)
			s.tsoDispatcher.DispatchRequest(ctx, tsoRequest, s.pdProtoFactory, doneCh, errCh, s.tsoPrimaryWatcher)
			continue
		}

		start := time.Now()
		// TSO uses leader lease to determine validity. No need to check leader here.
		if s.IsClosed() {
			return status.Errorf(codes.Unknown, "server not started")
		}
		if clusterID := s.ClusterID(); request.GetHeader().GetClusterId() != clusterID {
			return status.Errorf(codes.FailedPrecondition,
				"mismatch cluster id, need %d but got %d", clusterID, request.GetHeader().GetClusterId())
		}
		count := request.GetCount()
		// A new trace task per request; the shadowed ctx is scoped to this iteration.
		ctx, task := trace.NewTask(ctx, "tso")
		ts, err := s.tsoAllocatorManager.HandleRequest(ctx, request.GetDcLocation(), count)
		task.End()
		tsoHandleDuration.Observe(time.Since(start).Seconds())
		if err != nil {
			// NOTE(review): err.Error() is used as a format string here; a '%'
			// inside the message would be mangled — status.Error would be
			// safer. TODO confirm and fix separately.
			return status.Errorf(codes.Unknown, err.Error())
		}
		response := &pdpb.TsoResponse{
			Header:    s.header(),
			Timestamp: &ts,
			Count:     count,
		}
		if err := stream.Send(response); err != nil {
			return errors.WithStack(err)
		}
	}
}

// Bootstrap implements gRPC PDServer.
func (s *GrpcServer) Bootstrap(ctx context.Context, request *pdpb.BootstrapRequest) (*pdpb.BootstrapResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.BootstrapResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).Bootstrap(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.BootstrapResponse), nil } rc := s.GetRaftCluster() if rc != nil { err := &pdpb.Error{ Type: pdpb.ErrorType_ALREADY_BOOTSTRAPPED, Message: "cluster is already bootstrapped", } return &pdpb.BootstrapResponse{ Header: s.errorHeader(err), }, nil } res, err := s.bootstrapCluster(request) if err != nil { return &pdpb.BootstrapResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } res.Header = s.header() return res, nil } // IsBootstrapped implements gRPC PDServer. 
func (s *GrpcServer) IsBootstrapped(ctx context.Context, request *pdpb.IsBootstrappedRequest) (*pdpb.IsBootstrappedResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.IsBootstrappedResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).IsBootstrapped(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.IsBootstrappedResponse), err } rc := s.GetRaftCluster() return &pdpb.IsBootstrappedResponse{ Header: s.header(), Bootstrapped: rc != nil, }, nil } // AllocID implements gRPC PDServer. func (s *GrpcServer) AllocID(ctx context.Context, request *pdpb.AllocIDRequest) (*pdpb.AllocIDResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.AllocIDResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).AllocID(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.AllocIDResponse), err } // We can use an allocator for all types ID allocation. id, err := s.idAllocator.Alloc() if err != nil { return &pdpb.AllocIDResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } return &pdpb.AllocIDResponse{ Header: s.header(), Id: id, }, nil } // IsSnapshotRecovering implements gRPC PDServer. 
// It reports whether the cluster is marked as under snapshot recovery.
func (s *GrpcServer) IsSnapshotRecovering(ctx context.Context, _ *pdpb.IsSnapshotRecoveringRequest) (*pdpb.IsSnapshotRecoveringResponse, error) {
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.IsSnapshotRecoveringResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	// recovering mark is stored in etcd directly, there's no need to forward.
	marked, err := s.Server.IsSnapshotRecovering(ctx)
	if err != nil {
		return &pdpb.IsSnapshotRecoveringResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}
	return &pdpb.IsSnapshotRecoveringResponse{
		Header: s.header(),
		Marked: marked,
	}, nil
}

// GetStore implements gRPC PDServer.
func (s *GrpcServer) GetStore(ctx context.Context, request *pdpb.GetStoreRequest) (*pdpb.GetStoreResponse, error) {
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.GetStoreResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).GetStore(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.GetStoreResponse), err
	}

	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.GetStoreResponse{Header: s.notBootstrappedHeader()}, nil
	}

	storeID := request.GetStoreId()
	store := rc.GetStore(storeID)
	if store == nil {
		return &pdpb.GetStoreResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN,
				fmt.Sprintf("invalid store ID %d, not found", storeID)),
		}, nil
	}
	return &pdpb.GetStoreResponse{
		Header: s.header(),
		Store:  store.GetMeta(),
		Stats:  store.GetStoreStats(),
	}, nil
}

// checkStore returns an error response if the store exists and is in tombstone state.
// It returns nil if it can't get the store.
func checkStore(rc *cluster.RaftCluster, storeID uint64) *pdpb.Error {
	store := rc.GetStore(storeID)
	if store != nil {
		if store.IsRemoved() {
			return &pdpb.Error{
				Type:    pdpb.ErrorType_STORE_TOMBSTONE,
				Message: "store is tombstone",
			}
		}
	}
	return nil
}

// PutStore implements gRPC PDServer.
func (s *GrpcServer) PutStore(ctx context.Context, request *pdpb.PutStoreRequest) (*pdpb.PutStoreResponse, error) {
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.PutStoreResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).PutStore(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.PutStoreResponse), err
	}

	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.PutStoreResponse{Header: s.notBootstrappedHeader()}, nil
	}

	store := request.GetStore()
	// Tombstone stores must not be re-registered.
	if pberr := checkStore(rc, store.GetId()); pberr != nil {
		return &pdpb.PutStoreResponse{
			Header: s.errorHeader(pberr),
		}, nil
	}

	// NOTE: can be removed when placement rules feature is enabled by default.
	if !s.GetConfig().Replication.EnablePlacementRules && core.IsStoreContainLabel(store, core.EngineKey, core.EngineTiFlash) {
		return &pdpb.PutStoreResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN,
				"placement rules is disabled"),
		}, nil
	}

	if err := rc.PutMetaStore(store); err != nil {
		return &pdpb.PutStoreResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}

	log.Info("put store ok", zap.Stringer("store", store))
	CheckPDVersionWithClusterVersion(s.persistOptions)

	return &pdpb.PutStoreResponse{
		Header:            s.header(),
		ReplicationStatus: rc.GetReplicationMode().GetReplicationStatus(),
	}, nil
}

// GetAllStores implements gRPC PDServer.
func (s *GrpcServer) GetAllStores(ctx context.Context, request *pdpb.GetAllStoresRequest) (*pdpb.GetAllStoresResponse, error) {
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.GetAllStoresResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).GetAllStores(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.GetAllStoresResponse), err
	}

	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.GetAllStoresResponse{Header: s.notBootstrappedHeader()}, nil
	}

	// Don't return tombstone stores.
	var stores []*metapb.Store
	if request.GetExcludeTombstoneStores() {
		for _, store := range rc.GetMetaStores() {
			if store.GetNodeState() != metapb.NodeState_Removed {
				stores = append(stores, store)
			}
		}
	} else {
		stores = rc.GetMetaStores()
	}

	return &pdpb.GetAllStoresResponse{
		Header: s.header(),
		Stores: stores,
	}, nil
}

// StoreHeartbeat implements gRPC PDServer.
func (s *GrpcServer) StoreHeartbeat(ctx context.Context, request *pdpb.StoreHeartbeatRequest) (*pdpb.StoreHeartbeatResponse, error) {
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			// NOTE(review): unlike the other handlers this returns a generic
			// rate-limit message instead of err.Error() — presumably
			// intentional for this hot path, but worth confirming.
			return &pdpb.StoreHeartbeatResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, errs.ErrRateLimitExceeded.FastGenByArgs().Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).StoreHeartbeat(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.StoreHeartbeatResponse), err
	}

	if request.GetStats() == nil {
		return nil, errors.Errorf("invalid store heartbeat command, but %v", request)
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.StoreHeartbeatResponse{Header: s.notBootstrappedHeader()}, nil
	}

	// Heartbeats from tombstone stores are rejected.
	if pberr := checkStore(rc, request.GetStats().GetStoreId()); pberr != nil {
		return &pdpb.StoreHeartbeatResponse{
			Header: s.errorHeader(pberr),
		}, nil
	}
	storeID := request.GetStats().GetStoreId()
	store := rc.GetStore(storeID)
	if store == nil {
		return &pdpb.StoreHeartbeatResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN,
				fmt.Sprintf("store %v not found", storeID)),
		}, nil
	}

	resp := &pdpb.StoreHeartbeatResponse{Header: s.header()}
	// Bypass stats handling if the store report for unsafe recover is not empty.
	if request.GetStoreReport() == nil {
		storeAddress := store.GetAddress()
		storeLabel := strconv.FormatUint(storeID, 10)
		start := time.Now()

		err := rc.HandleStoreHeartbeat(request, resp)
		if err != nil {
			return &pdpb.StoreHeartbeatResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}

		s.handleDamagedStore(request.GetStats())
		storeHeartbeatHandleDuration.WithLabelValues(storeAddress, storeLabel).Observe(time.Since(start).Seconds())
		// When scheduling runs as an independent service, mirror the stats to it
		// on a best-effort basis (failures only reset the cached client).
		if rc.IsServiceIndependent(constant.SchedulingServiceName) {
			forwardCli, _ := s.updateSchedulingClient(ctx)
			// NOTE(review): forwardCli may be nil when updateSchedulingClient
			// errors; getClient() presumably tolerates a nil receiver — confirm.
			cli := forwardCli.getClient()
			if cli != nil {
				req := &schedulingpb.StoreHeartbeatRequest{
					Header: &schedulingpb.RequestHeader{
						ClusterId: request.GetHeader().GetClusterId(),
						SenderId:  request.GetHeader().GetSenderId(),
					},
					Stats: request.GetStats(),
				}
				if _, err := cli.StoreHeartbeat(ctx, req); err != nil {
					errStoreHeartbeatSend.Inc()
					log.Debug("forward store heartbeat failed", zap.Error(err))
					// reset to let it be updated in the next request
					s.schedulingClient.CompareAndSwap(forwardCli, &schedulingClient{})
				}
			}
		}
	}

	if status := request.GetDrAutosyncStatus(); status != nil {
		rc.GetReplicationMode().UpdateStoreDRStatus(request.GetStats().GetStoreId(), status)
	}

	resp.ReplicationStatus = rc.GetReplicationMode().GetReplicationStatus()
	resp.ClusterVersion = rc.GetClusterVersion()
	rc.GetUnsafeRecoveryController().HandleStoreHeartbeat(request, resp)
	return resp, nil
}

// updateSchedulingClient returns a client for the scheduling service primary:
// 1. forwardedHost is empty, return nil
// 2. forwardedHost is not empty and forwardedHost is equal to pre, return pre
// 3. the rest of cases, update forwardedHost and return new client
func (s *GrpcServer) updateSchedulingClient(ctx context.Context) (*schedulingClient, error) {
	forwardedHost, _ := s.GetServicePrimaryAddr(ctx, constant.SchedulingServiceName)
	if forwardedHost == "" {
		return nil, ErrNotFoundSchedulingAddr
	}

	// Fast path: the cached client already points at the current primary.
	pre := s.schedulingClient.Load()
	if pre != nil && forwardedHost == pre.(*schedulingClient).getPrimaryAddr() {
		return pre.(*schedulingClient), nil
	}

	client, err := s.getDelegateClient(ctx, forwardedHost)
	if err != nil {
		log.Error("get delegate client failed", zap.Error(err))
		return nil, err
	}
	forwardCli := &schedulingClient{
		client:  schedulingpb.NewSchedulingClient(client),
		primary: forwardedHost,
	}
	// CAS so a concurrent updater does not get clobbered; the new client is
	// returned either way.
	swapped := s.schedulingClient.CompareAndSwap(pre, forwardCli)
	if swapped {
		oldForwardedHost := ""
		if pre != nil {
			oldForwardedHost = pre.(*schedulingClient).getPrimaryAddr()
		}
		log.Info("update scheduling client", zap.String("old-forwarded-host", oldForwardedHost), zap.String("new-forwarded-host", forwardedHost))
	}
	return forwardCli, nil
}

// bucketHeartbeatServer wraps PD_ReportBucketsServer to ensure when any error
// occurs on SendAndClose() or Recv(), both endpoints will be closed.
type bucketHeartbeatServer struct {
	stream pdpb.PD_ReportBucketsServer
	closed int32 // 1 once either direction failed or timed out; accessed atomically
}

// send writes the closing response with a timeout; on failure or timeout the
// server is marked closed so subsequent calls short-circuit.
func (b *bucketHeartbeatServer) send(bucket *pdpb.ReportBucketsResponse) error {
	if atomic.LoadInt32(&b.closed) == 1 {
		return status.Errorf(codes.Canceled, "stream is closed")
	}
	done := make(chan error, 1)
	go func() {
		defer logutil.LogPanic()
		done <- b.stream.SendAndClose(bucket)
	}()
	timer := time.NewTimer(heartbeatSendTimeout)
	defer timer.Stop()
	select {
	case err := <-done:
		if err != nil {
			atomic.StoreInt32(&b.closed, 1)
		}
		return err
	case <-timer.C:
		atomic.StoreInt32(&b.closed, 1)
		return ErrSendHeartbeatTimeout
	}
}

// recv reads the next request; after any receive error the server is marked
// closed and reports io.EOF from then on.
func (b *bucketHeartbeatServer) recv() (*pdpb.ReportBucketsRequest, error) {
	if atomic.LoadInt32(&b.closed) == 1 {
		return nil, io.EOF
	}
	req, err := b.stream.Recv()
	if err != nil {
		atomic.StoreInt32(&b.closed, 1)
		return nil, errors.WithStack(err)
	}
	return req, nil
}

// ReportBuckets implements gRPC PDServer
func (s *GrpcServer) ReportBuckets(stream pdpb.PD_ReportBucketsServer) error {
	var (
		server            = &bucketHeartbeatServer{stream: stream}
		forwardStream     pdpb.PD_ReportBucketsClient
		cancel            context.CancelFunc
		lastForwardedHost string
		errCh             chan error
	)
	defer func() {
		if cancel != nil {
			cancel()
		}
	}()
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return err
		}
	}
	for {
		request, err := server.recv()
		failpoint.Inject("grpcClientClosed", func() {
			err = status.Error(codes.Canceled, "grpc client closed")
			request = nil
		})
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return errors.WithStack(err)
		}
		forwardedHost := grpcutil.GetForwardedHost(stream.Context())
		failpoint.Inject("grpcClientClosed", func() {
			forwardedHost = s.GetMember().Member().GetClientUrls()[0]
		})
		if !s.isLocalRequest(forwardedHost) {
			// (Re)build the forward stream when the target changed.
			if forwardStream == nil || lastForwardedHost != forwardedHost {
				if cancel != nil {
					cancel()
				}
				client, err := s.getDelegateClient(s.ctx, forwardedHost)
				if err != nil {
					return err
				}
				log.Info("create bucket report forward stream", zap.String("forwarded-host", forwardedHost))
				forwardStream, cancel, err = s.createReportBucketsForwardStream(client)
				if err != nil {
					return err
				}
				lastForwardedHost = forwardedHost
				errCh = make(chan error, 1)
				go forwardReportBucketClientToServer(forwardStream, server, errCh)
			}
			if err := forwardStream.Send(request); err != nil {
				return errors.WithStack(err)
			}
			// Surface any asynchronous forwarding error without blocking.
			select {
			case err := <-errCh:
				return err
			default:
			}
			continue
		}
		rc := s.GetRaftCluster()
		if rc == nil {
			resp := &pdpb.ReportBucketsResponse{
				Header: s.notBootstrappedHeader(),
			}
			err := server.send(resp)
			return errors.WithStack(err)
		}
		if err := s.validateRequest(request.GetHeader()); err != nil {
			return err
		}
		buckets := request.GetBuckets()
		if buckets == nil || len(buckets.Keys) == 0 {
			continue
		}
		var (
			storeLabel   string
			storeAddress string
		)
		store := rc.GetLeaderStoreByRegionID(buckets.GetRegionId())
		if store == nil {
			// As TiKV report buckets just after the region heartbeat, for new created region, PD may receive buckets report before the first region heartbeat is handled.
			// So we should not return error here.
			log.Warn("the store of the bucket in region is not found ", zap.Uint64("region-id", buckets.GetRegionId()))
		} else {
			storeLabel = strconv.FormatUint(store.GetID(), 10)
			storeAddress = store.GetAddress()
		}
		bucketReportCounter.WithLabelValues(storeAddress, storeLabel, "report", "recv").Inc()
		start := time.Now()
		err = rc.HandleReportBuckets(buckets)
		if err != nil {
			bucketReportCounter.WithLabelValues(storeAddress, storeLabel, "report", "err").Inc()
			continue
		}
		bucketReportInterval.WithLabelValues(storeAddress, storeLabel).Observe(float64(buckets.GetPeriodInMs() / 1000))
		bucketReportLatency.WithLabelValues(storeAddress, storeLabel).Observe(time.Since(start).Seconds())
		bucketReportCounter.WithLabelValues(storeAddress, storeLabel, "report", "ok").Inc()
	}
}

// RegionHeartbeat implements gRPC PDServer.
func (s *GrpcServer) RegionHeartbeat(stream pdpb.PD_RegionHeartbeatServer) error {
	var (
		server                      = &heartbeatServer{stream: stream}
		flowRoundDivisor            = s.persistOptions.GetPDServerConfig().FlowRoundByDigit
		cancel                      context.CancelFunc
		lastBind                    time.Time
		errCh                       chan error
		forwardStream               pdpb.PD_RegionHeartbeatClient
		lastForwardedHost           string
		forwardErrCh                chan error
		forwardSchedulingStream     schedulingpb.Scheduling_RegionHeartbeatClient
		lastForwardedSchedulingHost string
	)
	defer func() {
		// cancel the forward stream
		if cancel != nil {
			cancel()
		}
	}()
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return err
		}
	}
	for {
		request, err := server.Recv()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return errors.WithStack(err)
		}
		forwardedHost := grpcutil.GetForwardedHost(stream.Context())
		failpoint.Inject("grpcClientClosed", func() {
			forwardedHost = s.GetMember().Member().GetClientUrls()[0]
		})
		if !s.isLocalRequest(forwardedHost) {
			// (Re)build the leader-forwarding stream when the target changed.
			if forwardStream == nil || lastForwardedHost != forwardedHost {
				if cancel != nil {
					cancel()
				}
				client, err := s.getDelegateClient(s.ctx, forwardedHost)
				if err != nil {
					return err
				}
				log.Info("create region heartbeat forward stream", zap.String("forwarded-host", forwardedHost))
				forwardStream, cancel, err = s.createRegionHeartbeatForwardStream(client)
				if err != nil {
					return err
				}
				lastForwardedHost = forwardedHost
				errCh = make(chan error, 1)
				go forwardRegionHeartbeatClientToServer(forwardStream, server, errCh)
			}
			if err := forwardStream.Send(request); err != nil {
				return errors.WithStack(err)
			}
			select {
			case err := <-errCh:
				return err
			default:
			}
			continue
		}
		rc := s.GetRaftCluster()
		if rc == nil {
			resp := &pdpb.RegionHeartbeatResponse{
				Header: s.notBootstrappedHeader(),
			}
			err := server.Send(resp)
			return errors.WithStack(err)
		}
		if err = s.validateRequest(request.GetHeader()); err != nil {
			return err
		}
		storeID := request.GetLeader().GetStoreId()
		storeLabel := strconv.FormatUint(storeID, 10)
		store := rc.GetStore(storeID)
		if store == nil {
			return errors.Errorf("invalid store ID %d, not found", storeID)
		}
		storeAddress := store.GetAddress()

		regionHeartbeatCounter.WithLabelValues(storeAddress, storeLabel, "report", "recv").Inc()
		regionHeartbeatLatency.WithLabelValues(storeAddress, storeLabel).Observe(float64(time.Now().Unix()) - float64(request.GetInterval().GetEndTimestamp()))

		// Periodically re-bind the push stream for this store and refresh config.
		if time.Since(lastBind) > s.cfg.HeartbeatStreamBindInterval.Duration {
			regionHeartbeatCounter.WithLabelValues(storeAddress, storeLabel, "report", "bind").Inc()
			s.hbStreams.BindStream(storeID, server)
			// refresh FlowRoundByDigit
			flowRoundDivisor = s.persistOptions.GetPDServerConfig().FlowRoundByDigit
			lastBind = time.Now()
		}
		region := core.RegionFromHeartbeat(request, flowRoundDivisor)
		// Malformed heartbeats are answered with an error push, not a stream abort.
		if region.GetLeader() == nil {
			log.Error("invalid request, the leader is nil", zap.Reflect("request", request), errs.ZapError(errs.ErrLeaderNil))
			regionHeartbeatCounter.WithLabelValues(storeAddress, storeLabel, "report", "invalid-leader").Inc()
			msg := fmt.Sprintf("invalid request leader, %v", request)
			s.hbStreams.SendErr(pdpb.ErrorType_UNKNOWN, msg, request.GetLeader())
			continue
		}
		if region.GetID() == 0 {
			regionHeartbeatCounter.WithLabelValues(storeAddress, storeLabel, "report", "invalid-region").Inc()
			msg := fmt.Sprintf("invalid request region, %v", request)
			s.hbStreams.SendErr(pdpb.ErrorType_UNKNOWN, msg, request.GetLeader())
			continue
		}
		// If the region peer count is 0, then we should not handle this.
		if len(region.GetPeers()) == 0 {
			log.Warn("invalid region, zero region peer count",
				logutil.ZapRedactStringer("region-meta", core.RegionToHexMeta(region.GetMeta())))
			regionHeartbeatCounter.WithLabelValues(storeAddress, storeLabel, "report", "no-peer").Inc()
			msg := fmt.Sprintf("invalid region, zero region peer count: %v",
				logutil.RedactStringer(core.RegionToHexMeta(region.GetMeta())))
			s.hbStreams.SendErr(pdpb.ErrorType_UNKNOWN, msg, request.GetLeader())
			continue
		}
		start := time.Now()

		err = rc.HandleRegionHeartbeat(region)
		if err != nil {
			regionHeartbeatCounter.WithLabelValues(storeAddress, storeLabel, "report", "err").Inc()
			msg := err.Error()
			s.hbStreams.SendErr(pdpb.ErrorType_UNKNOWN, msg, request.GetLeader())
			continue
		}
		regionHeartbeatHandleDuration.WithLabelValues(storeAddress, storeLabel).Observe(time.Since(start).Seconds())
		regionHeartbeatCounter.WithLabelValues(storeAddress, storeLabel, "report", "ok").Inc()

		// Best-effort mirroring of the heartbeat to an independent scheduling
		// service; failures reset the stream so the next iteration rebuilds it.
		if rc.IsServiceIndependent(constant.SchedulingServiceName) {
			if forwardErrCh != nil {
				select {
				case err, ok := <-forwardErrCh:
					if ok {
						if cancel != nil {
							cancel()
						}
						forwardSchedulingStream = nil
						log.Error("meet error and need to re-establish the stream", zap.Error(err))
					}
				default:
				}
			}
			forwardedSchedulingHost, ok := s.GetServicePrimaryAddr(stream.Context(), constant.SchedulingServiceName)
			if !ok || len(forwardedSchedulingHost) == 0 {
				log.Debug("failed to find scheduling service primary address")
				if cancel != nil {
					cancel()
				}
				continue
			}
			if forwardSchedulingStream == nil || lastForwardedSchedulingHost != forwardedSchedulingHost {
				if cancel != nil {
					cancel()
				}
				client, err := s.getDelegateClient(s.ctx, forwardedSchedulingHost)
				if err != nil {
					errRegionHeartbeatClient.Inc()
					log.Error("failed to get client", zap.Error(err))
					continue
				}
				log.Debug("create scheduling forwarding stream", zap.String("forwarded-host", forwardedSchedulingHost))
				forwardSchedulingStream, _, cancel, err = createRegionHeartbeatSchedulingStream(stream.Context(), client)
				if err != nil {
					errRegionHeartbeatStream.Inc()
					log.Debug("failed to create stream", zap.Error(err))
					continue
				}
				lastForwardedSchedulingHost = forwardedSchedulingHost
				forwardErrCh = make(chan error, 1)
				go forwardRegionHeartbeatToScheduling(rc, forwardSchedulingStream, server, forwardErrCh)
			}
			schedulingpbReq := &schedulingpb.RegionHeartbeatRequest{
				Header: &schedulingpb.RequestHeader{
					ClusterId: request.GetHeader().GetClusterId(),
					SenderId:  request.GetHeader().GetSenderId(),
				},
				Region:          request.GetRegion(),
				Leader:          request.GetLeader(),
				DownPeers:       request.GetDownPeers(),
				PendingPeers:    request.GetPendingPeers(),
				BytesWritten:    request.GetBytesWritten(),
				BytesRead:       request.GetBytesRead(),
				KeysWritten:     request.GetKeysWritten(),
				KeysRead:        request.GetKeysRead(),
				ApproximateSize: request.GetApproximateSize(),
				ApproximateKeys: request.GetApproximateKeys(),
				Interval:        request.GetInterval(),
				Term:            request.GetTerm(),
				QueryStats:      request.GetQueryStats(),
			}
			if err := forwardSchedulingStream.Send(schedulingpbReq); err != nil {
				forwardSchedulingStream = nil
				if grpcutil.NeedRebuildConnection(err) {
					s.closeDelegateClient(lastForwardedSchedulingHost)
				}
				errRegionHeartbeatSend.Inc()
				log.Error("failed to send request to scheduling service", zap.Error(err))
			}
			select {
			case err, ok := <-forwardErrCh:
				if ok {
					forwardSchedulingStream = nil
					errRegionHeartbeatRecv.Inc()
					log.Error("failed to send response", zap.Error(err))
				}
			default:
			}
		}
	}
}

// GetRegion implements gRPC PDServer.
func (s *GrpcServer) GetRegion(ctx context.Context, request *pdpb.GetRegionRequest) (*pdpb.GetRegionResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.GetRegionResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).GetRegion(ctx, request) } followerHandle := new(bool) if rsp, err := s.unaryFollowerMiddleware(ctx, request, fn, followerHandle); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.GetRegionResponse), nil } var rc *cluster.RaftCluster var region *core.RegionInfo if *followerHandle { rc = s.cluster if !rc.GetRegionSyncer().IsRunning() { return &pdpb.GetRegionResponse{Header: s.regionNotFound()}, nil } region = rc.GetRegionByKey(request.GetRegionKey()) if region == nil { log.Warn("follower get region nil", zap.String("key", string(request.GetRegionKey()))) return &pdpb.GetRegionResponse{Header: s.regionNotFound()}, nil } } else { rc = s.GetRaftCluster() if rc == nil { return &pdpb.GetRegionResponse{Header: s.notBootstrappedHeader()}, nil } region = rc.GetRegionByKey(request.GetRegionKey()) if region == nil { log.Warn("leader get region nil", zap.String("key", string(request.GetRegionKey()))) return &pdpb.GetRegionResponse{Header: s.header()}, nil } } var buckets *metapb.Buckets // FIXME: If the bucket is disabled dynamically, the bucket information is returned unexpectedly if !*followerHandle && rc.GetStoreConfig().IsEnableRegionBucket() && request.GetNeedBuckets() { buckets = region.GetBuckets() } return &pdpb.GetRegionResponse{ Header: s.header(), Region: region.GetMeta(), Leader: region.GetLeader(), DownPeers: region.GetDownPeers(), PendingPeers: region.GetPendingPeers(), Buckets: buckets, }, nil } // GetPrevRegion 
implements gRPC PDServer func (s *GrpcServer) GetPrevRegion(ctx context.Context, request *pdpb.GetRegionRequest) (*pdpb.GetRegionResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.GetRegionResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).GetPrevRegion(ctx, request) } followerHandle := new(bool) if rsp, err := s.unaryFollowerMiddleware(ctx, request, fn, followerHandle); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.GetRegionResponse), err } var rc *cluster.RaftCluster if *followerHandle { // no need to check running status rc = s.cluster if !rc.GetRegionSyncer().IsRunning() { return &pdpb.GetRegionResponse{Header: s.regionNotFound()}, nil } } else { rc = s.GetRaftCluster() if rc == nil { return &pdpb.GetRegionResponse{Header: s.notBootstrappedHeader()}, nil } } region := rc.GetPrevRegionByKey(request.GetRegionKey()) if region == nil { if *followerHandle { return &pdpb.GetRegionResponse{Header: s.regionNotFound()}, nil } return &pdpb.GetRegionResponse{Header: s.header()}, nil } var buckets *metapb.Buckets // FIXME: If the bucket is disabled dynamically, the bucket information is returned unexpectedly if !*followerHandle && rc.GetStoreConfig().IsEnableRegionBucket() && request.GetNeedBuckets() { buckets = region.GetBuckets() } return &pdpb.GetRegionResponse{ Header: s.header(), Region: region.GetMeta(), Leader: region.GetLeader(), DownPeers: region.GetDownPeers(), PendingPeers: region.GetPendingPeers(), Buckets: buckets, }, nil } // GetRegionByID implements gRPC PDServer. 
func (s *GrpcServer) GetRegionByID(ctx context.Context, request *pdpb.GetRegionByIDRequest) (*pdpb.GetRegionResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.GetRegionResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).GetRegionByID(ctx, request) } followerHandle := new(bool) if rsp, err := s.unaryFollowerMiddleware(ctx, request, fn, followerHandle); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.GetRegionResponse), err } var rc *cluster.RaftCluster if *followerHandle { rc = s.cluster if !rc.GetRegionSyncer().IsRunning() { return &pdpb.GetRegionResponse{Header: s.regionNotFound()}, nil } } else { rc = s.GetRaftCluster() if rc == nil { return &pdpb.GetRegionResponse{Header: s.regionNotFound()}, nil } } region := rc.GetRegion(request.GetRegionId()) failpoint.Inject("followerHandleError", func() { if *followerHandle { region = nil } }) if region == nil { if *followerHandle { return &pdpb.GetRegionResponse{Header: s.regionNotFound()}, nil } return &pdpb.GetRegionResponse{Header: s.header()}, nil } var buckets *metapb.Buckets if !*followerHandle && rc.GetStoreConfig().IsEnableRegionBucket() && request.GetNeedBuckets() { buckets = region.GetBuckets() } return &pdpb.GetRegionResponse{ Header: s.header(), Region: region.GetMeta(), Leader: region.GetLeader(), DownPeers: region.GetDownPeers(), PendingPeers: region.GetPendingPeers(), Buckets: buckets, }, nil } // Deprecated: use BatchScanRegions instead. // ScanRegions implements gRPC PDServer. 
func (s *GrpcServer) ScanRegions(ctx context.Context, request *pdpb.ScanRegionsRequest) (*pdpb.ScanRegionsResponse, error) {
	// Per-method gRPC rate limiting; rejections are reported in the response
	// header instead of failing the RPC.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.ScanRegionsResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).ScanRegions(ctx, request)
	}
	// Forward to the leader unless this follower is allowed to answer locally.
	followerHandle := new(bool)
	if rsp, err := s.unaryFollowerMiddleware(ctx, request, fn, followerHandle); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.ScanRegionsResponse), nil
	}
	var rc *cluster.RaftCluster
	if *followerHandle {
		// Follower path: only serve from the local cache while the region
		// syncer is running.
		rc = s.cluster
		if !rc.GetRegionSyncer().IsRunning() {
			return &pdpb.ScanRegionsResponse{Header: s.regionNotFound()}, nil
		}
	} else {
		rc = s.GetRaftCluster()
		if rc == nil {
			return &pdpb.ScanRegionsResponse{Header: s.notBootstrappedHeader()}, nil
		}
	}
	regions := rc.ScanRegions(request.GetStartKey(), request.GetEndKey(), int(request.GetLimit()))
	// An empty follower-side result may simply mean the local cache is
	// incomplete, so report "region not found" rather than an empty scan.
	if *followerHandle && len(regions) == 0 {
		return &pdpb.ScanRegionsResponse{Header: s.regionNotFound()}, nil
	}
	resp := &pdpb.ScanRegionsResponse{Header: s.header()}
	for _, r := range regions {
		leader := r.GetLeader()
		if leader == nil {
			leader = &metapb.Peer{}
		}
		// Set RegionMetas and Leaders to make it compatible with old client.
		resp.RegionMetas = append(resp.RegionMetas, r.GetMeta())
		resp.Leaders = append(resp.Leaders, leader)
		resp.Regions = append(resp.Regions, &pdpb.Region{
			Region:       r.GetMeta(),
			Leader:       leader,
			DownPeers:    r.GetDownPeers(),
			PendingPeers: r.GetPendingPeers(),
		})
	}
	return resp, nil
}

// BatchScanRegions implements gRPC PDServer.
func (s *GrpcServer) BatchScanRegions(ctx context.Context, request *pdpb.BatchScanRegionsRequest) (*pdpb.BatchScanRegionsResponse, error) {
	// Per-method gRPC rate limiting; rejections surface in the response header.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.BatchScanRegionsResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).BatchScanRegions(ctx, request)
	}
	// Forward to the leader unless this follower is allowed to answer locally.
	followerHandle := new(bool)
	if rsp, err := s.unaryFollowerMiddleware(ctx, request, fn, followerHandle); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.BatchScanRegionsResponse), nil
	}
	var rc *cluster.RaftCluster
	if *followerHandle {
		// Follower path: require a running region syncer for cache freshness.
		rc = s.cluster
		if !rc.GetRegionSyncer().IsRunning() {
			return &pdpb.BatchScanRegionsResponse{Header: s.regionNotFound()}, nil
		}
	} else {
		rc = s.GetRaftCluster()
		if rc == nil {
			return &pdpb.BatchScanRegionsResponse{Header: s.notBootstrappedHeader()}, nil
		}
	}
	// Buckets are only attached on the leader path and when the store config
	// enables region buckets.
	needBucket := request.GetNeedBuckets() && !*followerHandle && rc.GetStoreConfig().IsEnableRegionBucket()
	limit := request.GetLimit()
	// cast to core.KeyRanges and check the validation.
	keyRanges := core.NewKeyRangesWithSize(len(request.GetRanges()))
	reqRanges := request.GetRanges()
	for i, reqRange := range reqRanges {
		// Ranges must be sorted and non-overlapping: each range has to start
		// at or after the previous range's end key.
		if i > 0 {
			if bytes.Compare(reqRange.StartKey, reqRanges[i-1].EndKey) < 0 {
				return &pdpb.BatchScanRegionsResponse{Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, "invalid key range, ranges overlapped")}, nil
			}
		}
		// An empty end key means "unbounded"; otherwise start must not exceed end.
		if len(reqRange.EndKey) > 0 && bytes.Compare(reqRange.StartKey, reqRange.EndKey) > 0 {
			return &pdpb.BatchScanRegionsResponse{Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, "invalid key range, start key > end key")}, nil
		}
		keyRanges.Append(reqRange.StartKey, reqRange.EndKey)
	}

	scanOptions := []core.BatchScanRegionsOptionFunc{core.WithLimit(int(limit))}
	if request.ContainAllKeyRange {
		scanOptions = append(scanOptions, core.WithOutputMustContainAllKeyRange())
	}
	res, err := rc.BatchScanRegions(keyRanges, scanOptions...)
	if err != nil {
		// Map the "regions not adjacent" failure to its dedicated error type;
		// everything else is reported as UNKNOWN.
		if errs.ErrRegionNotAdjacent.Equal(multierr.Errors(err)[0]) {
			return &pdpb.BatchScanRegionsResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_REGIONS_NOT_CONTAIN_ALL_KEY_RANGE, err.Error()),
			}, nil
		}
		return &pdpb.BatchScanRegionsResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}
	regions := make([]*pdpb.Region, 0, len(res))
	for _, r := range res {
		leader := r.GetLeader()
		if leader == nil {
			leader = &metapb.Peer{}
		}
		var buckets *metapb.Buckets
		if needBucket {
			buckets = r.GetBuckets()
		}
		regions = append(regions, &pdpb.Region{
			Region:       r.GetMeta(),
			Leader:       leader,
			DownPeers:    r.GetDownPeers(),
			PendingPeers: r.GetPendingPeers(),
			Buckets:      buckets,
		})
	}
	// An empty follower-side result may simply mean the cache is incomplete.
	if *followerHandle && len(regions) == 0 {
		return &pdpb.BatchScanRegionsResponse{Header: s.regionNotFound()}, nil
	}
	resp := &pdpb.BatchScanRegionsResponse{Header: s.header(), Regions: regions}
	return resp, nil
}

// AskSplit implements gRPC PDServer.
func (s *GrpcServer) AskSplit(ctx context.Context, request *pdpb.AskSplitRequest) (*pdpb.AskSplitResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.AskSplitResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).AskSplit(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.AskSplitResponse), err } rc := s.GetRaftCluster() if rc == nil { return &pdpb.AskSplitResponse{Header: s.notBootstrappedHeader()}, nil } if request.GetRegion() == nil { return &pdpb.AskSplitResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_REGION_NOT_FOUND, "missing region for split"), }, nil } split, err := rc.HandleAskSplit(request) if err != nil { return &pdpb.AskSplitResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } return &pdpb.AskSplitResponse{ Header: s.header(), NewRegionId: split.NewRegionId, NewPeerIds: split.NewPeerIds, }, nil } // AskBatchSplit implements gRPC PDServer. 
func (s *GrpcServer) AskBatchSplit(ctx context.Context, request *pdpb.AskBatchSplitRequest) (*pdpb.AskBatchSplitResponse, error) {
	// Per-method gRPC rate limiting; rejections surface in the response header.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.AskBatchSplitResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.AskBatchSplitResponse{Header: s.notBootstrappedHeader()}, nil
	}
	// When the scheduling service runs independently, forward the request to it.
	if rc.IsServiceIndependent(constant.SchedulingServiceName) {
		forwardCli, err := s.updateSchedulingClient(ctx)
		if err != nil {
			return &pdpb.AskBatchSplitResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
		cli := forwardCli.getClient()
		if cli != nil {
			req := &schedulingpb.AskBatchSplitRequest{
				Header: &schedulingpb.RequestHeader{
					ClusterId: request.GetHeader().GetClusterId(),
					SenderId:  request.GetHeader().GetSenderId(),
				},
				Region:     request.GetRegion(),
				SplitCount: request.GetSplitCount(),
			}
			resp, err := cli.AskBatchSplit(ctx, req)
			if err != nil {
				// reset to let it be updated in the next request
				s.schedulingClient.CompareAndSwap(forwardCli, &schedulingClient{})
				return convertAskSplitResponse(resp), err
			}
			return convertAskSplitResponse(resp), nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).AskBatchSplit(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.AskBatchSplitResponse), err
	}
	// Batch split requires the whole cluster to be on a supporting version.
	if !versioninfo.IsFeatureSupported(rc.GetOpts().GetClusterVersion(), versioninfo.BatchSplit) {
		return &pdpb.AskBatchSplitResponse{Header: s.incompatibleVersion("batch_split")}, nil
	}
	if request.GetRegion() == nil {
		return &pdpb.AskBatchSplitResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_REGION_NOT_FOUND, "missing region for split"),
		}, nil
	}
	split, err := rc.HandleAskBatchSplit(request)
	if err != nil {
		return &pdpb.AskBatchSplitResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}
	// Return the allocated IDs for all new regions.
	return &pdpb.AskBatchSplitResponse{
		Header: s.header(),
		Ids:    split.Ids,
	}, nil
}

// ReportSplit implements gRPC PDServer.
func (s *GrpcServer) ReportSplit(ctx context.Context, request *pdpb.ReportSplitRequest) (*pdpb.ReportSplitResponse, error) {
	// Per-method gRPC rate limiting; rejections surface in the response header.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.ReportSplitResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).ReportSplit(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.ReportSplitResponse), err
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.ReportSplitResponse{Header: s.notBootstrappedHeader()}, nil
	}
	// Record the reported split; only the error is interesting here.
	_, err := rc.HandleReportSplit(request)
	if err != nil {
		return &pdpb.ReportSplitResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}
	return &pdpb.ReportSplitResponse{
		Header: s.header(),
	}, nil
}

// ReportBatchSplit implements gRPC PDServer.
func (s *GrpcServer) ReportBatchSplit(ctx context.Context, request *pdpb.ReportBatchSplitRequest) (*pdpb.ReportBatchSplitResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.ReportBatchSplitResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).ReportBatchSplit(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.ReportBatchSplitResponse), err } rc := s.GetRaftCluster() if rc == nil { return &pdpb.ReportBatchSplitResponse{Header: s.notBootstrappedHeader()}, nil } _, err := rc.HandleBatchReportSplit(request) if err != nil { return &pdpb.ReportBatchSplitResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } return &pdpb.ReportBatchSplitResponse{ Header: s.header(), }, nil } // GetClusterConfig implements gRPC PDServer. 
func (s *GrpcServer) GetClusterConfig(ctx context.Context, request *pdpb.GetClusterConfigRequest) (*pdpb.GetClusterConfigResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.GetClusterConfigResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).GetClusterConfig(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.GetClusterConfigResponse), err } rc := s.GetRaftCluster() if rc == nil { return &pdpb.GetClusterConfigResponse{Header: s.notBootstrappedHeader()}, nil } return &pdpb.GetClusterConfigResponse{ Header: s.header(), Cluster: rc.GetMetaCluster(), }, nil } // PutClusterConfig implements gRPC PDServer. 
func (s *GrpcServer) PutClusterConfig(ctx context.Context, request *pdpb.PutClusterConfigRequest) (*pdpb.PutClusterConfigResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.PutClusterConfigResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).PutClusterConfig(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.PutClusterConfigResponse), err } rc := s.GetRaftCluster() if rc == nil { return &pdpb.PutClusterConfigResponse{Header: s.notBootstrappedHeader()}, nil } conf := request.GetCluster() if err := rc.PutMetaCluster(conf); err != nil { return &pdpb.PutClusterConfigResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } log.Info("put cluster config ok", zap.Reflect("config", conf)) return &pdpb.PutClusterConfigResponse{ Header: s.header(), }, nil } // ScatterRegion implements gRPC PDServer. 
func (s *GrpcServer) ScatterRegion(ctx context.Context, request *pdpb.ScatterRegionRequest) (*pdpb.ScatterRegionResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.ScatterRegionResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } rc := s.GetRaftCluster() if rc == nil { return &pdpb.ScatterRegionResponse{Header: s.notBootstrappedHeader()}, nil } if rc.IsServiceIndependent(constant.SchedulingServiceName) { forwardCli, err := s.updateSchedulingClient(ctx) if err != nil { return &pdpb.ScatterRegionResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } cli := forwardCli.getClient() if cli != nil { var regionsID []uint64 // nolint:staticcheck if request.GetRegionId() != 0 { // nolint:staticcheck regionsID = []uint64{request.GetRegionId()} } else { regionsID = request.GetRegionsId() } if len(regionsID) == 0 { return &pdpb.ScatterRegionResponse{ Header: s.invalidValue("regions id is required"), }, nil } req := &schedulingpb.ScatterRegionsRequest{ Header: &schedulingpb.RequestHeader{ ClusterId: request.GetHeader().GetClusterId(), SenderId: request.GetHeader().GetSenderId(), }, RegionsId: regionsID, Group: request.GetGroup(), RetryLimit: request.GetRetryLimit(), SkipStoreLimit: request.GetSkipStoreLimit(), } resp, err := cli.ScatterRegions(ctx, req) if err != nil { errScatterRegionSend.Inc() // reset to let it be updated in the next request s.schedulingClient.CompareAndSwap(forwardCli, &schedulingClient{}) return convertScatterResponse(resp), err } return convertScatterResponse(resp), nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).ScatterRegion(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return 
rsp.(*pdpb.ScatterRegionResponse), err } if len(request.GetRegionsId()) > 0 { percentage, err := scatterRegions(rc, request.GetRegionsId(), request.GetGroup(), int(request.GetRetryLimit()), request.GetSkipStoreLimit()) if err != nil { return nil, err } return &pdpb.ScatterRegionResponse{ Header: s.header(), FinishedPercentage: uint64(percentage), }, nil } // TODO: Deprecate it use `request.GetRegionsID`. // nolint:staticcheck region := rc.GetRegion(request.GetRegionId()) if region == nil { if request.GetRegion() == nil { return &pdpb.ScatterRegionResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_REGION_NOT_FOUND, "region %d not found"), }, nil } region = core.NewRegionInfo(request.GetRegion(), request.GetLeader()) } op, err := rc.GetRegionScatterer().Scatter(region, request.GetGroup(), request.GetSkipStoreLimit()) if err != nil { return nil, err } if op != nil { if !rc.GetOperatorController().AddOperator(op) { return &pdpb.ScatterRegionResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, "operator canceled because cannot add an operator to the execute queue"), }, nil } } return &pdpb.ScatterRegionResponse{ Header: s.header(), FinishedPercentage: 100, }, nil } // GetGCSafePoint implements gRPC PDServer. 
func (s *GrpcServer) GetGCSafePoint(ctx context.Context, request *pdpb.GetGCSafePointRequest) (*pdpb.GetGCSafePointResponse, error) {
	// Per-method gRPC rate limiting; rejections surface in the response header.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.GetGCSafePointResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).GetGCSafePoint(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.GetGCSafePointResponse), err
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.GetGCSafePointResponse{Header: s.notBootstrappedHeader()}, nil
	}
	// Read the persisted GC safe point via the safe point manager.
	safePoint, err := s.gcSafePointManager.LoadGCSafePoint()
	if err != nil {
		return nil, err
	}
	return &pdpb.GetGCSafePointResponse{
		Header:    s.header(),
		SafePoint: safePoint,
	}, nil
}

// SyncRegions syncs the regions.
func (s *GrpcServer) SyncRegions(stream pdpb.PD_SyncRegionsServer) error {
	// The server and its cluster must be up before a sync stream can start.
	if s.IsClosed() || s.cluster == nil {
		return ErrNotStarted
	}
	// Per-method gRPC rate limiting; here a rejection fails the stream itself.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return err
		}
	}
	ctx := s.cluster.Context()
	if ctx == nil {
		return ErrNotStarted
	}
	// Delegate the long-running sync loop to the region syncer.
	return s.cluster.GetRegionSyncer().Sync(ctx, stream)
}

// UpdateGCSafePoint implements gRPC PDServer.
func (s *GrpcServer) UpdateGCSafePoint(ctx context.Context, request *pdpb.UpdateGCSafePointRequest) (*pdpb.UpdateGCSafePointResponse, error) {
	// Per-method gRPC rate limiting; rejections surface in the response header.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.UpdateGCSafePointResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).UpdateGCSafePoint(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.UpdateGCSafePointResponse), err
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.UpdateGCSafePointResponse{Header: s.notBootstrappedHeader()}, nil
	}
	newSafePoint := request.GetSafePoint()
	// oldSafePoint is whatever was stored before this update attempt.
	oldSafePoint, err := s.gcSafePointManager.UpdateGCSafePoint(newSafePoint)
	if err != nil {
		return nil, err
	}
	if newSafePoint > oldSafePoint {
		log.Info("updated gc safe point",
			zap.Uint64("safe-point", newSafePoint))
	} else if newSafePoint < oldSafePoint {
		// The GC safe point never moves backwards; answer with the stored value.
		log.Warn("trying to update gc safe point",
			zap.Uint64("old-safe-point", oldSafePoint),
			zap.Uint64("new-safe-point", newSafePoint))
		newSafePoint = oldSafePoint
	}
	return &pdpb.UpdateGCSafePointResponse{
		Header:       s.header(),
		NewSafePoint: newSafePoint,
	}, nil
}

// UpdateServiceGCSafePoint update the safepoint for specific service
func (s *GrpcServer) UpdateServiceGCSafePoint(ctx context.Context, request *pdpb.UpdateServiceGCSafePointRequest) (*pdpb.UpdateServiceGCSafePointResponse, error) {
	// Per-method gRPC rate limiting; rejections surface in the response header.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.UpdateServiceGCSafePointResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).UpdateServiceGCSafePoint(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.UpdateServiceGCSafePointResponse), err
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.UpdateServiceGCSafePointResponse{Header: s.notBootstrappedHeader()}, nil
	}
	var storage endpoint.GCSafePointStorage = s.storage
	// A non-positive TTL removes the service's safe point.
	// NOTE(review): execution still falls through to UpdateServiceGCSafePoint
	// below after the removal — confirm the manager tolerates TTL <= 0.
	if request.TTL <= 0 {
		if err := storage.RemoveServiceGCSafePoint(string(request.ServiceId)); err != nil {
			return nil, err
		}
	}
	// Use the current TSO as "now" so expiry is driven by cluster time rather
	// than the local wall clock.
	nowTSO, err := s.getGlobalTSO(ctx)
	if err != nil {
		return nil, err
	}
	now, _ := tsoutil.ParseTimestamp(nowTSO)
	serviceID := string(request.ServiceId)
	min, updated, err := s.gcSafePointManager.UpdateServiceGCSafePoint(serviceID, request.GetSafePoint(), request.GetTTL(), now)
	if err != nil {
		return nil, err
	}
	if updated {
		log.Info("update service GC safe point",
			zap.String("service-id", serviceID),
			zap.Int64("expire-at", now.Unix()+request.GetTTL()),
			zap.Uint64("safepoint", request.GetSafePoint()))
	}
	// Reply with the minimal service safe point across all services.
	return &pdpb.UpdateServiceGCSafePointResponse{
		Header:       s.header(),
		ServiceId:    []byte(min.ServiceID),
		TTL:          min.ExpiredAt - now.Unix(),
		MinSafePoint: min.SafePoint,
	}, nil
}

// GetOperator gets information about the operator belonging to the specify region.
func (s *GrpcServer) GetOperator(ctx context.Context, request *pdpb.GetOperatorRequest) (*pdpb.GetOperatorResponse, error) {
	// Optional gRPC rate limiting.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.GetOperatorResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.GetOperatorResponse{Header: s.notBootstrappedHeader()}, nil
	}
	// When scheduling runs as an independent service, forward the query to it
	// and translate the scheduling response back into a pdpb response.
	if rc.IsServiceIndependent(constant.SchedulingServiceName) {
		forwardCli, err := s.updateSchedulingClient(ctx)
		if err != nil {
			return &pdpb.GetOperatorResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
		cli := forwardCli.getClient()
		if cli != nil {
			req := &schedulingpb.GetOperatorRequest{
				Header: &schedulingpb.RequestHeader{
					ClusterId: request.GetHeader().GetClusterId(),
					SenderId:  request.GetHeader().GetSenderId(),
				},
				RegionId: request.GetRegionId(),
			}
			resp, err := cli.GetOperator(ctx, req)
			if err != nil {
				errGetOperatorSend.Inc()
				// reset to let it be updated in the next request
				s.schedulingClient.CompareAndSwap(forwardCli, &schedulingClient{})
				return convertOperatorResponse(resp), err
			}
			return convertOperatorResponse(resp), nil
		}
	}
	// Possible forwarding through the unary middleware.
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).GetOperator(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.GetOperatorResponse), err
	}

	// Handle locally: look the operator up in this PD's operator controller.
	opController := rc.GetOperatorController()
	requestID := request.GetRegionId()
	r := opController.GetOperatorStatus(requestID)
	if r == nil {
		header := s.errorHeader(&pdpb.Error{
			Type:    pdpb.ErrorType_REGION_NOT_FOUND,
			Message: "Not Found",
		})
		return &pdpb.GetOperatorResponse{Header: header}, nil
	}

	return &pdpb.GetOperatorResponse{
		Header:   s.header(),
		RegionId: requestID,
		Desc:     []byte(r.Desc()),
		Kind:     []byte(r.Kind().String()),
		Status:   r.Status,
	}, nil
}

// validateRequest checks if Server is leader and clusterID is matched.
func (s *GrpcServer) validateRequest(header *pdpb.RequestHeader) error {
	return s.validateRoleInRequest(context.TODO(), header, nil)
}

// validateRoleInRequest checks if Server is leader when disallow follower-handle and clusterID is matched.
// TODO: Call it in gRPC interceptor.
func (s *GrpcServer) validateRoleInRequest(ctx context.Context, header *pdpb.RequestHeader, allowFollower *bool) error {
	if s.IsClosed() {
		return ErrNotStarted
	}
	if !s.member.IsLeader() {
		// A nil allowFollower means the caller insists on leader handling.
		if allowFollower == nil {
			return ErrNotLeader
		}
		if !grpcutil.IsFollowerHandleEnabled(ctx) {
			// TODO: change the error code
			return ErrFollowerHandlingNotAllowed
		}
		*allowFollower = true
	}
	if clusterID := s.ClusterID(); header.GetClusterId() != clusterID {
		return status.Errorf(codes.FailedPrecondition, "mismatch cluster id, need %d but got %d", clusterID, header.GetClusterId())
	}
	return nil
}

// header builds a normal response header, or a NOT_BOOTSTRAPPED error header
// when the cluster id is not yet available.
func (s *GrpcServer) header() *pdpb.ResponseHeader {
	clusterID := s.ClusterID()
	if clusterID == 0 {
		return s.wrapErrorToHeader(pdpb.ErrorType_NOT_BOOTSTRAPPED, "cluster id is not ready")
	}
	return &pdpb.ResponseHeader{ClusterId: clusterID}
}

// wrapErrorToHeader wraps an error type and message into a response header.
func (s *GrpcServer) wrapErrorToHeader(errorType pdpb.ErrorType, message string) *pdpb.ResponseHeader {
	return s.errorHeader(&pdpb.Error{
		Type:    errorType,
		Message: message,
	})
}

// errorHeader builds a response header carrying the given error.
func (s *GrpcServer) errorHeader(err *pdpb.Error) *pdpb.ResponseHeader {
	return &pdpb.ResponseHeader{
		ClusterId: s.ClusterID(),
		Error:     err,
	}
}

// notBootstrappedHeader is the canonical header returned when the raft
// cluster has not been bootstrapped yet.
func (s *GrpcServer) notBootstrappedHeader() *pdpb.ResponseHeader {
	return s.errorHeader(&pdpb.Error{
		Type:    pdpb.ErrorType_NOT_BOOTSTRAPPED,
		Message: "cluster is not bootstrapped",
	})
}

// incompatibleVersion builds an INCOMPATIBLE_VERSION error header for the
// named feature against the current cluster version.
func (s *GrpcServer) incompatibleVersion(tag string) *pdpb.ResponseHeader {
	msg := fmt.Sprintf("%s incompatible with current cluster version %s", tag, s.persistOptions.GetClusterVersion())
	return s.errorHeader(&pdpb.Error{
		Type:    pdpb.ErrorType_INCOMPATIBLE_VERSION,
		Message: msg,
	})
}

// invalidValue builds an INVALID_VALUE error header with the given message.
func (s *GrpcServer) invalidValue(msg string) *pdpb.ResponseHeader {
	return s.errorHeader(&pdpb.Error{
		Type:    pdpb.ErrorType_INVALID_VALUE,
		Message: msg,
	})
}

// regionNotFound builds a REGION_NOT_FOUND error header.
func (s *GrpcServer) regionNotFound() *pdpb.ResponseHeader {
	return s.errorHeader(&pdpb.Error{
		Type:    pdpb.ErrorType_REGION_NOT_FOUND,
		Message: "region not found",
	})
}

// convertHeader translates a scheduling-service response header into a pdpb
// header. UNKNOWN errors whose message mentions "region not found" are
// reclassified as REGION_NOT_FOUND so pdpb clients see the expected type.
func convertHeader(header *schedulingpb.ResponseHeader) *pdpb.ResponseHeader {
	switch header.GetError().GetType() {
	case schedulingpb.ErrorType_UNKNOWN:
		if strings.Contains(header.GetError().GetMessage(), "region not found") {
			return &pdpb.ResponseHeader{
				ClusterId: header.GetClusterId(),
				Error: &pdpb.Error{
					Type:    pdpb.ErrorType_REGION_NOT_FOUND,
					Message: header.GetError().GetMessage(),
				},
			}
		}
		return &pdpb.ResponseHeader{
			ClusterId: header.GetClusterId(),
			Error: &pdpb.Error{
				Type:    pdpb.ErrorType_UNKNOWN,
				Message: header.GetError().GetMessage(),
			},
		}
	default:
		return &pdpb.ResponseHeader{ClusterId: header.GetClusterId()}
	}
}

// convertSplitResponse maps a scheduling SplitRegions response to pdpb.
func convertSplitResponse(resp *schedulingpb.SplitRegionsResponse) *pdpb.SplitRegionsResponse {
	return &pdpb.SplitRegionsResponse{
		Header:             convertHeader(resp.GetHeader()),
		FinishedPercentage: resp.GetFinishedPercentage(),
	}
}

// convertScatterResponse maps a scheduling ScatterRegions response to pdpb.
func convertScatterResponse(resp *schedulingpb.ScatterRegionsResponse) *pdpb.ScatterRegionResponse {
	return &pdpb.ScatterRegionResponse{
		Header:             convertHeader(resp.GetHeader()),
		FinishedPercentage: resp.GetFinishedPercentage(),
	}
}

// convertOperatorResponse maps a scheduling GetOperator response to pdpb.
func convertOperatorResponse(resp *schedulingpb.GetOperatorResponse) *pdpb.GetOperatorResponse {
	return &pdpb.GetOperatorResponse{
		Header:   convertHeader(resp.GetHeader()),
		RegionId: resp.GetRegionId(),
		Desc:     resp.GetDesc(),
		Kind:     resp.GetKind(),
		Status:   resp.GetStatus(),
	}
}

// convertAskSplitResponse maps a scheduling AskBatchSplit response to pdpb.
func convertAskSplitResponse(resp *schedulingpb.AskBatchSplitResponse) *pdpb.AskBatchSplitResponse {
	return &pdpb.AskBatchSplitResponse{
		Header: convertHeader(resp.GetHeader()),
		Ids:    resp.GetIds(),
	}
}

// Only used for the
// Only used for the TestLocalAllocatorLeaderChange.
var mockLocalAllocatorLeaderChangeFlag = false

// SyncMaxTS will check whether MaxTS is the biggest one among all Local TSOs this PD is holding when skipCheck is set,
// and write it into all Local TSO Allocators then if it's indeed the biggest one.
func (s *GrpcServer) SyncMaxTS(_ context.Context, request *pdpb.SyncMaxTSRequest) (*pdpb.SyncMaxTSResponse, error) {
	// TODO: support local tso forward in api service mode in the future.
	if err := s.validateInternalRequest(request.GetHeader(), true); err != nil {
		return nil, err
	}
	// Optional gRPC rate limiting.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.SyncMaxTSResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	tsoAllocatorManager := s.GetTSOAllocatorManager()
	// There is no dc-location found in this server, return err.
	if tsoAllocatorManager.GetClusterDCLocationsNumber() == 0 {
		return &pdpb.SyncMaxTSResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, "empty cluster dc-location found, checker may not work properly"),
		}, nil
	}
	// Get all Local TSO Allocator leaders
	allocatorLeaders, err := tsoAllocatorManager.GetHoldingLocalAllocatorLeaders()
	if err != nil {
		return &pdpb.SyncMaxTSResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}
	// Phase 1 (no skipCheck): compute the max Local TSO among held leaders and
	// return it so the caller can decide the real maximum.
	if !request.GetSkipCheck() {
		var maxLocalTS *pdpb.Timestamp
		syncedDCs := make([]string, 0, len(allocatorLeaders))
		for _, allocator := range allocatorLeaders {
			// No longer leader, just skip here because
			// the global allocator will check if all DCs are handled.
			if !allocator.IsAllocatorLeader() {
				continue
			}
			currentLocalTSO, err := allocator.GetCurrentTSO()
			if err != nil {
				return &pdpb.SyncMaxTSResponse{
					Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
				}, nil
			}
			if tsoutil.CompareTimestamp(currentLocalTSO, maxLocalTS) > 0 {
				maxLocalTS = currentLocalTSO
			}
			syncedDCs = append(syncedDCs, allocator.GetDCLocation())
		}

		// Test hook: simulate a local allocator leader change once.
		failpoint.Inject("mockLocalAllocatorLeaderChange", func() {
			if !mockLocalAllocatorLeaderChangeFlag {
				maxLocalTS = nil
				request.MaxTs = nil
				mockLocalAllocatorLeaderChangeFlag = true
			}
		})

		if maxLocalTS == nil {
			return &pdpb.SyncMaxTSResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, "local tso allocator leaders have changed during the sync, should retry"),
			}, nil
		}
		if request.GetMaxTs() == nil {
			return &pdpb.SyncMaxTSResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, "empty maxTS in the request, should retry"),
			}, nil
		}
		// Found a bigger or equal maxLocalTS, return it directly.
		cmpResult := tsoutil.CompareTimestamp(maxLocalTS, request.GetMaxTs())
		if cmpResult >= 0 {
			// Found an equal maxLocalTS, plus 1 to logical part before returning it.
			// For example, we have a Global TSO t1 and a Local TSO t2, they have the
			// same physical and logical parts. After being differentiating with suffix,
			// there will be (t1.logical << suffixNum + 0) < (t2.logical << suffixNum + N),
			// where N is bigger than 0, which will cause a Global TSO fallback than the previous Local TSO.
			if cmpResult == 0 {
				maxLocalTS.Logical += 1
			}
			return &pdpb.SyncMaxTSResponse{
				Header:     s.header(),
				MaxLocalTs: maxLocalTS,
				SyncedDcs:  syncedDCs,
			}, nil
		}
	}
	// Phase 2 (skipCheck, or request.MaxTs already the maximum): write the
	// request's MaxTs into every Local TSO Allocator this PD leads.
	syncedDCs := make([]string, 0, len(allocatorLeaders))
	for _, allocator := range allocatorLeaders {
		if !allocator.IsAllocatorLeader() {
			continue
		}
		if err := allocator.WriteTSO(request.GetMaxTs()); err != nil {
			return &pdpb.SyncMaxTSResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
		syncedDCs = append(syncedDCs, allocator.GetDCLocation())
	}
	return &pdpb.SyncMaxTSResponse{
		Header:    s.header(),
		SyncedDcs: syncedDCs,
	}, nil
}

// SplitRegions split regions by the given split keys
func (s *GrpcServer) SplitRegions(ctx context.Context, request *pdpb.SplitRegionsRequest) (*pdpb.SplitRegionsResponse, error) {
	// Optional gRPC rate limiting.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.SplitRegionsResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.SplitRegionsResponse{Header: s.notBootstrappedHeader()}, nil
	}
	// Forward to the independent scheduling service when it is enabled.
	if rc.IsServiceIndependent(constant.SchedulingServiceName) {
		forwardCli, err := s.updateSchedulingClient(ctx)
		if err != nil {
			return &pdpb.SplitRegionsResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
		cli := forwardCli.getClient()
		if cli != nil {
			req := &schedulingpb.SplitRegionsRequest{
				Header: &schedulingpb.RequestHeader{
					ClusterId: request.GetHeader().GetClusterId(),
					SenderId:  request.GetHeader().GetSenderId(),
				},
				SplitKeys:  request.GetSplitKeys(),
				RetryLimit: request.GetRetryLimit(),
			}
			resp, err := cli.SplitRegions(ctx, req)
			if err != nil {
				errSplitRegionsSend.Inc()
				// reset to let it be updated in the next request
				s.schedulingClient.CompareAndSwap(forwardCli, &schedulingClient{})
				return convertSplitResponse(resp), err
			}
			return convertSplitResponse(resp), nil
		}
	}
	// Possible forwarding through the unary middleware.
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).SplitRegions(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.SplitRegionsResponse), err
	}

	finishedPercentage, newRegionIDs := rc.GetRegionSplitter().SplitRegions(ctx, request.GetSplitKeys(), int(request.GetRetryLimit()))
	return &pdpb.SplitRegionsResponse{
		Header:             s.header(),
		RegionsId:          newRegionIDs,
		FinishedPercentage: uint64(finishedPercentage),
	}, nil
}

// SplitAndScatterRegions split regions by the given split keys, and scatter regions.
// Only regions which split successfully will be scattered.
// scatterFinishedPercentage indicates the percentage of successfully split regions that are scattered.
func (s *GrpcServer) SplitAndScatterRegions(ctx context.Context, request *pdpb.SplitAndScatterRegionsRequest) (*pdpb.SplitAndScatterRegionsResponse, error) {
	// Optional gRPC rate limiting.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.SplitAndScatterRegionsResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	// Possible forwarding through the unary middleware.
	fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) {
		return pdpb.NewPDClient(client).SplitAndScatterRegions(ctx, request)
	}
	if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil {
		return nil, err
	} else if rsp != nil {
		return rsp.(*pdpb.SplitAndScatterRegionsResponse), err
	}
	rc := s.GetRaftCluster()
	if rc == nil {
		return &pdpb.SplitAndScatterRegionsResponse{Header: s.notBootstrappedHeader()}, nil
	}
	// Split first, then scatter only the regions that were created by the split.
	splitFinishedPercentage, newRegionIDs := rc.GetRegionSplitter().SplitRegions(ctx, request.GetSplitKeys(), int(request.GetRetryLimit()))
	scatterFinishedPercentage, err := scatterRegions(rc, newRegionIDs, request.GetGroup(), int(request.GetRetryLimit()), false)
	if err != nil {
		return nil, err
	}
	return &pdpb.SplitAndScatterRegionsResponse{
		Header:                    s.header(),
		RegionsId:                 newRegionIDs,
		SplitFinishedPercentage:   uint64(splitFinishedPercentage),
		ScatterFinishedPercentage: uint64(scatterFinishedPercentage),
	}, nil
}

// scatterRegions add operators to scatter regions and return the processed percentage and error
func scatterRegions(cluster *cluster.RaftCluster, regionsID []uint64, group string, retryLimit int, skipStoreLimit bool) (int, error) {
	opsCount, failures, err := cluster.GetRegionScatterer().ScatterRegionsByID(regionsID, group, retryLimit, skipStoreLimit)
	if err != nil {
		return 0, err
	}
	// Percentage of regions successfully scattered; 100 when nothing failed.
	percentage := 100
	if len(failures) > 0 {
		percentage = 100 - 100*len(failures)/(opsCount+len(failures))
		log.Debug("scatter regions", zap.Errors("failures", func() []error {
			// Flatten the failure map into a slice for structured logging.
			r := make([]error, 0, len(failures))
			for _, err := range failures {
				r = append(r, err)
			}
			return r
		}()))
	}
	return percentage, nil
}

// GetDCLocationInfo gets the dc-location info of the given dc-location from PD leader's TSO allocator manager.
func (s *GrpcServer) GetDCLocationInfo(ctx context.Context, request *pdpb.GetDCLocationInfoRequest) (*pdpb.GetDCLocationInfoResponse, error) {
	// TODO: support local tso forward in api service mode in the future.
	var err error
	if err = s.validateInternalRequest(request.GetHeader(), false); err != nil {
		return nil, err
	}
	// Only the PD leader serves dc-location info.
	if !s.member.IsLeader() {
		return nil, ErrNotLeader
	}
	// Optional gRPC rate limiting.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.GetDCLocationInfoResponse{
				Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
			}, nil
		}
	}
	am := s.GetTSOAllocatorManager()
	info, ok := am.GetDCLocationInfo(request.GetDcLocation())
	if !ok {
		// Kick the checker so the mapping can catch up, then report not-found.
		am.ClusterDCLocationChecker()
		return &pdpb.GetDCLocationInfoResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN,
				fmt.Sprintf("dc-location %s is not found", request.GetDcLocation())),
		}, nil
	}
	resp := &pdpb.GetDCLocationInfoResponse{
		Header: s.header(),
		Suffix: info.Suffix,
	}
	// Because the number of suffix bits is changing dynamically according to the dc-location number,
	// there is a corner case may cause the Local TSO is not unique while member changing.
	// Example:
	// t1: xxxxxxxxxxxxxxx1 | 11
	// t2: xxxxxxxxxxxxxxx  | 111
	// So we will force the newly added Local TSO Allocator to have a Global TSO synchronization
	// when it becomes the Local TSO Allocator leader.
	// Please take a look at path_to_url for more details.
	if resp.MaxTs, err = am.GetMaxLocalTSO(ctx); err != nil {
		return &pdpb.GetDCLocationInfoResponse{
			Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()),
		}, nil
	}
	return resp, nil
}

// validateInternalRequest checks if server is closed, which is used to validate
// the gRPC communication between PD servers internally.
func (s *GrpcServer) validateInternalRequest(header *pdpb.RequestHeader, onlyAllowLeader bool) error {
	if s.IsClosed() {
		return ErrNotStarted
	}
	// If onlyAllowLeader is true, check whether the sender is PD leader.
	if onlyAllowLeader {
		leaderID := s.GetLeader().GetMemberId()
		if leaderID != header.GetSenderId() {
			return status.Errorf(codes.FailedPrecondition, "%s, need %d but got %d", errs.MismatchLeaderErr, leaderID, header.GetSenderId())
		}
	}
	return nil
}

// for CDC compatibility, we need to initialize config path to `globalConfigPath`
const globalConfigPath = "/global/config/"

// StoreGlobalConfig store global config into etcd by transaction
// Since item value needs to support marshal of different struct types,
// it should be set to `Payload bytes` instead of `Value string`
func (s *GrpcServer) StoreGlobalConfig(_ context.Context, request *pdpb.StoreGlobalConfigRequest) (*pdpb.StoreGlobalConfigResponse, error) {
	if s.client == nil {
		return nil, ErrEtcdNotStarted
	}
	// Optional gRPC rate limiting. Note this response carries the error in
	// its Error field rather than a header, matching the message's schema.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return &pdpb.StoreGlobalConfigResponse{
				Error: &pdpb.Error{
					Type:    pdpb.ErrorType_UNKNOWN,
					Message: err.Error(),
				},
			}, nil
		}
	}
	configPath := request.GetConfigPath()
	if configPath == "" {
		configPath = globalConfigPath
	}
	// Translate each change into an etcd op so the whole batch commits in one
	// transaction (all-or-nothing).
	ops := make([]clientv3.Op, len(request.Changes))
	for i, item := range request.Changes {
		name := path.Join(configPath, item.GetName())
		switch item.GetKind() {
		case pdpb.EventType_PUT:
			// For CDC compatibility, we need to check the Value field firstly.
			value := item.GetValue()
			if value == "" {
				value = string(item.GetPayload())
			}
			ops[i] = clientv3.OpPut(name, value)
		case pdpb.EventType_DELETE:
			ops[i] = clientv3.OpDelete(name)
		}
	}
	res, err := kv.NewSlowLogTxn(s.client).Then(ops...).Commit()
	if err != nil {
		return &pdpb.StoreGlobalConfigResponse{}, err
	}
	if !res.Succeeded {
		return &pdpb.StoreGlobalConfigResponse{}, errors.Errorf("failed to execute StoreGlobalConfig transaction")
	}
	return &pdpb.StoreGlobalConfigResponse{}, nil
}

// LoadGlobalConfig support 2 ways to load global config from etcd
// - `Names` iteratively get value from `ConfigPath/Name` but not care about revision
// - `ConfigPath` if `Names` is nil can get all values and revision of current path
func (s *GrpcServer) LoadGlobalConfig(ctx context.Context, request *pdpb.LoadGlobalConfigRequest) (*pdpb.LoadGlobalConfigResponse, error) {
	if s.client == nil {
		return nil, ErrEtcdNotStarted
	}
	// Optional gRPC rate limiting.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return nil, err
		}
	}
	configPath := request.GetConfigPath()
	if configPath == "" {
		configPath = globalConfigPath
	}
	// Since item value needs to support marshal of different struct types,
	// it should be set to `Payload bytes` instead of `Value string`.
	if request.Names != nil {
		// Per-name lookup: each item carries its own error instead of failing
		// the whole call; revision is not reported in this mode.
		res := make([]*pdpb.GlobalConfigItem, len(request.Names))
		for i, name := range request.Names {
			r, err := s.client.Get(ctx, path.Join(configPath, name))
			if err != nil {
				res[i] = &pdpb.GlobalConfigItem{Name: name, Error: &pdpb.Error{Type: pdpb.ErrorType_UNKNOWN, Message: err.Error()}}
			} else if len(r.Kvs) == 0 {
				msg := "key " + name + " not found"
				res[i] = &pdpb.GlobalConfigItem{Name: name, Error: &pdpb.Error{Type: pdpb.ErrorType_GLOBAL_CONFIG_NOT_FOUND, Message: msg}}
			} else {
				res[i] = &pdpb.GlobalConfigItem{Name: name, Payload: r.Kvs[0].Value, Kind: pdpb.EventType_PUT}
			}
		}
		return &pdpb.LoadGlobalConfigResponse{Items: res}, nil
	}
	// Prefix scan: return every key under configPath plus the store revision.
	r, err := s.client.Get(ctx, configPath, clientv3.WithPrefix())
	if err != nil {
		return &pdpb.LoadGlobalConfigResponse{}, err
	}
	res := make([]*pdpb.GlobalConfigItem, len(r.Kvs))
	for i, value := range r.Kvs {
		res[i] = &pdpb.GlobalConfigItem{Kind: pdpb.EventType_PUT, Name: string(value.Key), Payload: value.Value}
	}
	return &pdpb.LoadGlobalConfigResponse{Items: res, Revision: r.Header.GetRevision()}, nil
}

// WatchGlobalConfig will retry on recoverable errors forever until reconnected
// by Etcd.Watch() as long as the context has not been canceled or timed out.
// Watch on revision which greater than or equal to the required revision.
func (s *GrpcServer) WatchGlobalConfig(req *pdpb.WatchGlobalConfigRequest, server pdpb.PD_WatchGlobalConfigServer) error {
	if s.client == nil {
		return ErrEtcdNotStarted
	}
	// Optional gRPC rate limiting.
	if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() {
		fName := currentFunction()
		limiter := s.GetGRPCRateLimiter()
		if done, err := limiter.Allow(fName); err == nil {
			defer done()
		} else {
			return err
		}
	}
	ctx, cancel := context.WithCancel(server.Context())
	defer cancel()
	configPath := req.GetConfigPath()
	if configPath == "" {
		configPath = globalConfigPath
	}
	revision := req.GetRevision()
	// If the revision is compacted, will meet required revision has been compacted error.
	// - If required revision < CompactRevision, we need to reload all configs to avoid losing data.
	// - If required revision >= CompactRevision, just keep watching.
	// Use WithPrevKV() to get the previous key-value pair when get Delete Event.
	watchChan := s.client.Watch(ctx, configPath, clientv3.WithPrefix(), clientv3.WithRev(revision), clientv3.WithPrevKV())
	for {
		select {
		case <-ctx.Done():
			return nil
		case <-s.Context().Done():
			return nil
		case res := <-watchChan:
			if res.Err() != nil {
				// Tell the client whether it must reload (compacted) or hit
				// some other unrecoverable watch error, then close the stream.
				var resp pdpb.WatchGlobalConfigResponse
				if revision < res.CompactRevision {
					resp.Header = s.wrapErrorToHeader(pdpb.ErrorType_DATA_COMPACTED,
						fmt.Sprintf("required watch revision: %d is smaller than current compact/min revision %d.", revision, res.CompactRevision))
				} else {
					resp.Header = s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN,
						fmt.Sprintf("watch channel meet other error %s.", res.Err().Error()))
				}
				if err := server.Send(&resp); err != nil {
					return err
				}
				// Err() indicates that this WatchResponse holds a channel-closing error.
				return res.Err()
			}
			revision = res.Header.GetRevision()

			cfgs := make([]*pdpb.GlobalConfigItem, 0, len(res.Events))
			for _, e := range res.Events {
				// Since item value needs to support marshal of different struct types,
				// it should be set to `Payload bytes` instead of `Value string`.
				switch e.Type {
				case clientv3.EventTypePut:
					cfgs = append(cfgs, &pdpb.GlobalConfigItem{Name: string(e.Kv.Key), Payload: e.Kv.Value, Kind: pdpb.EventType(e.Type)})
				case clientv3.EventTypeDelete:
					if e.PrevKv != nil {
						cfgs = append(cfgs, &pdpb.GlobalConfigItem{Name: string(e.Kv.Key), Payload: e.PrevKv.Value, Kind: pdpb.EventType(e.Type)})
					} else {
						// Prev-kv is compacted means there must have been a delete event before this event,
						// which means that this is just a duplicated event, so we can just ignore it.
						log.Info("previous key-value pair has been compacted", zap.String("required-key", string(e.Kv.Key)))
					}
				}
			}
			if len(cfgs) > 0 {
				if err := server.Send(&pdpb.WatchGlobalConfigResponse{Changes: cfgs, Revision: res.Header.GetRevision()}); err != nil {
					return err
				}
			}
		}
	}
}

// Evict the leaders when the store is damaged. Damaged regions are emergency errors
// and requires user to manually remove the `evict-leader-scheduler` with pd-ctl
func (s *GrpcServer) handleDamagedStore(stats *pdpb.StoreStats) {
	// TODO: regions have no special process for the time being
	// and need to be removed in the future
	damagedRegions := stats.GetDamagedRegionsId()
	if len(damagedRegions) == 0 {
		return
	}
	for _, regionID := range stats.GetDamagedRegionsId() {
		// Remove peers to make sst recovery physically delete files in TiKV.
		err := s.GetHandler().AddRemovePeerOperator(regionID, stats.GetStoreId())
		if err != nil {
			log.Error("store damaged but can't add remove peer operator",
				zap.Uint64("region-id", regionID),
				zap.Uint64("store-id", stats.GetStoreId()),
				zap.String("error", err.Error()))
		} else {
			log.Info("added remove peer operator due to damaged region",
				zap.Uint64("region-id", regionID),
				zap.Uint64("store-id", stats.GetStoreId()))
		}
	}
}

// ReportMinResolvedTS implements gRPC PDServer.
func (s *GrpcServer) ReportMinResolvedTS(ctx context.Context, request *pdpb.ReportMinResolvedTsRequest) (*pdpb.ReportMinResolvedTsResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.ReportMinResolvedTsResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).ReportMinResolvedTS(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.ReportMinResolvedTsResponse), nil } rc := s.GetRaftCluster() if rc == nil { return &pdpb.ReportMinResolvedTsResponse{Header: s.notBootstrappedHeader()}, nil } storeID := request.GetStoreId() minResolvedTS := request.GetMinResolvedTs() if err := rc.SetMinResolvedTS(storeID, minResolvedTS); err != nil { return nil, err } log.Debug("updated min resolved-ts", zap.Uint64("store", storeID), zap.Uint64("min resolved-ts", minResolvedTS)) return &pdpb.ReportMinResolvedTsResponse{ Header: s.header(), }, nil } // SetExternalTimestamp implements gRPC PDServer. 
func (s *GrpcServer) SetExternalTimestamp(ctx context.Context, request *pdpb.SetExternalTimestampRequest) (*pdpb.SetExternalTimestampResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.SetExternalTimestampResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).SetExternalTimestamp(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.SetExternalTimestampResponse), nil } nowTSO, err := s.getGlobalTSO(ctx) if err != nil { return nil, err } globalTS := tsoutil.GenerateTS(&nowTSO) externalTS := request.GetTimestamp() log.Debug("try to set external timestamp", zap.Uint64("external-ts", externalTS), zap.Uint64("global-ts", globalTS)) if err := s.SetExternalTS(externalTS, globalTS); err != nil { return &pdpb.SetExternalTimestampResponse{Header: s.invalidValue(err.Error())}, nil } return &pdpb.SetExternalTimestampResponse{ Header: s.header(), }, nil } // GetExternalTimestamp implements gRPC PDServer. 
func (s *GrpcServer) GetExternalTimestamp(ctx context.Context, request *pdpb.GetExternalTimestampRequest) (*pdpb.GetExternalTimestampResponse, error) { if s.GetServiceMiddlewarePersistOptions().IsGRPCRateLimitEnabled() { fName := currentFunction() limiter := s.GetGRPCRateLimiter() if done, err := limiter.Allow(fName); err == nil { defer done() } else { return &pdpb.GetExternalTimestampResponse{ Header: s.wrapErrorToHeader(pdpb.ErrorType_UNKNOWN, err.Error()), }, nil } } fn := func(ctx context.Context, client *grpc.ClientConn) (any, error) { return pdpb.NewPDClient(client).GetExternalTimestamp(ctx, request) } if rsp, err := s.unaryMiddleware(ctx, request, fn); err != nil { return nil, err } else if rsp != nil { return rsp.(*pdpb.GetExternalTimestampResponse), nil } timestamp := s.GetExternalTS() return &pdpb.GetExternalTimestampResponse{ Header: s.header(), Timestamp: timestamp, }, nil } func currentFunction() string { counter, _, _, _ := runtime.Caller(1) s := strings.Split(runtime.FuncForPC(counter).Name(), ".") return s[len(s)-1] } ```
```haskell -- editorconfig-checker-disable-file {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -- | Functions for compiling PIR recursive let-bound functions into PLC. module PlutusIR.Compiler.Recursion where import PlutusIR import PlutusIR.Compiler.Definitions import PlutusIR.Compiler.Provenance import PlutusIR.Compiler.Types import PlutusIR.Error import PlutusIR.MkPir qualified as PIR import Control.Monad import Control.Monad.Error.Lens import Control.Monad.Trans import Data.List.NonEmpty hiding (length) import Data.Set qualified as Set import PlutusCore qualified as PLC import PlutusCore.MkPlc qualified as PLC import PlutusCore.Quote import PlutusCore.StdLib.Data.Function qualified as Function import PlutusCore.StdLib.Meta.Data.Tuple qualified as Tuple {- Note [Recursive lets] We need to define these with a fixpoint. We can derive a fixpoint operator for values already. However, we also need to work out how to encode recursion over multiple values simultaneously. The answer is simple - we pass them through as a tuple. Overall, the translation looks like this. We convert this: let rec f_1 : t_1 = b_1 .. f_i : t_i = b_i in result into this: (\tuple : forall r . (t_1 -> .. -> t_i -> r) -> r . let f_1 = _1 tuple .. f_i = _i tuple in result ) ($fixN i$ (\choose f_1 ... f_i . choose b_1 ... b_i)) where _i is the accessor for the ith component of a tuple. This scheme is a little complicated - why don't we just pass a function directly to the fixed tuple that consumes the values? Why do the second round of let-binding? The answer is that in order to use the tuple we have to provide a result type. If we used it directly, we would have to provide the type of the *result* term, which we may not know. Here we merely have to provide it with the types of the f_is, which we *do* know. -} -- See Note [Recursive lets] -- | Compile a mutually recursive list of var decls bound in a body. 
-- | Compile a non-empty list of mutually recursive term definitions bound in
-- the given body, following the fixpoint-of-a-tuple scheme described in
-- Note [Recursive lets] above.
compileRecTerms
    :: Compiling m e uni fun a
    => PIRTerm uni fun a
    -> NonEmpty (TermDef TyName Name uni fun (Provenance a))
    -> DefT SharedName uni fun (Provenance a) m (PIRTerm uni fun a)
compileRecTerms body bs = do
    p <- lift getEnclosing
    -- Build the tuple of fixed values, then bind each component to its
    -- original name inside the body.
    fixpoint <- mkFixpoint bs
    Tuple.bindTuple p (PIR._varDeclName . PIR.defVar <$> toList bs) fixpoint body

-- | Given a list of var decls, create a tuple of values that computes their mutually recursive fixpoint.
mkFixpoint
    :: forall m e uni fun a . Compiling m e uni fun a
    => NonEmpty (TermDef TyName Name uni fun (Provenance a))
    -> DefT SharedName uni fun (Provenance a) m (Tuple.Tuple (Term TyName Name uni fun) uni (Provenance a))
mkFixpoint bs = do
    p0 <- lift getEnclosing

    -- Each recursive binding must be a lambda (a function); anything else
    -- cannot be expressed via the fixpoint combinator, so it is an error.
    funs <- forM bs $ \(PIR.Def (PIR.VarDecl p name ty) term) ->
        case PIR.mkFunctionDef p name ty term of
            Just fun -> pure fun
            Nothing  -> lift $ throwing _Error $ CompilationError (PLC.typeAnn ty) "Recursive values must be of function type"

    -- See Note [Extra definitions while compiling let-bindings]
    let
        arity = fromIntegral $ length funs
        fixByKey = FixBy
        fixNKey = FixpointCombinator arity

    -- Shared definition of the 'fixBy' combinator, created on first use.
    let mkFixByDef = do
            name <- liftQuote $ toProgramName fixByKey
            let (fixByTerm, fixByType) = Function.fixByAndType
            pure (PLC.Def (PLC.VarDecl noProvenance name (noProvenance <$ fixByType))
                          (noProvenance <$ fixByTerm, Strict), mempty)

    -- Shared definition of the arity-specific 'fixN' combinator. For a single
    -- binding plain 'fix' suffices; otherwise 'fixN' is derived from 'fixBy'.
    let mkFixNDef = do
            name <- liftQuote $ toProgramName fixNKey
            ((fixNTerm, fixNType), fixNDeps) <-
                if arity == 1
                then pure (Function.fixAndType, mempty)
                -- fixN depends on fixBy
                else do
                    fixBy <- lookupOrDefineTerm p0 fixByKey mkFixByDef
                    pure (Function.fixNAndType arity (void fixBy), Set.singleton fixByKey)
            pure (PLC.Def (PLC.VarDecl noProvenance name (noProvenance <$ fixNType))
                          (noProvenance <$ fixNTerm, Strict), fixNDeps)

    fixN <- lookupOrDefineTerm p0 fixNKey mkFixNDef

    liftQuote $ case funs of
        -- Takes a list of function defs and function bodies and turns them into a Scott-encoded tuple, which
        -- happens to be exactly what we want
        f :| [] -> Tuple.getSpineToTuple p0 [(PLC.functionDefToType f, Function.getSingleFixOf p0 fixN f)]
        f :| fs -> Function.getMutualFixOf p0 fixN (f:fs)
```
Mark Campbell Ireland (born 5 April 1960) became the Archdeacon of Blackburn in 2016. Ireland was educated at the University of St Andrews and Wycliffe Hall, Oxford, and was ordained in 1985. He was a director of Saltmine Trust (company 02930528) from 2005 to 2008. After curacies in Blackburn and Lancaster he held incumbencies in Baxenden, Walsall and Wellington with Eyton before his appointment as archdeacon. He is a member of the General Synod of the Church of England. In relation to same-sex marriage, Ireland has stated: "I want to maintain the church's traditional doctrine of marriage, but also to belong to a church that truly welcomes and includes gay and lesbian people." References Archdeacons of Blackburn 1960 births Living people Alumni of the University of St Andrews Alumni of Wycliffe Hall, Oxford Members of the General Synod of the Church of England
```javascript import { group, check, sleep } from 'k6'; import http from 'k6/http'; // Version: 1.2 // Creator: BrowserMob Proxy export let options = { maxRedirects: 0, }; export default function() { group("Page 0 - Page 0 $ 6 \" \x00\n\t\u2028", function() { let res, redirectUrl, json; // Request #0 res = http.post("path_to_url", `{ "locale": "sv-SE", "merchant_urls": { "checkout": "path_to_url", "confirmation": "path_to_url", "push": "path_to_url{checkout.order.id}?merchant_id=smi-merchant-all-validation\u0026env=perf", "terms": "path_to_url" }, "options": {}, "order_amount": 16278, "order_lines": [ { "image_url": "path_to_url", "name": "Mediokra Betong Lampa. Tangentbord", "product_url": "path_to_url", "quantity": 1, "quantity_unit": "kg", "reference": "jkwedq9f6t", "tax_rate": 800, "total_amount": 16278, "total_discount_amount": 0, "total_tax_amount": 1206, "type": "physical", "unit_price": 16278 } ], "order_tax_amount": 1206, "purchase_country": "se", "purchase_currency": "SEK", "shipping_countries": ["AD", "AE", "AG", "AI", "AL", "AM", "AQ", "AR", "AS", "AT", "AU", "AW", "AX", "AZ", "BB", "BD", "BE", "BF", "BG", "BH", "BJ", "BL", "BM", "BN", "BO", "BQ", "BR", "BS", "BT", "BV", "BW", "BZ", "CA", "CC", "CH", "CK", "CL", "CM", "CO", "CR", "CU", "CV", "CW", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM", "DO", "DZ", "EC", "EE", "EH", "ES", "ET", "FI", "FJ", "FK", "FM", "FO", "FR", "GA", "GB", "GD", "GE", "GF", "GG", "GH", "GI", "GL", "GM", "GP", "GQ", "GR", "GS", "GT", "GU", "HK", "HM", "HN", "HR", "HU", "ID", "IE", "IL", "IM", "IN", "IO", "IS", "IT", "JE", "JM", "JO", "JP", "KE", "KG", "KH", "KI", "KM", "KN", "KR", "KW", "KY", "KZ", "LC", "LI", "LK", "LS", "LT", "LU", "LV", "MA", "MC", "ME", "MF", "MG", "MH", "MK", "ML", "MN", "MO", "MP", "MQ", "MR", "MS", "MT", "MU", "MV", "MW", "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI", "NL", "NO", "NP", "NR", "NU", "NZ", "OM", "PA", "PE", "PF", "PH", "PK", "PL", "PM", "PN", "PR", "PS", "PT", "PW", "PY", "QA", "RE", "RO", 
"RW", "SA", "SB", "SC", "SE", "SG", "SH", "SI", "SJ", "SK", "SL", "SM", "SN", "SR", "ST", "SV", "SX", "SZ", "TC", "TD", "TF", "TG", "TH", "TJ", "TK", "TL", "TM", "TO", "TR", "TT", "TV", "TW", "TZ", "UM", "US", "UY", "UZ", "VA", "VC", "VE", "VG", "VI", "VN", "WF", "WS", "YT", "ZA", "ZM"] }`, { "headers": { "Authorization": "Basic stuffz", "User-Agent": "SysTest - perf", "Accept": "application/json; charset=utf-8", "Content-Type": "application/json", "Accept-Encoding": "gzip;q=1.0,deflate;q=0.6,identity;q=0.3", "Host": "some-host.example.com" } } ) if (!check(res, {"status is 201": (r) => r.status === 201 })) { return }; redirectUrl = res.headers.Location; json = JSON.parse(res.body); // Request #1 res = http.get(redirectUrl, { "headers": { "Authorization": "Basic stuffz", "User-Agent": "SysTest - perf", "Accept": "application/json; charset=utf-8", "Content-Type": "application/json", "Accept-Encoding": "gzip;q=1.0,deflate;q=0.6,identity;q=0.3", "Host": "some-host.example.com" } } ) if (!check(res, {"status is 200": (r) => r.status === 200 })) { return }; json = JSON.parse(res.body); // Request #2 res = http.get("path_to_url", { "headers": { "Authorization": "Checkout otherStuffz", "User-Agent": "SysTest - perf", "Accept": "application/vnd.checkout.server-order-v1+json", "Content-Type": "application/vnd.checkout.client-order-v1+json", "Accept-Encoding": "gzip;q=1.0,deflate;q=0.6,identity;q=0.3", "Host": "some-other-host.example.com" } } ) if (!check(res, {"status is 200": (r) => r.status === 200 })) { return }; json = JSON.parse(res.body); // Request #3 res = http.post("path_to_url", `{ "allowed_billing_countries": ["${json.allowed_billing_countries[0]}", "${json.allowed_billing_countries[1]}", "${json.allowed_billing_countries[2]}", "${json.allowed_billing_countries[3]}", "${json.allowed_billing_countries[4]}", "${json.allowed_billing_countries[5]}", "${json.allowed_billing_countries[6]}", "${json.allowed_billing_countries[7]}", 
"${json.allowed_billing_countries[8]}", "${json.allowed_billing_countries[9]}", "${json.allowed_billing_countries[10]}", "${json.allowed_billing_countries[11]}", "${json.allowed_billing_countries[12]}", "${json.allowed_billing_countries[13]}", "${json.allowed_billing_countries[14]}", "${json.allowed_billing_countries[15]}", "${json.allowed_billing_countries[16]}", "${json.allowed_billing_countries[17]}", "${json.allowed_billing_countries[18]}", "${json.allowed_billing_countries[19]}", "${json.allowed_billing_countries[20]}", "${json.allowed_billing_countries[21]}", "${json.allowed_billing_countries[22]}", "${json.allowed_billing_countries[23]}", "${json.allowed_billing_countries[24]}", "${json.allowed_billing_countries[25]}", "${json.allowed_billing_countries[26]}", "${json.allowed_billing_countries[27]}", "${json.allowed_billing_countries[28]}", "${json.allowed_billing_countries[29]}", "${json.allowed_billing_countries[30]}", "${json.allowed_billing_countries[31]}", "${json.allowed_billing_countries[32]}", "${json.allowed_billing_countries[33]}", "${json.allowed_billing_countries[34]}", "${json.allowed_billing_countries[35]}", "${json.allowed_billing_countries[36]}", "${json.allowed_billing_countries[37]}", "${json.allowed_billing_countries[38]}", "${json.allowed_billing_countries[39]}", "${json.allowed_billing_countries[40]}", "${json.allowed_billing_countries[41]}", "${json.allowed_billing_countries[42]}", "${json.allowed_billing_countries[43]}", "${json.allowed_billing_countries[44]}", "${json.allowed_billing_countries[45]}", "${json.allowed_billing_countries[46]}", "${json.allowed_billing_countries[47]}", "${json.allowed_billing_countries[48]}", "${json.allowed_billing_countries[49]}", "${json.allowed_billing_countries[50]}", "${json.allowed_billing_countries[51]}", "${json.allowed_billing_countries[52]}", "${json.allowed_billing_countries[53]}", "${json.allowed_billing_countries[54]}", "${json.allowed_billing_countries[55]}", 
"${json.allowed_billing_countries[56]}", "${json.allowed_billing_countries[57]}", "${json.allowed_billing_countries[58]}", "${json.allowed_billing_countries[59]}", "${json.allowed_billing_countries[60]}", "${json.allowed_billing_countries[61]}", "${json.allowed_billing_countries[62]}", "${json.allowed_billing_countries[63]}", "${json.allowed_billing_countries[64]}", "${json.allowed_billing_countries[65]}", "${json.allowed_billing_countries[66]}", "${json.allowed_billing_countries[67]}", "${json.allowed_billing_countries[68]}", "${json.allowed_billing_countries[69]}", "${json.allowed_billing_countries[70]}", "${json.allowed_billing_countries[71]}", "${json.allowed_billing_countries[72]}", "${json.allowed_billing_countries[73]}", "${json.allowed_billing_countries[74]}", "${json.allowed_billing_countries[75]}", "${json.allowed_billing_countries[76]}", "${json.allowed_billing_countries[77]}", "${json.allowed_billing_countries[78]}", "${json.allowed_billing_countries[79]}", "${json.allowed_billing_countries[80]}", "${json.allowed_billing_countries[81]}", "${json.allowed_billing_countries[82]}", "${json.allowed_billing_countries[83]}", "${json.allowed_billing_countries[84]}", "${json.allowed_billing_countries[85]}", "${json.allowed_billing_countries[86]}", "${json.allowed_billing_countries[87]}", "${json.allowed_billing_countries[88]}", "${json.allowed_billing_countries[89]}", "${json.allowed_billing_countries[90]}", "${json.allowed_billing_countries[91]}", "${json.allowed_billing_countries[92]}", "${json.allowed_billing_countries[93]}", "${json.allowed_billing_countries[94]}", "${json.allowed_billing_countries[95]}", "${json.allowed_billing_countries[96]}", "${json.allowed_billing_countries[97]}", "${json.allowed_billing_countries[98]}", "${json.allowed_billing_countries[99]}", "${json.allowed_billing_countries[100]}", "${json.allowed_billing_countries[101]}", "${json.allowed_billing_countries[102]}", "${json.allowed_billing_countries[103]}", 
"${json.allowed_billing_countries[104]}", "${json.allowed_billing_countries[105]}", "${json.allowed_billing_countries[106]}", "${json.allowed_billing_countries[107]}", "${json.allowed_billing_countries[108]}", "${json.allowed_billing_countries[109]}", "${json.allowed_billing_countries[110]}", "${json.allowed_billing_countries[111]}", "${json.allowed_billing_countries[112]}", "${json.allowed_billing_countries[113]}", "${json.allowed_billing_countries[114]}", "${json.allowed_billing_countries[115]}", "${json.allowed_billing_countries[116]}", "${json.allowed_billing_countries[117]}", "${json.allowed_billing_countries[118]}", "${json.allowed_billing_countries[119]}", "${json.allowed_billing_countries[120]}", "${json.allowed_billing_countries[121]}", "${json.allowed_billing_countries[122]}", "${json.allowed_billing_countries[123]}", "${json.allowed_billing_countries[124]}", "${json.allowed_billing_countries[125]}", "${json.allowed_billing_countries[126]}", "${json.allowed_billing_countries[127]}", "${json.allowed_billing_countries[128]}", "${json.allowed_billing_countries[129]}", "${json.allowed_billing_countries[130]}", "${json.allowed_billing_countries[131]}", "${json.allowed_billing_countries[132]}", "${json.allowed_billing_countries[133]}", "${json.allowed_billing_countries[134]}", "${json.allowed_billing_countries[135]}", "${json.allowed_billing_countries[136]}", "${json.allowed_billing_countries[137]}", "${json.allowed_billing_countries[138]}", "${json.allowed_billing_countries[139]}", "${json.allowed_billing_countries[140]}", "${json.allowed_billing_countries[141]}", "${json.allowed_billing_countries[142]}", "${json.allowed_billing_countries[143]}", "${json.allowed_billing_countries[144]}", "${json.allowed_billing_countries[145]}", "${json.allowed_billing_countries[146]}", "${json.allowed_billing_countries[147]}", "${json.allowed_billing_countries[148]}", "${json.allowed_billing_countries[149]}", "${json.allowed_billing_countries[150]}", 
"${json.allowed_billing_countries[151]}", "${json.allowed_billing_countries[152]}", "${json.allowed_billing_countries[153]}", "${json.allowed_billing_countries[154]}", "${json.allowed_billing_countries[155]}", "${json.allowed_billing_countries[156]}", "${json.allowed_billing_countries[157]}", "${json.allowed_billing_countries[158]}", "${json.allowed_billing_countries[159]}", "${json.allowed_billing_countries[160]}", "${json.allowed_billing_countries[161]}", "${json.allowed_billing_countries[162]}", "${json.allowed_billing_countries[163]}", "${json.allowed_billing_countries[164]}", "${json.allowed_billing_countries[165]}", "${json.allowed_billing_countries[166]}", "${json.allowed_billing_countries[167]}", "${json.allowed_billing_countries[168]}", "${json.allowed_billing_countries[169]}", "${json.allowed_billing_countries[170]}", "${json.allowed_billing_countries[171]}", "${json.allowed_billing_countries[172]}", "${json.allowed_billing_countries[173]}", "${json.allowed_billing_countries[174]}", "${json.allowed_billing_countries[175]}", "${json.allowed_billing_countries[176]}", "${json.allowed_billing_countries[177]}", "${json.allowed_billing_countries[178]}", "${json.allowed_billing_countries[179]}", "${json.allowed_billing_countries[180]}", "${json.allowed_billing_countries[181]}", "${json.allowed_billing_countries[182]}", "${json.allowed_billing_countries[183]}", "${json.allowed_billing_countries[184]}", "${json.allowed_billing_countries[185]}", "${json.allowed_billing_countries[186]}", "${json.allowed_billing_countries[187]}", "${json.allowed_billing_countries[188]}", "${json.allowed_billing_countries[189]}", "${json.allowed_billing_countries[190]}", "${json.allowed_billing_countries[191]}", "${json.allowed_billing_countries[192]}", "${json.allowed_billing_countries[193]}", "${json.allowed_billing_countries[194]}", "${json.allowed_billing_countries[195]}", "${json.allowed_billing_countries[196]}", "${json.allowed_billing_countries[197]}", 
"${json.allowed_billing_countries[198]}", "${json.allowed_billing_countries[199]}", "${json.allowed_billing_countries[200]}", "${json.allowed_billing_countries[201]}", "${json.allowed_billing_countries[202]}", "${json.allowed_billing_countries[203]}", "${json.allowed_billing_countries[204]}", "${json.allowed_billing_countries[205]}", "${json.allowed_billing_countries[206]}", "${json.allowed_billing_countries[207]}", "${json.allowed_billing_countries[208]}", "${json.allowed_billing_countries[209]}", "${json.allowed_billing_countries[210]}"], "allowed_shipping_countries": ["${json.allowed_shipping_countries[0]}", "${json.allowed_shipping_countries[1]}", "${json.allowed_shipping_countries[2]}", "${json.allowed_shipping_countries[3]}", "${json.allowed_shipping_countries[4]}", "${json.allowed_shipping_countries[5]}", "${json.allowed_shipping_countries[6]}", "${json.allowed_shipping_countries[7]}", "${json.allowed_shipping_countries[8]}", "${json.allowed_shipping_countries[9]}", "${json.allowed_shipping_countries[10]}", "${json.allowed_shipping_countries[11]}", "${json.allowed_shipping_countries[12]}", "${json.allowed_shipping_countries[13]}", "${json.allowed_shipping_countries[14]}", "${json.allowed_shipping_countries[15]}", "${json.allowed_shipping_countries[16]}", "${json.allowed_shipping_countries[17]}", "${json.allowed_shipping_countries[18]}", "${json.allowed_shipping_countries[19]}", "${json.allowed_shipping_countries[20]}", "${json.allowed_shipping_countries[21]}", "${json.allowed_shipping_countries[22]}", "${json.allowed_shipping_countries[23]}", "${json.allowed_shipping_countries[24]}", "${json.allowed_shipping_countries[25]}", "${json.allowed_shipping_countries[26]}", "${json.allowed_shipping_countries[27]}", "${json.allowed_shipping_countries[28]}", "${json.allowed_shipping_countries[29]}", "${json.allowed_shipping_countries[30]}", "${json.allowed_shipping_countries[31]}", "${json.allowed_shipping_countries[32]}", "${json.allowed_shipping_countries[33]}", 
"${json.allowed_shipping_countries[34]}", "${json.allowed_shipping_countries[35]}", "${json.allowed_shipping_countries[36]}", "${json.allowed_shipping_countries[37]}", "${json.allowed_shipping_countries[38]}", "${json.allowed_shipping_countries[39]}", "${json.allowed_shipping_countries[40]}", "${json.allowed_shipping_countries[41]}", "${json.allowed_shipping_countries[42]}", "${json.allowed_shipping_countries[43]}", "${json.allowed_shipping_countries[44]}", "${json.allowed_shipping_countries[45]}", "${json.allowed_shipping_countries[46]}", "${json.allowed_shipping_countries[47]}", "${json.allowed_shipping_countries[48]}", "${json.allowed_shipping_countries[49]}", "${json.allowed_shipping_countries[50]}", "${json.allowed_shipping_countries[51]}", "${json.allowed_shipping_countries[52]}", "${json.allowed_shipping_countries[53]}", "${json.allowed_shipping_countries[54]}", "${json.allowed_shipping_countries[55]}", "${json.allowed_shipping_countries[56]}", "${json.allowed_shipping_countries[57]}", "${json.allowed_shipping_countries[58]}", "${json.allowed_shipping_countries[59]}", "${json.allowed_shipping_countries[60]}", "${json.allowed_shipping_countries[61]}", "${json.allowed_shipping_countries[62]}", "${json.allowed_shipping_countries[63]}", "${json.allowed_shipping_countries[64]}", "${json.allowed_shipping_countries[65]}", "${json.allowed_shipping_countries[66]}", "${json.allowed_shipping_countries[67]}", "${json.allowed_shipping_countries[68]}", "${json.allowed_shipping_countries[69]}", "${json.allowed_shipping_countries[70]}", "${json.allowed_shipping_countries[71]}", "${json.allowed_shipping_countries[72]}", "${json.allowed_shipping_countries[73]}", "${json.allowed_shipping_countries[74]}", "${json.allowed_shipping_countries[75]}", "${json.allowed_shipping_countries[76]}", "${json.allowed_shipping_countries[77]}", "${json.allowed_shipping_countries[78]}", "${json.allowed_shipping_countries[79]}", "${json.allowed_shipping_countries[80]}", 
"${json.allowed_shipping_countries[81]}", "${json.allowed_shipping_countries[82]}", "${json.allowed_shipping_countries[83]}", "${json.allowed_shipping_countries[84]}", "${json.allowed_shipping_countries[85]}", "${json.allowed_shipping_countries[86]}", "${json.allowed_shipping_countries[87]}", "${json.allowed_shipping_countries[88]}", "${json.allowed_shipping_countries[89]}", "${json.allowed_shipping_countries[90]}", "${json.allowed_shipping_countries[91]}", "${json.allowed_shipping_countries[92]}", "${json.allowed_shipping_countries[93]}", "${json.allowed_shipping_countries[94]}", "${json.allowed_shipping_countries[95]}", "${json.allowed_shipping_countries[96]}", "${json.allowed_shipping_countries[97]}", "${json.allowed_shipping_countries[98]}", "${json.allowed_shipping_countries[99]}", "${json.allowed_shipping_countries[100]}", "${json.allowed_shipping_countries[101]}", "${json.allowed_shipping_countries[102]}", "${json.allowed_shipping_countries[103]}", "${json.allowed_shipping_countries[104]}", "${json.allowed_shipping_countries[105]}", "${json.allowed_shipping_countries[106]}", "${json.allowed_shipping_countries[107]}", "${json.allowed_shipping_countries[108]}", "${json.allowed_shipping_countries[109]}", "${json.allowed_shipping_countries[110]}", "${json.allowed_shipping_countries[111]}", "${json.allowed_shipping_countries[112]}", "${json.allowed_shipping_countries[113]}", "${json.allowed_shipping_countries[114]}", "${json.allowed_shipping_countries[115]}", "${json.allowed_shipping_countries[116]}", "${json.allowed_shipping_countries[117]}", "${json.allowed_shipping_countries[118]}", "${json.allowed_shipping_countries[119]}", "${json.allowed_shipping_countries[120]}", "${json.allowed_shipping_countries[121]}", "${json.allowed_shipping_countries[122]}", "${json.allowed_shipping_countries[123]}", "${json.allowed_shipping_countries[124]}", "${json.allowed_shipping_countries[125]}", "${json.allowed_shipping_countries[126]}", 
"${json.allowed_shipping_countries[127]}", "${json.allowed_shipping_countries[128]}", "${json.allowed_shipping_countries[129]}", "${json.allowed_shipping_countries[130]}", "${json.allowed_shipping_countries[131]}", "${json.allowed_shipping_countries[132]}", "${json.allowed_shipping_countries[133]}", "${json.allowed_shipping_countries[134]}", "${json.allowed_shipping_countries[135]}", "${json.allowed_shipping_countries[136]}", "${json.allowed_shipping_countries[137]}", "${json.allowed_shipping_countries[138]}", "${json.allowed_shipping_countries[139]}", "${json.allowed_shipping_countries[140]}", "${json.allowed_shipping_countries[141]}", "${json.allowed_shipping_countries[142]}", "${json.allowed_shipping_countries[143]}", "${json.allowed_shipping_countries[144]}", "${json.allowed_shipping_countries[145]}", "${json.allowed_shipping_countries[146]}", "${json.allowed_shipping_countries[147]}", "${json.allowed_shipping_countries[148]}", "${json.allowed_shipping_countries[149]}", "${json.allowed_shipping_countries[150]}", "${json.allowed_shipping_countries[151]}", "${json.allowed_shipping_countries[152]}", "${json.allowed_shipping_countries[153]}", "${json.allowed_shipping_countries[154]}", "${json.allowed_shipping_countries[155]}", "${json.allowed_shipping_countries[156]}", "${json.allowed_shipping_countries[157]}", "${json.allowed_shipping_countries[158]}", "${json.allowed_shipping_countries[159]}", "${json.allowed_shipping_countries[160]}", "${json.allowed_shipping_countries[161]}", "${json.allowed_shipping_countries[162]}", "${json.allowed_shipping_countries[163]}", "${json.allowed_shipping_countries[164]}", "${json.allowed_shipping_countries[165]}", "${json.allowed_shipping_countries[166]}", "${json.allowed_shipping_countries[167]}", "${json.allowed_shipping_countries[168]}", "${json.allowed_shipping_countries[169]}", "${json.allowed_shipping_countries[170]}", "${json.allowed_shipping_countries[171]}", "${json.allowed_shipping_countries[172]}", 
"${json.allowed_shipping_countries[173]}", "${json.allowed_shipping_countries[174]}", "${json.allowed_shipping_countries[175]}", "${json.allowed_shipping_countries[176]}", "${json.allowed_shipping_countries[177]}", "${json.allowed_shipping_countries[178]}", "${json.allowed_shipping_countries[179]}", "${json.allowed_shipping_countries[180]}", "${json.allowed_shipping_countries[181]}", "${json.allowed_shipping_countries[182]}", "${json.allowed_shipping_countries[183]}", "${json.allowed_shipping_countries[184]}", "${json.allowed_shipping_countries[185]}", "${json.allowed_shipping_countries[186]}", "${json.allowed_shipping_countries[187]}", "${json.allowed_shipping_countries[188]}", "${json.allowed_shipping_countries[189]}", "${json.allowed_shipping_countries[190]}", "${json.allowed_shipping_countries[191]}", "${json.allowed_shipping_countries[192]}", "${json.allowed_shipping_countries[193]}", "${json.allowed_shipping_countries[194]}", "${json.allowed_shipping_countries[195]}", "${json.allowed_shipping_countries[196]}", "${json.allowed_shipping_countries[197]}", "${json.allowed_shipping_countries[198]}", "${json.allowed_shipping_countries[199]}", "${json.allowed_shipping_countries[200]}", "${json.allowed_shipping_countries[201]}", "${json.allowed_shipping_countries[202]}", "${json.allowed_shipping_countries[203]}", "${json.allowed_shipping_countries[204]}", "${json.allowed_shipping_countries[205]}", "${json.allowed_shipping_countries[206]}", "${json.allowed_shipping_countries[207]}", "${json.allowed_shipping_countries[208]}", "${json.allowed_shipping_countries[209]}", "${json.allowed_shipping_countries[210]}"], "cart": { "items": [ { "image_url": "${json.cart.items[0].image_url}", "name": "${json.cart.items[0].name}", "product_url": "${json.cart.items[0].product_url}", "quantity": "${json.cart.items[0].quantity}", "reference": "${json.cart.items[0].reference}", "tax_rate": "${json.cart.items[0].tax_rate}", "total_price_excluding_tax": 
"${json.cart.items[0].total_price_excluding_tax}", "total_price_including_tax": "${json.cart.items[0].total_price_including_tax}", "total_tax_amount": "${json.cart.items[0].total_tax_amount}", "type": "${json.cart.items[0].type}", "unit_price": "${json.cart.items[0].unit_price}" } ], "subtotal": "${json.cart.subtotal}", "total_discount_amount_excluding_tax": "${json.cart.total_discount_amount_excluding_tax}", "total_price_excluding_tax": "${json.cart.total_price_excluding_tax}", "total_price_including_tax": "${json.cart.total_price_including_tax}", "total_shipping_amount_excluding_tax": "${json.cart.total_shipping_amount_excluding_tax}", "total_store_credit": "${json.cart.total_store_credit}", "total_surcharge_amount_excluding_tax": "${json.cart.total_surcharge_amount_excluding_tax}", "total_tax_amount": "${json.cart.total_tax_amount}" }, "merchant_urls": { "checkout": "${json.merchant_urls.checkout}", "confirmation": "${json.merchant_urls.confirmation}", "terms": "${json.merchant_urls.terms}" }, "options": { "allow_separate_shipping_address": "${json.options.allow_separate_shipping_address}", "allowed_customer_types": ["${json.options.allowed_customer_types[0]}"], "date_of_birth_mandatory": "${json.options.date_of_birth_mandatory}", "national_identification_number_mandatory": "${json.options.national_identification_number_mandatory}", "payment_selector_on_load": "${json.options.payment_selector_on_load}" }, "preview_payment_methods": [ { "data": { "days": "${json.preview_payment_methods[0].data.days}" }, "id": "${json.preview_payment_methods[0].id}", "type": "${json.preview_payment_methods[0].type}" }, { "data": { "allow_saved_card": "${json.preview_payment_methods[1].data.allow_saved_card}", "available_cards": ["${json.preview_payment_methods[1].data.available_cards[0]}", "${json.preview_payment_methods[1].data.available_cards[1]}"], "do_save_card": "${json.preview_payment_methods[1].data.do_save_card}" }, "id": "${json.preview_payment_methods[1].id}", "type": 
"${json.preview_payment_methods[1].type}" } ], "required_fields": ["${json.required_fields[0]}", "${json.required_fields[1]}"], "shared": { "billing_address": { "country": "${json.shared.billing_address.country}" }, "challenge": { "country": "${json.shared.challenge.country}", "email": "drop+b28643c0e7c74da6b6ff2f4131aa3d64+d0+gr@example.com", "postal_code": "10066" }, "currency": "${json.shared.currency}", "customer": { "type": "${json.shared.customer.type}" }, "language": "${json.shared.language}" }, "status": { "prescreened": "${json.status.prescreened}", "require_terms_consent": "${json.status.require_terms_consent}" } }`, { "headers": { "Authorization": "Checkout otherStuffz", "User-Agent": "SysTest - perf", "Content-Type": "application/vnd.checkout.client-order-v1+json", "Accept": "application/vnd.checkout.server-order-v1+json", "Accept-Encoding": "gzip;q=1.0,deflate;q=0.6,identity;q=0.3", "Host": "some-other-host.example.com" } } ) if (!check(res, {"status is 200": (r) => r.status === 200 })) { return }; json = JSON.parse(res.body); // Request #4 res = http.post("path_to_url", `{ "allowed_billing_countries": ["${json.allowed_billing_countries[0]}", "${json.allowed_billing_countries[1]}", "${json.allowed_billing_countries[2]}", "${json.allowed_billing_countries[3]}", "${json.allowed_billing_countries[4]}", "${json.allowed_billing_countries[5]}", "${json.allowed_billing_countries[6]}", "${json.allowed_billing_countries[7]}", "${json.allowed_billing_countries[8]}", "${json.allowed_billing_countries[9]}", "${json.allowed_billing_countries[10]}", "${json.allowed_billing_countries[11]}", "${json.allowed_billing_countries[12]}", "${json.allowed_billing_countries[13]}", "${json.allowed_billing_countries[14]}", "${json.allowed_billing_countries[15]}", "${json.allowed_billing_countries[16]}", "${json.allowed_billing_countries[17]}", "${json.allowed_billing_countries[18]}", "${json.allowed_billing_countries[19]}", "${json.allowed_billing_countries[20]}", 
"${json.allowed_billing_countries[21]}", "${json.allowed_billing_countries[22]}", "${json.allowed_billing_countries[23]}", "${json.allowed_billing_countries[24]}", "${json.allowed_billing_countries[25]}", "${json.allowed_billing_countries[26]}", "${json.allowed_billing_countries[27]}", "${json.allowed_billing_countries[28]}", "${json.allowed_billing_countries[29]}", "${json.allowed_billing_countries[30]}", "${json.allowed_billing_countries[31]}", "${json.allowed_billing_countries[32]}", "${json.allowed_billing_countries[33]}", "${json.allowed_billing_countries[34]}", "${json.allowed_billing_countries[35]}", "${json.allowed_billing_countries[36]}", "${json.allowed_billing_countries[37]}", "${json.allowed_billing_countries[38]}", "${json.allowed_billing_countries[39]}", "${json.allowed_billing_countries[40]}", "${json.allowed_billing_countries[41]}", "${json.allowed_billing_countries[42]}", "${json.allowed_billing_countries[43]}", "${json.allowed_billing_countries[44]}", "${json.allowed_billing_countries[45]}", "${json.allowed_billing_countries[46]}", "${json.allowed_billing_countries[47]}", "${json.allowed_billing_countries[48]}", "${json.allowed_billing_countries[49]}", "${json.allowed_billing_countries[50]}", "${json.allowed_billing_countries[51]}", "${json.allowed_billing_countries[52]}", "${json.allowed_billing_countries[53]}", "${json.allowed_billing_countries[54]}", "${json.allowed_billing_countries[55]}", "${json.allowed_billing_countries[56]}", "${json.allowed_billing_countries[57]}", "${json.allowed_billing_countries[58]}", "${json.allowed_billing_countries[59]}", "${json.allowed_billing_countries[60]}", "${json.allowed_billing_countries[61]}", "${json.allowed_billing_countries[62]}", "${json.allowed_billing_countries[63]}", "${json.allowed_billing_countries[64]}", "${json.allowed_billing_countries[65]}", "${json.allowed_billing_countries[66]}", "${json.allowed_billing_countries[67]}", "${json.allowed_billing_countries[68]}", 
"${json.allowed_billing_countries[69]}", "${json.allowed_billing_countries[70]}", "${json.allowed_billing_countries[71]}", "${json.allowed_billing_countries[72]}", "${json.allowed_billing_countries[73]}", "${json.allowed_billing_countries[74]}", "${json.allowed_billing_countries[75]}", "${json.allowed_billing_countries[76]}", "${json.allowed_billing_countries[77]}", "${json.allowed_billing_countries[78]}", "${json.allowed_billing_countries[79]}", "${json.allowed_billing_countries[80]}", "${json.allowed_billing_countries[81]}", "${json.allowed_billing_countries[82]}", "${json.allowed_billing_countries[83]}", "${json.allowed_billing_countries[84]}", "${json.allowed_billing_countries[85]}", "${json.allowed_billing_countries[86]}", "${json.allowed_billing_countries[87]}", "${json.allowed_billing_countries[88]}", "${json.allowed_billing_countries[89]}", "${json.allowed_billing_countries[90]}", "${json.allowed_billing_countries[91]}", "${json.allowed_billing_countries[92]}", "${json.allowed_billing_countries[93]}", "${json.allowed_billing_countries[94]}", "${json.allowed_billing_countries[95]}", "${json.allowed_billing_countries[96]}", "${json.allowed_billing_countries[97]}", "${json.allowed_billing_countries[98]}", "${json.allowed_billing_countries[99]}", "${json.allowed_billing_countries[100]}", "${json.allowed_billing_countries[101]}", "${json.allowed_billing_countries[102]}", "${json.allowed_billing_countries[103]}", "${json.allowed_billing_countries[104]}", "${json.allowed_billing_countries[105]}", "${json.allowed_billing_countries[106]}", "${json.allowed_billing_countries[107]}", "${json.allowed_billing_countries[108]}", "${json.allowed_billing_countries[109]}", "${json.allowed_billing_countries[110]}", "${json.allowed_billing_countries[111]}", "${json.allowed_billing_countries[112]}", "${json.allowed_billing_countries[113]}", "${json.allowed_billing_countries[114]}", "${json.allowed_billing_countries[115]}", "${json.allowed_billing_countries[116]}", 
"${json.allowed_billing_countries[117]}", "${json.allowed_billing_countries[118]}", "${json.allowed_billing_countries[119]}", "${json.allowed_billing_countries[120]}", "${json.allowed_billing_countries[121]}", "${json.allowed_billing_countries[122]}", "${json.allowed_billing_countries[123]}", "${json.allowed_billing_countries[124]}", "${json.allowed_billing_countries[125]}", "${json.allowed_billing_countries[126]}", "${json.allowed_billing_countries[127]}", "${json.allowed_billing_countries[128]}", "${json.allowed_billing_countries[129]}", "${json.allowed_billing_countries[130]}", "${json.allowed_billing_countries[131]}", "${json.allowed_billing_countries[132]}", "${json.allowed_billing_countries[133]}", "${json.allowed_billing_countries[134]}", "${json.allowed_billing_countries[135]}", "${json.allowed_billing_countries[136]}", "${json.allowed_billing_countries[137]}", "${json.allowed_billing_countries[138]}", "${json.allowed_billing_countries[139]}", "${json.allowed_billing_countries[140]}", "${json.allowed_billing_countries[141]}", "${json.allowed_billing_countries[142]}", "${json.allowed_billing_countries[143]}", "${json.allowed_billing_countries[144]}", "${json.allowed_billing_countries[145]}", "${json.allowed_billing_countries[146]}", "${json.allowed_billing_countries[147]}", "${json.allowed_billing_countries[148]}", "${json.allowed_billing_countries[149]}", "${json.allowed_billing_countries[150]}", "${json.allowed_billing_countries[151]}", "${json.allowed_billing_countries[152]}", "${json.allowed_billing_countries[153]}", "${json.allowed_billing_countries[154]}", "${json.allowed_billing_countries[155]}", "${json.allowed_billing_countries[156]}", "${json.allowed_billing_countries[157]}", "${json.allowed_billing_countries[158]}", "${json.allowed_billing_countries[159]}", "${json.allowed_billing_countries[160]}", "${json.allowed_billing_countries[161]}", "${json.allowed_billing_countries[162]}", "${json.allowed_billing_countries[163]}", 
"${json.allowed_billing_countries[164]}", "${json.allowed_billing_countries[165]}", "${json.allowed_billing_countries[166]}", "${json.allowed_billing_countries[167]}", "${json.allowed_billing_countries[168]}", "${json.allowed_billing_countries[169]}", "${json.allowed_billing_countries[170]}", "${json.allowed_billing_countries[171]}", "${json.allowed_billing_countries[172]}", "${json.allowed_billing_countries[173]}", "${json.allowed_billing_countries[174]}", "${json.allowed_billing_countries[175]}", "${json.allowed_billing_countries[176]}", "${json.allowed_billing_countries[177]}", "${json.allowed_billing_countries[178]}", "${json.allowed_billing_countries[179]}", "${json.allowed_billing_countries[180]}", "${json.allowed_billing_countries[181]}", "${json.allowed_billing_countries[182]}", "${json.allowed_billing_countries[183]}", "${json.allowed_billing_countries[184]}", "${json.allowed_billing_countries[185]}", "${json.allowed_billing_countries[186]}", "${json.allowed_billing_countries[187]}", "${json.allowed_billing_countries[188]}", "${json.allowed_billing_countries[189]}", "${json.allowed_billing_countries[190]}", "${json.allowed_billing_countries[191]}", "${json.allowed_billing_countries[192]}", "${json.allowed_billing_countries[193]}", "${json.allowed_billing_countries[194]}", "${json.allowed_billing_countries[195]}", "${json.allowed_billing_countries[196]}", "${json.allowed_billing_countries[197]}", "${json.allowed_billing_countries[198]}", "${json.allowed_billing_countries[199]}", "${json.allowed_billing_countries[200]}", "${json.allowed_billing_countries[201]}", "${json.allowed_billing_countries[202]}", "${json.allowed_billing_countries[203]}", "${json.allowed_billing_countries[204]}", "${json.allowed_billing_countries[205]}", "${json.allowed_billing_countries[206]}", "${json.allowed_billing_countries[207]}", "${json.allowed_billing_countries[208]}", "${json.allowed_billing_countries[209]}", "${json.allowed_billing_countries[210]}"], 
"allowed_shipping_countries": ["${json.allowed_shipping_countries[0]}", "${json.allowed_shipping_countries[1]}", "${json.allowed_shipping_countries[2]}", "${json.allowed_shipping_countries[3]}", "${json.allowed_shipping_countries[4]}", "${json.allowed_shipping_countries[5]}", "${json.allowed_shipping_countries[6]}", "${json.allowed_shipping_countries[7]}", "${json.allowed_shipping_countries[8]}", "${json.allowed_shipping_countries[9]}", "${json.allowed_shipping_countries[10]}", "${json.allowed_shipping_countries[11]}", "${json.allowed_shipping_countries[12]}", "${json.allowed_shipping_countries[13]}", "${json.allowed_shipping_countries[14]}", "${json.allowed_shipping_countries[15]}", "${json.allowed_shipping_countries[16]}", "${json.allowed_shipping_countries[17]}", "${json.allowed_shipping_countries[18]}", "${json.allowed_shipping_countries[19]}", "${json.allowed_shipping_countries[20]}", "${json.allowed_shipping_countries[21]}", "${json.allowed_shipping_countries[22]}", "${json.allowed_shipping_countries[23]}", "${json.allowed_shipping_countries[24]}", "${json.allowed_shipping_countries[25]}", "${json.allowed_shipping_countries[26]}", "${json.allowed_shipping_countries[27]}", "${json.allowed_shipping_countries[28]}", "${json.allowed_shipping_countries[29]}", "${json.allowed_shipping_countries[30]}", "${json.allowed_shipping_countries[31]}", "${json.allowed_shipping_countries[32]}", "${json.allowed_shipping_countries[33]}", "${json.allowed_shipping_countries[34]}", "${json.allowed_shipping_countries[35]}", "${json.allowed_shipping_countries[36]}", "${json.allowed_shipping_countries[37]}", "${json.allowed_shipping_countries[38]}", "${json.allowed_shipping_countries[39]}", "${json.allowed_shipping_countries[40]}", "${json.allowed_shipping_countries[41]}", "${json.allowed_shipping_countries[42]}", "${json.allowed_shipping_countries[43]}", "${json.allowed_shipping_countries[44]}", "${json.allowed_shipping_countries[45]}", "${json.allowed_shipping_countries[46]}", 
"${json.allowed_shipping_countries[47]}", "${json.allowed_shipping_countries[48]}", "${json.allowed_shipping_countries[49]}", "${json.allowed_shipping_countries[50]}", "${json.allowed_shipping_countries[51]}", "${json.allowed_shipping_countries[52]}", "${json.allowed_shipping_countries[53]}", "${json.allowed_shipping_countries[54]}", "${json.allowed_shipping_countries[55]}", "${json.allowed_shipping_countries[56]}", "${json.allowed_shipping_countries[57]}", "${json.allowed_shipping_countries[58]}", "${json.allowed_shipping_countries[59]}", "${json.allowed_shipping_countries[60]}", "${json.allowed_shipping_countries[61]}", "${json.allowed_shipping_countries[62]}", "${json.allowed_shipping_countries[63]}", "${json.allowed_shipping_countries[64]}", "${json.allowed_shipping_countries[65]}", "${json.allowed_shipping_countries[66]}", "${json.allowed_shipping_countries[67]}", "${json.allowed_shipping_countries[68]}", "${json.allowed_shipping_countries[69]}", "${json.allowed_shipping_countries[70]}", "${json.allowed_shipping_countries[71]}", "${json.allowed_shipping_countries[72]}", "${json.allowed_shipping_countries[73]}", "${json.allowed_shipping_countries[74]}", "${json.allowed_shipping_countries[75]}", "${json.allowed_shipping_countries[76]}", "${json.allowed_shipping_countries[77]}", "${json.allowed_shipping_countries[78]}", "${json.allowed_shipping_countries[79]}", "${json.allowed_shipping_countries[80]}", "${json.allowed_shipping_countries[81]}", "${json.allowed_shipping_countries[82]}", "${json.allowed_shipping_countries[83]}", "${json.allowed_shipping_countries[84]}", "${json.allowed_shipping_countries[85]}", "${json.allowed_shipping_countries[86]}", "${json.allowed_shipping_countries[87]}", "${json.allowed_shipping_countries[88]}", "${json.allowed_shipping_countries[89]}", "${json.allowed_shipping_countries[90]}", "${json.allowed_shipping_countries[91]}", "${json.allowed_shipping_countries[92]}", "${json.allowed_shipping_countries[93]}", 
"${json.allowed_shipping_countries[94]}", "${json.allowed_shipping_countries[95]}", "${json.allowed_shipping_countries[96]}", "${json.allowed_shipping_countries[97]}", "${json.allowed_shipping_countries[98]}", "${json.allowed_shipping_countries[99]}", "${json.allowed_shipping_countries[100]}", "${json.allowed_shipping_countries[101]}", "${json.allowed_shipping_countries[102]}", "${json.allowed_shipping_countries[103]}", "${json.allowed_shipping_countries[104]}", "${json.allowed_shipping_countries[105]}", "${json.allowed_shipping_countries[106]}", "${json.allowed_shipping_countries[107]}", "${json.allowed_shipping_countries[108]}", "${json.allowed_shipping_countries[109]}", "${json.allowed_shipping_countries[110]}", "${json.allowed_shipping_countries[111]}", "${json.allowed_shipping_countries[112]}", "${json.allowed_shipping_countries[113]}", "${json.allowed_shipping_countries[114]}", "${json.allowed_shipping_countries[115]}", "${json.allowed_shipping_countries[116]}", "${json.allowed_shipping_countries[117]}", "${json.allowed_shipping_countries[118]}", "${json.allowed_shipping_countries[119]}", "${json.allowed_shipping_countries[120]}", "${json.allowed_shipping_countries[121]}", "${json.allowed_shipping_countries[122]}", "${json.allowed_shipping_countries[123]}", "${json.allowed_shipping_countries[124]}", "${json.allowed_shipping_countries[125]}", "${json.allowed_shipping_countries[126]}", "${json.allowed_shipping_countries[127]}", "${json.allowed_shipping_countries[128]}", "${json.allowed_shipping_countries[129]}", "${json.allowed_shipping_countries[130]}", "${json.allowed_shipping_countries[131]}", "${json.allowed_shipping_countries[132]}", "${json.allowed_shipping_countries[133]}", "${json.allowed_shipping_countries[134]}", "${json.allowed_shipping_countries[135]}", "${json.allowed_shipping_countries[136]}", "${json.allowed_shipping_countries[137]}", "${json.allowed_shipping_countries[138]}", "${json.allowed_shipping_countries[139]}", 
"${json.allowed_shipping_countries[140]}", "${json.allowed_shipping_countries[141]}", "${json.allowed_shipping_countries[142]}", "${json.allowed_shipping_countries[143]}", "${json.allowed_shipping_countries[144]}", "${json.allowed_shipping_countries[145]}", "${json.allowed_shipping_countries[146]}", "${json.allowed_shipping_countries[147]}", "${json.allowed_shipping_countries[148]}", "${json.allowed_shipping_countries[149]}", "${json.allowed_shipping_countries[150]}", "${json.allowed_shipping_countries[151]}", "${json.allowed_shipping_countries[152]}", "${json.allowed_shipping_countries[153]}", "${json.allowed_shipping_countries[154]}", "${json.allowed_shipping_countries[155]}", "${json.allowed_shipping_countries[156]}", "${json.allowed_shipping_countries[157]}", "${json.allowed_shipping_countries[158]}", "${json.allowed_shipping_countries[159]}", "${json.allowed_shipping_countries[160]}", "${json.allowed_shipping_countries[161]}", "${json.allowed_shipping_countries[162]}", "${json.allowed_shipping_countries[163]}", "${json.allowed_shipping_countries[164]}", "${json.allowed_shipping_countries[165]}", "${json.allowed_shipping_countries[166]}", "${json.allowed_shipping_countries[167]}", "${json.allowed_shipping_countries[168]}", "${json.allowed_shipping_countries[169]}", "${json.allowed_shipping_countries[170]}", "${json.allowed_shipping_countries[171]}", "${json.allowed_shipping_countries[172]}", "${json.allowed_shipping_countries[173]}", "${json.allowed_shipping_countries[174]}", "${json.allowed_shipping_countries[175]}", "${json.allowed_shipping_countries[176]}", "${json.allowed_shipping_countries[177]}", "${json.allowed_shipping_countries[178]}", "${json.allowed_shipping_countries[179]}", "${json.allowed_shipping_countries[180]}", "${json.allowed_shipping_countries[181]}", "${json.allowed_shipping_countries[182]}", "${json.allowed_shipping_countries[183]}", "${json.allowed_shipping_countries[184]}", "${json.allowed_shipping_countries[185]}", 
"${json.allowed_shipping_countries[186]}", "${json.allowed_shipping_countries[187]}", "${json.allowed_shipping_countries[188]}", "${json.allowed_shipping_countries[189]}", "${json.allowed_shipping_countries[190]}", "${json.allowed_shipping_countries[191]}", "${json.allowed_shipping_countries[192]}", "${json.allowed_shipping_countries[193]}", "${json.allowed_shipping_countries[194]}", "${json.allowed_shipping_countries[195]}", "${json.allowed_shipping_countries[196]}", "${json.allowed_shipping_countries[197]}", "${json.allowed_shipping_countries[198]}", "${json.allowed_shipping_countries[199]}", "${json.allowed_shipping_countries[200]}", "${json.allowed_shipping_countries[201]}", "${json.allowed_shipping_countries[202]}", "${json.allowed_shipping_countries[203]}", "${json.allowed_shipping_countries[204]}", "${json.allowed_shipping_countries[205]}", "${json.allowed_shipping_countries[206]}", "${json.allowed_shipping_countries[207]}", "${json.allowed_shipping_countries[208]}", "${json.allowed_shipping_countries[209]}", "${json.allowed_shipping_countries[210]}"], "analytics_user_id": "${json.analytics_user_id}", "cart": { "items": [ { "image_url": "${json.cart.items[0].image_url}", "name": "${json.cart.items[0].name}", "product_url": "${json.cart.items[0].product_url}", "quantity": "${json.cart.items[0].quantity}", "reference": "${json.cart.items[0].reference}", "tax_rate": "${json.cart.items[0].tax_rate}", "total_price_excluding_tax": "${json.cart.items[0].total_price_excluding_tax}", "total_price_including_tax": "${json.cart.items[0].total_price_including_tax}", "total_tax_amount": "${json.cart.items[0].total_tax_amount}", "type": "${json.cart.items[0].type}", "unit_price": "${json.cart.items[0].unit_price}" } ], "subtotal": "${json.cart.subtotal}", "total_discount_amount_excluding_tax": "${json.cart.total_discount_amount_excluding_tax}", "total_price_excluding_tax": "${json.cart.total_price_excluding_tax}", "total_price_including_tax": 
"${json.cart.total_price_including_tax}", "total_shipping_amount_excluding_tax": "${json.cart.total_shipping_amount_excluding_tax}", "total_store_credit": "${json.cart.total_store_credit}", "total_surcharge_amount_excluding_tax": "${json.cart.total_surcharge_amount_excluding_tax}", "total_tax_amount": "${json.cart.total_tax_amount}" }, "correlation_id": "f6df29e7-f850-4c36-81fc-11def2f44b81", "merchant_urls": { "checkout": "${json.merchant_urls.checkout}", "confirmation": "${json.merchant_urls.confirmation}", "terms": "${json.merchant_urls.terms}" }, "options": { "allow_separate_shipping_address": "${json.options.allow_separate_shipping_address}", "allowed_customer_types": ["${json.options.allowed_customer_types[0]}"], "date_of_birth_mandatory": "${json.options.date_of_birth_mandatory}", "national_identification_number_mandatory": "${json.options.national_identification_number_mandatory}", "payment_selector_on_load": "${json.options.payment_selector_on_load}" }, "preview_payment_methods": [ { "data": { "days": "${json.preview_payment_methods[0].data.days}" }, "id": "${json.preview_payment_methods[0].id}", "type": "${json.preview_payment_methods[0].type}" }, { "data": { "allow_saved_card": "${json.preview_payment_methods[1].data.allow_saved_card}", "available_cards": ["${json.preview_payment_methods[1].data.available_cards[0]}", "${json.preview_payment_methods[1].data.available_cards[1]}"], "do_save_card": "${json.preview_payment_methods[1].data.do_save_card}" }, "id": "${json.preview_payment_methods[1].id}", "type": "${json.preview_payment_methods[1].type}" } ], "required_fields": ["${json.required_fields[0]}", "${json.required_fields[1]}", "${json.required_fields[2]}", "${json.required_fields[3]}", "${json.required_fields[4]}", "${json.required_fields[5]}", "billing_address.care_of"], "shared": { "billing_address": { "care_of": "C/O Hakan Ostlund", "city": "AlingHelsingstadfors", "country": "${json.shared.billing_address.country}", "email": 
"${json.shared.billing_address.email}", "family_name": "Anglund", "given_name": "Eva InvoiceGreenNewSpec", "phone": "+46700012878", "postal_code": "${json.shared.billing_address.postal_code}", "street_address": "Sveavgen 44, 11111 Stockholm, Sweden Eriks Gata gatan" }, "challenge": { "country": "${json.shared.challenge.country}", "email": "${json.shared.challenge.email}", "postal_code": "${json.shared.challenge.postal_code}" }, "currency": "${json.shared.currency}", "customer": { "national_identification_number": "8910210312", "type": "${json.shared.customer.type}" }, "language": "${json.shared.language}" }, "status": { "prescreened": "${json.status.prescreened}", "require_terms_consent": "${json.status.require_terms_consent}" } }`, { "headers": { "Authorization": "Checkout otherStuffz", "User-Agent": "SysTest - perf", "Content-Type": "application/vnd.checkout.client-order-v1+json", "Accept": "application/vnd.checkout.server-order-v1+json", "Accept-Encoding": "gzip;q=1.0,deflate;q=0.6,identity;q=0.3", "Host": "some-other-host.example.com" } } ) if (!check(res, {"status is 200": (r) => r.status === 200 })) { return }; json = JSON.parse(res.body); // Request #5 res = http.post("path_to_url", `{ "allowed_billing_countries": ["${json.allowed_billing_countries[0]}", "${json.allowed_billing_countries[1]}", "${json.allowed_billing_countries[2]}", "${json.allowed_billing_countries[3]}", "${json.allowed_billing_countries[4]}", "${json.allowed_billing_countries[5]}", "${json.allowed_billing_countries[6]}", "${json.allowed_billing_countries[7]}", "${json.allowed_billing_countries[8]}", "${json.allowed_billing_countries[9]}", "${json.allowed_billing_countries[10]}", "${json.allowed_billing_countries[11]}", "${json.allowed_billing_countries[12]}", "${json.allowed_billing_countries[13]}", "${json.allowed_billing_countries[14]}", "${json.allowed_billing_countries[15]}", "${json.allowed_billing_countries[16]}", "${json.allowed_billing_countries[17]}", 
"${json.allowed_billing_countries[18]}", "${json.allowed_billing_countries[19]}", "${json.allowed_billing_countries[20]}", "${json.allowed_billing_countries[21]}", "${json.allowed_billing_countries[22]}", "${json.allowed_billing_countries[23]}", "${json.allowed_billing_countries[24]}", "${json.allowed_billing_countries[25]}", "${json.allowed_billing_countries[26]}", "${json.allowed_billing_countries[27]}", "${json.allowed_billing_countries[28]}", "${json.allowed_billing_countries[29]}", "${json.allowed_billing_countries[30]}", "${json.allowed_billing_countries[31]}", "${json.allowed_billing_countries[32]}", "${json.allowed_billing_countries[33]}", "${json.allowed_billing_countries[34]}", "${json.allowed_billing_countries[35]}", "${json.allowed_billing_countries[36]}", "${json.allowed_billing_countries[37]}", "${json.allowed_billing_countries[38]}", "${json.allowed_billing_countries[39]}", "${json.allowed_billing_countries[40]}", "${json.allowed_billing_countries[41]}", "${json.allowed_billing_countries[42]}", "${json.allowed_billing_countries[43]}", "${json.allowed_billing_countries[44]}", "${json.allowed_billing_countries[45]}", "${json.allowed_billing_countries[46]}", "${json.allowed_billing_countries[47]}", "${json.allowed_billing_countries[48]}", "${json.allowed_billing_countries[49]}", "${json.allowed_billing_countries[50]}", "${json.allowed_billing_countries[51]}", "${json.allowed_billing_countries[52]}", "${json.allowed_billing_countries[53]}", "${json.allowed_billing_countries[54]}", "${json.allowed_billing_countries[55]}", "${json.allowed_billing_countries[56]}", "${json.allowed_billing_countries[57]}", "${json.allowed_billing_countries[58]}", "${json.allowed_billing_countries[59]}", "${json.allowed_billing_countries[60]}", "${json.allowed_billing_countries[61]}", "${json.allowed_billing_countries[62]}", "${json.allowed_billing_countries[63]}", "${json.allowed_billing_countries[64]}", "${json.allowed_billing_countries[65]}", 
"${json.allowed_billing_countries[66]}", "${json.allowed_billing_countries[67]}", "${json.allowed_billing_countries[68]}", "${json.allowed_billing_countries[69]}", "${json.allowed_billing_countries[70]}", "${json.allowed_billing_countries[71]}", "${json.allowed_billing_countries[72]}", "${json.allowed_billing_countries[73]}", "${json.allowed_billing_countries[74]}", "${json.allowed_billing_countries[75]}", "${json.allowed_billing_countries[76]}", "${json.allowed_billing_countries[77]}", "${json.allowed_billing_countries[78]}", "${json.allowed_billing_countries[79]}", "${json.allowed_billing_countries[80]}", "${json.allowed_billing_countries[81]}", "${json.allowed_billing_countries[82]}", "${json.allowed_billing_countries[83]}", "${json.allowed_billing_countries[84]}", "${json.allowed_billing_countries[85]}", "${json.allowed_billing_countries[86]}", "${json.allowed_billing_countries[87]}", "${json.allowed_billing_countries[88]}", "${json.allowed_billing_countries[89]}", "${json.allowed_billing_countries[90]}", "${json.allowed_billing_countries[91]}", "${json.allowed_billing_countries[92]}", "${json.allowed_billing_countries[93]}", "${json.allowed_billing_countries[94]}", "${json.allowed_billing_countries[95]}", "${json.allowed_billing_countries[96]}", "${json.allowed_billing_countries[97]}", "${json.allowed_billing_countries[98]}", "${json.allowed_billing_countries[99]}", "${json.allowed_billing_countries[100]}", "${json.allowed_billing_countries[101]}", "${json.allowed_billing_countries[102]}", "${json.allowed_billing_countries[103]}", "${json.allowed_billing_countries[104]}", "${json.allowed_billing_countries[105]}", "${json.allowed_billing_countries[106]}", "${json.allowed_billing_countries[107]}", "${json.allowed_billing_countries[108]}", "${json.allowed_billing_countries[109]}", "${json.allowed_billing_countries[110]}", "${json.allowed_billing_countries[111]}", "${json.allowed_billing_countries[112]}", "${json.allowed_billing_countries[113]}", 
"${json.allowed_billing_countries[114]}", "${json.allowed_billing_countries[115]}", "${json.allowed_billing_countries[116]}", "${json.allowed_billing_countries[117]}", "${json.allowed_billing_countries[118]}", "${json.allowed_billing_countries[119]}", "${json.allowed_billing_countries[120]}", "${json.allowed_billing_countries[121]}", "${json.allowed_billing_countries[122]}", "${json.allowed_billing_countries[123]}", "${json.allowed_billing_countries[124]}", "${json.allowed_billing_countries[125]}", "${json.allowed_billing_countries[126]}", "${json.allowed_billing_countries[127]}", "${json.allowed_billing_countries[128]}", "${json.allowed_billing_countries[129]}", "${json.allowed_billing_countries[130]}", "${json.allowed_billing_countries[131]}", "${json.allowed_billing_countries[132]}", "${json.allowed_billing_countries[133]}", "${json.allowed_billing_countries[134]}", "${json.allowed_billing_countries[135]}", "${json.allowed_billing_countries[136]}", "${json.allowed_billing_countries[137]}", "${json.allowed_billing_countries[138]}", "${json.allowed_billing_countries[139]}", "${json.allowed_billing_countries[140]}", "${json.allowed_billing_countries[141]}", "${json.allowed_billing_countries[142]}", "${json.allowed_billing_countries[143]}", "${json.allowed_billing_countries[144]}", "${json.allowed_billing_countries[145]}", "${json.allowed_billing_countries[146]}", "${json.allowed_billing_countries[147]}", "${json.allowed_billing_countries[148]}", "${json.allowed_billing_countries[149]}", "${json.allowed_billing_countries[150]}", "${json.allowed_billing_countries[151]}", "${json.allowed_billing_countries[152]}", "${json.allowed_billing_countries[153]}", "${json.allowed_billing_countries[154]}", "${json.allowed_billing_countries[155]}", "${json.allowed_billing_countries[156]}", "${json.allowed_billing_countries[157]}", "${json.allowed_billing_countries[158]}", "${json.allowed_billing_countries[159]}", "${json.allowed_billing_countries[160]}", 
"${json.allowed_billing_countries[161]}", "${json.allowed_billing_countries[162]}", "${json.allowed_billing_countries[163]}", "${json.allowed_billing_countries[164]}", "${json.allowed_billing_countries[165]}", "${json.allowed_billing_countries[166]}", "${json.allowed_billing_countries[167]}", "${json.allowed_billing_countries[168]}", "${json.allowed_billing_countries[169]}", "${json.allowed_billing_countries[170]}", "${json.allowed_billing_countries[171]}", "${json.allowed_billing_countries[172]}", "${json.allowed_billing_countries[173]}", "${json.allowed_billing_countries[174]}", "${json.allowed_billing_countries[175]}", "${json.allowed_billing_countries[176]}", "${json.allowed_billing_countries[177]}", "${json.allowed_billing_countries[178]}", "${json.allowed_billing_countries[179]}", "${json.allowed_billing_countries[180]}", "${json.allowed_billing_countries[181]}", "${json.allowed_billing_countries[182]}", "${json.allowed_billing_countries[183]}", "${json.allowed_billing_countries[184]}", "${json.allowed_billing_countries[185]}", "${json.allowed_billing_countries[186]}", "${json.allowed_billing_countries[187]}", "${json.allowed_billing_countries[188]}", "${json.allowed_billing_countries[189]}", "${json.allowed_billing_countries[190]}", "${json.allowed_billing_countries[191]}", "${json.allowed_billing_countries[192]}", "${json.allowed_billing_countries[193]}", "${json.allowed_billing_countries[194]}", "${json.allowed_billing_countries[195]}", "${json.allowed_billing_countries[196]}", "${json.allowed_billing_countries[197]}", "${json.allowed_billing_countries[198]}", "${json.allowed_billing_countries[199]}", "${json.allowed_billing_countries[200]}", "${json.allowed_billing_countries[201]}", "${json.allowed_billing_countries[202]}", "${json.allowed_billing_countries[203]}", "${json.allowed_billing_countries[204]}", "${json.allowed_billing_countries[205]}", "${json.allowed_billing_countries[206]}", "${json.allowed_billing_countries[207]}", 
"${json.allowed_billing_countries[208]}", "${json.allowed_billing_countries[209]}", "${json.allowed_billing_countries[210]}"], "allowed_shipping_countries": ["${json.allowed_shipping_countries[0]}", "${json.allowed_shipping_countries[1]}", "${json.allowed_shipping_countries[2]}", "${json.allowed_shipping_countries[3]}", "${json.allowed_shipping_countries[4]}", "${json.allowed_shipping_countries[5]}", "${json.allowed_shipping_countries[6]}", "${json.allowed_shipping_countries[7]}", "${json.allowed_shipping_countries[8]}", "${json.allowed_shipping_countries[9]}", "${json.allowed_shipping_countries[10]}", "${json.allowed_shipping_countries[11]}", "${json.allowed_shipping_countries[12]}", "${json.allowed_shipping_countries[13]}", "${json.allowed_shipping_countries[14]}", "${json.allowed_shipping_countries[15]}", "${json.allowed_shipping_countries[16]}", "${json.allowed_shipping_countries[17]}", "${json.allowed_shipping_countries[18]}", "${json.allowed_shipping_countries[19]}", "${json.allowed_shipping_countries[20]}", "${json.allowed_shipping_countries[21]}", "${json.allowed_shipping_countries[22]}", "${json.allowed_shipping_countries[23]}", "${json.allowed_shipping_countries[24]}", "${json.allowed_shipping_countries[25]}", "${json.allowed_shipping_countries[26]}", "${json.allowed_shipping_countries[27]}", "${json.allowed_shipping_countries[28]}", "${json.allowed_shipping_countries[29]}", "${json.allowed_shipping_countries[30]}", "${json.allowed_shipping_countries[31]}", "${json.allowed_shipping_countries[32]}", "${json.allowed_shipping_countries[33]}", "${json.allowed_shipping_countries[34]}", "${json.allowed_shipping_countries[35]}", "${json.allowed_shipping_countries[36]}", "${json.allowed_shipping_countries[37]}", "${json.allowed_shipping_countries[38]}", "${json.allowed_shipping_countries[39]}", "${json.allowed_shipping_countries[40]}", "${json.allowed_shipping_countries[41]}", "${json.allowed_shipping_countries[42]}", "${json.allowed_shipping_countries[43]}", 
"${json.allowed_shipping_countries[44]}", "${json.allowed_shipping_countries[45]}", "${json.allowed_shipping_countries[46]}", "${json.allowed_shipping_countries[47]}", "${json.allowed_shipping_countries[48]}", "${json.allowed_shipping_countries[49]}", "${json.allowed_shipping_countries[50]}", "${json.allowed_shipping_countries[51]}", "${json.allowed_shipping_countries[52]}", "${json.allowed_shipping_countries[53]}", "${json.allowed_shipping_countries[54]}", "${json.allowed_shipping_countries[55]}", "${json.allowed_shipping_countries[56]}", "${json.allowed_shipping_countries[57]}", "${json.allowed_shipping_countries[58]}", "${json.allowed_shipping_countries[59]}", "${json.allowed_shipping_countries[60]}", "${json.allowed_shipping_countries[61]}", "${json.allowed_shipping_countries[62]}", "${json.allowed_shipping_countries[63]}", "${json.allowed_shipping_countries[64]}", "${json.allowed_shipping_countries[65]}", "${json.allowed_shipping_countries[66]}", "${json.allowed_shipping_countries[67]}", "${json.allowed_shipping_countries[68]}", "${json.allowed_shipping_countries[69]}", "${json.allowed_shipping_countries[70]}", "${json.allowed_shipping_countries[71]}", "${json.allowed_shipping_countries[72]}", "${json.allowed_shipping_countries[73]}", "${json.allowed_shipping_countries[74]}", "${json.allowed_shipping_countries[75]}", "${json.allowed_shipping_countries[76]}", "${json.allowed_shipping_countries[77]}", "${json.allowed_shipping_countries[78]}", "${json.allowed_shipping_countries[79]}", "${json.allowed_shipping_countries[80]}", "${json.allowed_shipping_countries[81]}", "${json.allowed_shipping_countries[82]}", "${json.allowed_shipping_countries[83]}", "${json.allowed_shipping_countries[84]}", "${json.allowed_shipping_countries[85]}", "${json.allowed_shipping_countries[86]}", "${json.allowed_shipping_countries[87]}", "${json.allowed_shipping_countries[88]}", "${json.allowed_shipping_countries[89]}", "${json.allowed_shipping_countries[90]}", 
"${json.allowed_shipping_countries[91]}", "${json.allowed_shipping_countries[92]}", "${json.allowed_shipping_countries[93]}", "${json.allowed_shipping_countries[94]}", "${json.allowed_shipping_countries[95]}", "${json.allowed_shipping_countries[96]}", "${json.allowed_shipping_countries[97]}", "${json.allowed_shipping_countries[98]}", "${json.allowed_shipping_countries[99]}", "${json.allowed_shipping_countries[100]}", "${json.allowed_shipping_countries[101]}", "${json.allowed_shipping_countries[102]}", "${json.allowed_shipping_countries[103]}", "${json.allowed_shipping_countries[104]}", "${json.allowed_shipping_countries[105]}", "${json.allowed_shipping_countries[106]}", "${json.allowed_shipping_countries[107]}", "${json.allowed_shipping_countries[108]}", "${json.allowed_shipping_countries[109]}", "${json.allowed_shipping_countries[110]}", "${json.allowed_shipping_countries[111]}", "${json.allowed_shipping_countries[112]}", "${json.allowed_shipping_countries[113]}", "${json.allowed_shipping_countries[114]}", "${json.allowed_shipping_countries[115]}", "${json.allowed_shipping_countries[116]}", "${json.allowed_shipping_countries[117]}", "${json.allowed_shipping_countries[118]}", "${json.allowed_shipping_countries[119]}", "${json.allowed_shipping_countries[120]}", "${json.allowed_shipping_countries[121]}", "${json.allowed_shipping_countries[122]}", "${json.allowed_shipping_countries[123]}", "${json.allowed_shipping_countries[124]}", "${json.allowed_shipping_countries[125]}", "${json.allowed_shipping_countries[126]}", "${json.allowed_shipping_countries[127]}", "${json.allowed_shipping_countries[128]}", "${json.allowed_shipping_countries[129]}", "${json.allowed_shipping_countries[130]}", "${json.allowed_shipping_countries[131]}", "${json.allowed_shipping_countries[132]}", "${json.allowed_shipping_countries[133]}", "${json.allowed_shipping_countries[134]}", "${json.allowed_shipping_countries[135]}", "${json.allowed_shipping_countries[136]}", 
"${json.allowed_shipping_countries[137]}", "${json.allowed_shipping_countries[138]}", "${json.allowed_shipping_countries[139]}", "${json.allowed_shipping_countries[140]}", "${json.allowed_shipping_countries[141]}", "${json.allowed_shipping_countries[142]}", "${json.allowed_shipping_countries[143]}", "${json.allowed_shipping_countries[144]}", "${json.allowed_shipping_countries[145]}", "${json.allowed_shipping_countries[146]}", "${json.allowed_shipping_countries[147]}", "${json.allowed_shipping_countries[148]}", "${json.allowed_shipping_countries[149]}", "${json.allowed_shipping_countries[150]}", "${json.allowed_shipping_countries[151]}", "${json.allowed_shipping_countries[152]}", "${json.allowed_shipping_countries[153]}", "${json.allowed_shipping_countries[154]}", "${json.allowed_shipping_countries[155]}", "${json.allowed_shipping_countries[156]}", "${json.allowed_shipping_countries[157]}", "${json.allowed_shipping_countries[158]}", "${json.allowed_shipping_countries[159]}", "${json.allowed_shipping_countries[160]}", "${json.allowed_shipping_countries[161]}", "${json.allowed_shipping_countries[162]}", "${json.allowed_shipping_countries[163]}", "${json.allowed_shipping_countries[164]}", "${json.allowed_shipping_countries[165]}", "${json.allowed_shipping_countries[166]}", "${json.allowed_shipping_countries[167]}", "${json.allowed_shipping_countries[168]}", "${json.allowed_shipping_countries[169]}", "${json.allowed_shipping_countries[170]}", "${json.allowed_shipping_countries[171]}", "${json.allowed_shipping_countries[172]}", "${json.allowed_shipping_countries[173]}", "${json.allowed_shipping_countries[174]}", "${json.allowed_shipping_countries[175]}", "${json.allowed_shipping_countries[176]}", "${json.allowed_shipping_countries[177]}", "${json.allowed_shipping_countries[178]}", "${json.allowed_shipping_countries[179]}", "${json.allowed_shipping_countries[180]}", "${json.allowed_shipping_countries[181]}", "${json.allowed_shipping_countries[182]}", 
"${json.allowed_shipping_countries[183]}", "${json.allowed_shipping_countries[184]}", "${json.allowed_shipping_countries[185]}", "${json.allowed_shipping_countries[186]}", "${json.allowed_shipping_countries[187]}", "${json.allowed_shipping_countries[188]}", "${json.allowed_shipping_countries[189]}", "${json.allowed_shipping_countries[190]}", "${json.allowed_shipping_countries[191]}", "${json.allowed_shipping_countries[192]}", "${json.allowed_shipping_countries[193]}", "${json.allowed_shipping_countries[194]}", "${json.allowed_shipping_countries[195]}", "${json.allowed_shipping_countries[196]}", "${json.allowed_shipping_countries[197]}", "${json.allowed_shipping_countries[198]}", "${json.allowed_shipping_countries[199]}", "${json.allowed_shipping_countries[200]}", "${json.allowed_shipping_countries[201]}", "${json.allowed_shipping_countries[202]}", "${json.allowed_shipping_countries[203]}", "${json.allowed_shipping_countries[204]}", "${json.allowed_shipping_countries[205]}", "${json.allowed_shipping_countries[206]}", "${json.allowed_shipping_countries[207]}", "${json.allowed_shipping_countries[208]}", "${json.allowed_shipping_countries[209]}", "${json.allowed_shipping_countries[210]}"], "analytics_user_id": "${json.analytics_user_id}", "available_payment_methods": [ { "data": { "days": "${json.available_payment_methods[0].data.days}" }, "id": "${json.available_payment_methods[0].id}", "type": "${json.available_payment_methods[0].type}" } ], "cart": { "items": [ { "image_url": "${json.cart.items[0].image_url}", "name": "${json.cart.items[0].name}", "product_url": "${json.cart.items[0].product_url}", "quantity": "${json.cart.items[0].quantity}", "reference": "${json.cart.items[0].reference}", "tax_rate": "${json.cart.items[0].tax_rate}", "total_price_excluding_tax": "${json.cart.items[0].total_price_excluding_tax}", "total_price_including_tax": "${json.cart.items[0].total_price_including_tax}", "total_tax_amount": "${json.cart.items[0].total_tax_amount}", "type": 
"${json.cart.items[0].type}", "unit_price": "${json.cart.items[0].unit_price}" } ], "subtotal": "${json.cart.subtotal}", "total_discount_amount_excluding_tax": "${json.cart.total_discount_amount_excluding_tax}", "total_price_excluding_tax": "${json.cart.total_price_excluding_tax}", "total_price_including_tax": "${json.cart.total_price_including_tax}", "total_shipping_amount_excluding_tax": "${json.cart.total_shipping_amount_excluding_tax}", "total_store_credit": "${json.cart.total_store_credit}", "total_surcharge_amount_excluding_tax": "${json.cart.total_surcharge_amount_excluding_tax}", "total_tax_amount": "${json.cart.total_tax_amount}" }, "correlation_id": "a6c51342-b107-4463-a2a0-b530f1bac03e", "merchant_urls": { "checkout": "${json.merchant_urls.checkout}", "confirmation": "${json.merchant_urls.confirmation}", "terms": "${json.merchant_urls.terms}" }, "options": { "allow_separate_shipping_address": "${json.options.allow_separate_shipping_address}", "allowed_customer_types": ["${json.options.allowed_customer_types[0]}"], "date_of_birth_mandatory": "${json.options.date_of_birth_mandatory}", "national_identification_number_mandatory": "${json.options.national_identification_number_mandatory}", "payment_selector_on_load": "${json.options.payment_selector_on_load}" }, "shared": { "billing_address": { "care_of": "${json.shared.billing_address.care_of}", "city": "${json.shared.billing_address.city}", "country": "${json.shared.billing_address.country}", "email": "${json.shared.billing_address.email}", "family_name": "${json.shared.billing_address.family_name}", "given_name": "${json.shared.billing_address.given_name}", "phone": "${json.shared.billing_address.phone}", "postal_code": "${json.shared.billing_address.postal_code}", "street_address": "${json.shared.billing_address.street_address}", "street_address2": "${json.shared.billing_address.street_address2}" }, "challenge": { "country": "${json.shared.challenge.country}", "email": "${json.shared.challenge.email}", 
"postal_code": "${json.shared.challenge.postal_code}" }, "currency": "${json.shared.currency}", "customer": { "national_identification_number": "${json.shared.customer.national_identification_number}", "type": "${json.shared.customer.type}" }, "language": "${json.shared.language}", "selected_payment_method": { "data": { "days": 14 }, "id": "-1", "type": "invoice" } }, "status": { "prescreened": "${json.status.prescreened}", "require_terms_consent": "${json.status.require_terms_consent}" } }`, { "headers": { "Authorization": "Checkout otherStuffz", "User-Agent": "SysTest - perf", "Content-Type": "application/vnd.checkout.client-order-v1+json", "Accept": "application/vnd.checkout.server-order-v1+json", "Accept-Encoding": "gzip;q=1.0,deflate;q=0.6,identity;q=0.3", "Host": "some-other-host.example.com" } } ) if (!check(res, {"status is 200": (r) => r.status === 200 })) { return }; json = JSON.parse(res.body); // Request #6 res = http.connect("path_to_url", "" ) }); } ```
```objective-c
//----------------------------------------------------------------------------
// Anti-Grain Geometry (AGG) - Version 2.5
// A high quality rendering engine for C++
// Contact: mcseem@antigrain.com
//          mcseemagg@yahoo.com
//          http://antigrain.com
//
// AGG is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// AGG is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with AGG; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
// MA 02110-1301, USA.
//----------------------------------------------------------------------------

#ifndef AGG_CLIP_LIANG_BARSKY_INCLUDED
#define AGG_CLIP_LIANG_BARSKY_INCLUDED

#include "agg_basics.h"

namespace agg
{

    //------------------------------------------------------------------------
    // Bit flags describing on which side(s) of the clip box a vertex lies.
    // Note the bit layout is deliberate so that the x-flags and y-flags can
    // each be masked out as a pair (see clipping_flags_x/y_clipped below).
    enum clipping_flags_e
    {
        clipping_flags_x1_clipped = 4,
        clipping_flags_x2_clipped = 1,
        clipping_flags_y1_clipped = 8,
        clipping_flags_y2_clipped = 2,
        clipping_flags_x_clipped = clipping_flags_x1_clipped | clipping_flags_x2_clipped,
        clipping_flags_y_clipped = clipping_flags_y1_clipped | clipping_flags_y2_clipped
    };

    //-------------------------------------------------------- clipping_flags
    // Determine the clipping code of the vertex according to the
    // Cyrus-Beck line clipping algorithm
    //
    //        |        |
    //  0110  |  0010  | 0011
    //        |        |
    // -------+--------+-------- clip_box.y2
    //        |        |
    //  0100  |  0000  | 0001
    //        |        |
    // -------+--------+-------- clip_box.y1
    //        |        |
    //  1100  |  1000  | 1001
    //        |        |
    //  clip_box.x1  clip_box.x2
    //
    //
    template<class T>
    inline unsigned clipping_flags(T x, T y, const rect_base<T>& clip_box)
    {
        // Each comparison yields 0 or 1; shifts pack them into one code.
        return (x > clip_box.x2) |
              ((y > clip_box.y2) << 1) |
              ((x < clip_box.x1) << 2) |
              ((y < clip_box.y1) << 3);
    }

    //------------------------------------------------------ clipping_flags_x
    // x-only subset of the clipping code (bits 0 and 2).
    template<class T>
    inline unsigned clipping_flags_x(T x, const rect_base<T>& clip_box)
    {
        return (x > clip_box.x2) | ((x < clip_box.x1) << 2);
    }

    //------------------------------------------------------ clipping_flags_y
    // y-only subset of the clipping code (bits 1 and 3).
    template<class T>
    inline unsigned clipping_flags_y(T y, const rect_base<T>& clip_box)
    {
        return ((y > clip_box.y2) << 1) | ((y < clip_box.y1) << 3);
    }

    //------------------------------------------------------ clip_liang_barsky
    // Clip the segment (x1,y1)-(x2,y2) against clip_box using the
    // Liang-Barsky parametric algorithm. The resulting vertices are written
    // through the output pointers x/y, and the number of vertices written
    // is returned (0 when the segment lies entirely outside the box).
    template<class T>
    inline unsigned clip_liang_barsky(T x1, T y1, T x2, T y2,
                                      const rect_base<T>& clip_box,
                                      T* x, T* y)
    {
        // Tiny non-zero slope substituted for exactly vertical/horizontal
        // segments so the parametric divisions below never divide by zero.
        const double nearzero = 1e-30;

        double deltax = x2 - x1;
        double deltay = y2 - y1;
        double xin;
        double xout;
        double yin;
        double yout;
        double tinx;
        double tiny;
        double toutx;
        double touty;
        double tin1;
        double tin2;
        double tout1;
        unsigned np = 0;

        if(deltax == 0.0)
        {
            // bump off of the vertical
            deltax = (x1 > clip_box.x1) ? -nearzero : nearzero;
        }

        if(deltay == 0.0)
        {
            // bump off of the horizontal
            deltay = (y1 > clip_box.y1) ? -nearzero : nearzero;
        }

        if(deltax > 0.0)
        {
            // points to right
            xin  = clip_box.x1;
            xout = clip_box.x2;
        }
        else
        {
            xin  = clip_box.x2;
            xout = clip_box.x1;
        }

        if(deltay > 0.0)
        {
            // points up
            yin  = clip_box.y1;
            yout = clip_box.y2;
        }
        else
        {
            yin  = clip_box.y2;
            yout = clip_box.y1;
        }

        // Parametric entry times for the x and y boundary planes.
        tinx = (xin - x1) / deltax;
        tiny = (yin - y1) / deltay;

        if (tinx < tiny)
        {
            // hits x first
            tin1 = tinx;
            tin2 = tiny;
        }
        else
        {
            // hits y first
            tin1 = tiny;
            tin2 = tinx;
        }

        if(tin1 <= 1.0)
        {
            if(0.0 < tin1)
            {
                // Segment enters the clip region: emit the corner vertex.
                *x++ = (T)xin;
                *y++ = (T)yin;
                ++np;
            }

            if(tin2 <= 1.0)
            {
                // Parametric exit times for the x and y boundary planes.
                toutx = (xout - x1) / deltax;
                touty = (yout - y1) / deltay;

                tout1 = (toutx < touty) ? toutx : touty;

                if(tin2 > 0.0 || tout1 > 0.0)
                {
                    if(tin2 <= tout1)
                    {
                        if(tin2 > 0.0)
                        {
                            // Second entry point (crossing the later plane).
                            if(tinx > tiny)
                            {
                                *x++ = (T)xin;
                                *y++ = (T)(y1 + tinx * deltay);
                            }
                            else
                            {
                                *x++ = (T)(x1 + tiny * deltax);
                                *y++ = (T)yin;
                            }
                            ++np;
                        }

                        if(tout1 < 1.0)
                        {
                            // Segment leaves through a boundary: emit the
                            // exit intersection.
                            if(toutx < touty)
                            {
                                *x++ = (T)xout;
                                *y++ = (T)(y1 + toutx * deltay);
                            }
                            else
                            {
                                *x++ = (T)(x1 + touty * deltax);
                                *y++ = (T)yout;
                            }
                        }
                        else
                        {
                            // Segment ends inside the box: emit its endpoint.
                            *x++ = x2;
                            *y++ = y2;
                        }
                        ++np;
                    }
                    else
                    {
                        // Segment misses the box but wraps a corner; emit
                        // the corner so polygon clipping stays closed.
                        if(tinx > tiny)
                        {
                            *x++ = (T)xin;
                            *y++ = (T)yout;
                        }
                        else
                        {
                            *x++ = (T)xout;
                            *y++ = (T)yin;
                        }
                        ++np;
                    }
                }
            }
        }
        return np;
    }

    //---------------------------------------------------------- clip_move_point
    // Move the point (*x,*y) belonging to segment (x1,y1)-(x2,y2) onto the
    // clip box boundary indicated by 'flags'. Returns false when the segment
    // is degenerate in the clipped axis (fully clipped).
    // NOTE(review): after the x move, flags are recomputed for y from *y —
    // callers are expected to have *x/*y pre-loaded with the point's current
    // coordinates (clip_line_segment does this).
    template<class T>
    bool clip_move_point(T x1, T y1, T x2, T y2,
                         const rect_base<T>& clip_box,
                         T* x, T* y, unsigned flags)
    {
        T bound;

        if(flags & clipping_flags_x_clipped)
        {
            if(x1 == x2)
            {
                return false;
            }
            bound = (flags & clipping_flags_x1_clipped) ? clip_box.x1 : clip_box.x2;
            // Intersect with the vertical boundary line x == bound.
            *y = (T)(double(bound - x1) * (y2 - y1) / (x2 - x1) + y1);
            *x = bound;
        }

        flags = clipping_flags_y(*y, clip_box);
        if(flags & clipping_flags_y_clipped)
        {
            if(y1 == y2)
            {
                return false;
            }
            bound = (flags & clipping_flags_y1_clipped) ? clip_box.y1 : clip_box.y2;
            // Intersect with the horizontal boundary line y == bound.
            *x = (T)(double(bound - y1) * (x2 - x1) / (y2 - y1) + x1);
            *y = bound;
        }
        return true;
    }

    //-------------------------------------------------------- clip_line_segment
    // Returns: ret >= 4        - Fully clipped
    //          (ret & 1) != 0  - First point has been moved
    //          (ret & 2) != 0  - Second point has been moved
    //
    template<class T>
    unsigned clip_line_segment(T* x1, T* y1, T* x2, T* y2,
                               const rect_base<T>& clip_box)
    {
        unsigned f1 = clipping_flags(*x1, *y1, clip_box);
        unsigned f2 = clipping_flags(*x2, *y2, clip_box);
        unsigned ret = 0;

        if((f2 | f1) == 0)
        {
            // Fully visible
            return 0;
        }

        // Both endpoints on the same outside side of the box in x or in y
        // means the segment cannot intersect the box at all.
        if((f1 & clipping_flags_x_clipped) != 0 &&
           (f1 & clipping_flags_x_clipped) == (f2 & clipping_flags_x_clipped))
        {
            // Fully clipped
            return 4;
        }

        if((f1 & clipping_flags_y_clipped) != 0 &&
           (f1 & clipping_flags_y_clipped) == (f2 & clipping_flags_y_clipped))
        {
            // Fully clipped
            return 4;
        }

        // Keep the original endpoints; clip_move_point needs them intact
        // even after *x1/*y1 have been rewritten.
        T tx1 = *x1;
        T ty1 = *y1;
        T tx2 = *x2;
        T ty2 = *y2;

        if(f1)
        {
            if(!clip_move_point(tx1, ty1, tx2, ty2, clip_box, x1, y1, f1))
            {
                return 4;
            }
            if(*x1 == *x2 && *y1 == *y2)
            {
                return 4;
            }
            ret |= 1;
        }

        if(f2)
        {
            if(!clip_move_point(tx1, ty1, tx2, ty2, clip_box, x2, y2, f2))
            {
                return 4;
            }
            if(*x1 == *x2 && *y1 == *y2)
            {
                return 4;
            }
            ret |= 2;
        }
        return ret;
    }

}

#endif
```
```html
{% comment %}Emits this page's canonical URL: site.url with "www." removed, then site.baseurl, then the target path (include.url when the caller supplies one, otherwise page.url) with any "index.html" stripped. Everything stays on one line so the include renders no stray whitespace.{% endcomment %}{% if include.url %}{% assign url = include.url %}{% else %}{% assign url = page.url %}{% endif %}{{ site.url | replace:'www.','' }}{{site.baseurl}}{{ url | replace:'index.html','' }}
```
```python from .. core._core_utils import add_method from .. import _otio @add_method(_otio.ImageSequenceReference) def __str__(self): return ( 'ImageSequenceReference(' '"{}", "{}", "{}", {}, {}, {}, {}, {}, {}, {}, {})' .format( self.target_url_base, self.name_prefix, self.name_suffix, self.start_frame, self.frame_step, self.rate, self.frame_zero_padding, self.missing_frame_policy, self.available_range, self.available_image_bounds, self.metadata, ) ) @add_method(_otio.ImageSequenceReference) def __repr__(self): return ( 'ImageSequenceReference(' 'target_url_base={}, ' 'name_prefix={}, ' 'name_suffix={}, ' 'start_frame={}, ' 'frame_step={}, ' 'rate={}, ' 'frame_zero_padding={}, ' 'missing_frame_policy={}, ' 'available_range={}, ' 'available_image_bounds={}, ' 'metadata={}' ')' .format( repr(self.target_url_base), repr(self.name_prefix), repr(self.name_suffix), repr(self.start_frame), repr(self.frame_step), repr(self.rate), repr(self.frame_zero_padding), repr(self.missing_frame_policy), repr(self.available_range), repr(self.available_image_bounds), repr(self.metadata), ) ) @add_method(_otio.ImageSequenceReference) def frame_range_for_time_range(self, time_range): """Returns first and last frame numbers for the given time range in the reference. :rtype: tuple[int] :raises ValueError: if the provided time range is outside the available range. """ return ( self.frame_for_time(time_range.start_time), self.frame_for_time(time_range.end_time_inclusive()) ) @add_method(_otio.ImageSequenceReference) def abstract_target_url(self, symbol): """ Generates a target url for a frame where ``symbol`` is used in place of the frame number. This is often used to generate wildcard target urls. """ if not self.target_url_base.endswith("/"): base = self.target_url_base + "/" else: base = self.target_url_base return "{}{}{}{}".format( base, self.name_prefix, symbol, self.name_suffix ) ```
Bedfords Park is a public open space of 215 acres (approximately 87½ hectares) near Havering-atte-Bower in the London Borough of Havering in England. It is one of three large parklands around Havering-atte-Bower; the others are Havering Country Park and Pyrgo Park. Bedfords Park is a Site of Metropolitan Importance for Nature Conservation and a local nature reserve. It was awarded a Green Flag Award in 2007 which was maintained at least until 2013. The site is managed by Havering Council and the visitor centre is managed by the Essex Wildlife Trust. Geography Bedfords Park is situated on a south-facing slope, the top of which is 110 metres above sea level and affords views over east London and across the River Thames to Kent. The highest part of the park is on a ridge capped by a patch of Chalky Boulder Clay and it is this that allows the Round Pond to retain water. The park is made up of mixed parkland and deciduous woods and is much narrower on the ridge while broadening as you move down the slope in a southerly direction. Vehicular access is gained on the north side of the park from Broxhill Road, while there is pedestrian access from Lower Bedfords Road on the southern edge of the park, and also using a permissive footpath from Orange Tree Hill. History The land that makes up Bedfords Park originates in the two adjoining estates of Bedfords, believed to be named after the family of Robert de Bedford mentioned in 1285 and John Bedford who held the land in 1362, and Earls which later became known as Upper Bedfords. The latter was taken from John Derewin in 1212 by King John as forfeiture for homicide and given to William d'Aubigny, 3rd Earl of Arundel for the annual rent of one Sparrowhawk. The manor, which is one of the oldest in Havering and was one of four held in serjeanty, remained in the family at least until the time of the 5th Earl. 
In 1452 Thomas Cooke (who was to become Lord Mayor of London 10 years later) took possession of the manor of Bedfords, having already acquired the 'manor or messuage' of Earls, and held these as part of the Gidea Hall estate. This continued for 200 years until 1659 after which the properties changed hands several times, being briefly part of Gobions Manor, but by 1678 the two estates were held by Robert Wolley. After being sold out of Gobions Manor, the ownership was in the hands of various London merchants until 1771 when it was sold by Nathaniel Houlton to a Mr John Heaton. During this time there is no evidence of the owners living in either manor. Bedford Park's mansion John Heaton rebuilt the house at Bedfords as a two-storey brick mansion rendered with cement, and resided there until his death in 1818. Upper Bedfords (previously Earls) was also rebuilt during this time in a crenelated style, including a tower which has often caused it to be mistaken for a church. It was at this time (c. 1777) that the lane which had run from Broxhill Road to Lower Bedfords Road and going past Bedfords was closed by the owner. This is also the point at which today's park is recognisable as Chapman and Andre's survey of that time depicts the northern tree line, approach avenue, and park boundaries as they occur today. The property passed down two generations in the Heaton family, although it was let to tenants between 1826 and 1834. At this point the property again changed hands several times and was even listed in the Chelmsford Chronicle on 21 June 1867 (on page 4): It was purchased 3 years later from the mortgagee of the previous owner by Henry R. Stone in 1870. His son Henry J. Stone was the last lord of the manor, and although in parts, Bedfords was sold to Romford Urban District Council by his widow in 1933. 
Romford UDC opened the park to the public in 1934, and instituted a museum in the house which locals seem to have referred to as Bedfords Mansion, this focussing on natural history and containing stuffed birds and animals from the local area. It was also around this time that a captive herd of red deer were introduced. During World War II Bedfords House was used by the Auxiliary Fire Service and afterwards was used as the council archive but was gradually allowed to fall into disrepair until it was demolished in 1959. In the 1960s a cafe was built on the site of the house until this also fell out of use in the 1980s. Today the route of the public lane closed around 1777 can still be traced along the entrance drive, past the old stable block (the only part of the house to survive), and then down the slope to the east of the visitor centre, the whole being characterised by the avenue of mature Horse Chestnut trees. The site of the house is now occupied by a Visitor Centre run by Essex Wildlife Trust. Queen Anne's Oak This was an ancient oak tree that stood to the side of the mansion and the deer enclosure. The above is the earliest known photograph of the oak tree where legend has it, Queen Anne sat by this tree on her visits to the estate. That is why the estate took this concerted effort to help keep the tree alive for so long; including bricking around the base of the tree and bracing it with metal belts. It is also mentioned in the Romford Then and Now publication from 2005 where it mentions: A new oak tree was planted on the actual site of the tree to commemorate its memory in 2015. See also Havering parks and open spaces References External links Havering London Borough Council information about the park Photos Parks and open spaces in the London Borough of Havering Local nature reserves in Greater London Essex Wildlife Trust
```swift
//
//  UITableViewCell.swift
//  Cacao
//
//  Created by Alsey Coleman Miller on 6/26/17.
//

import Foundation

#if os(iOS)
import UIKit
import CoreGraphics
#else
import Silica
#endif

/// A cell in a table view.
///
/// This class includes properties and methods for setting and managing cell content
/// and background (including text, images, and custom views), managing the cell
/// selection and highlight state, managing accessory views, and initiating the
/// editing of the cell contents.
open class UITableViewCell: UIView {

    // MARK: - Initializing a `UITableViewCell` Object

    /// Initializes a table cell with a style and a reuse identifier and returns it to the caller.
    public required init(style: UITableViewCellStyle, reuseIdentifier: String?) {

        self.style = style
        self.reuseIdentifier = reuseIdentifier

        // while `UIView.init()` creates a view with an empty frame,
        // UIKit creates a {0,0, 320, 44} cell with this initializer
        super.init(frame: CGRect(origin: .zero, size: UITableViewCell.defaultSize))

        self.setupTableViewCellCommon()
    }

    #if os(iOS)
    public required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    #endif

    // MARK: - Reusing Cells

    /// A string used to identify a cell that is reusable.
    public let reuseIdentifier: String?

    /// Prepares a reusable cell for reuse by the table view's delegate.
    open func prepareForReuse() { } // default implementation is empty

    // MARK: - Managing the Predefined Content

    /// Returns the label used for the main textual content of the table cell.
    public var textLabel: UILabel? { return _contentView.textLabel }

    /// Returns the secondary label of the table cell if one exists.
    public var detailTextLabel: UILabel? { return _contentView.detailTextLabel }

    /// Returns the image view of the table cell.
    public var imageView: UIImageView? { return _contentView.imageView }

    // MARK: - Accessing Views of the Cell Object

    /// Returns the content view of the cell object.
    ///
    /// The content view of a `UITableViewCell` object is the default superview for content displayed by the cell.
    /// If you want to customize cells by simply adding additional views, you should add them to the content view
    /// so they will be positioned appropriately as the cell transitions into and out of editing mode.
    public var contentView: UIView { return _contentView }

    // Backing storage for `contentView`; created lazily so `self` is available.
    private lazy var _contentView: UITableViewCellContentView = UITableViewCellContentView(frame: CGRect(origin: .zero, size: self.bounds.size), cell: self)

    /// The view used as the background of the cell.
    ///
    /// The default is `nil` for cells in plain-style tables (`.plain`)
    /// and non-nil for grouped-style tables (`.grouped`).
    /// `UITableViewCell` adds the background view as a subview behind all other views and uses its current frame location.
    public var backgroundView: UIView?

    /// The view used as the background of the cell when it is selected.
    ///
    /// The default is nil for cells in plain-style tables (`.plain`)
    /// and non-nil for section-group tables (`.grouped`).
    /// `UITableViewCell` adds the value of this property as a subview only when the cell is selected.
    /// It adds the selected background view as a subview directly above the background view (`backgroundView`)
    /// if it is not nil, or behind all other views.
    /// Calling `setSelected(_:animated:)` causes the selected background view to animate in and out with an alpha fade.
    public var selectedBackgroundView: UIView?

    /// The background view to use for a selected cell when the table view allows multiple row selections.
    public var multipleSelectionBackgroundView: UIView?

    // MARK: - Managing Accessory Views

    /// The type of standard accessory view the cell should use (normal state).
    ///
    /// The accessory view appears in the right side of the cell in the table view's normal (default) state.
    /// The standard accessory views include the disclosure chevron; for a description of valid accessoryType constants,
    /// see `UITableViewCellAccessoryType`.
    /// The default is `.none`.
    /// If a custom accessory view is set through the accessoryView property, the value of this property is ignored.
    /// If the cell is enabled and the accessory type is detailDisclosureButton, the accessory view tracks touches and,
    /// when tapped, sends the data-source object a `tableView(_:accessoryButtonTappedForRowWith:)` message.
    ///
    /// The accessory-type image cross-fades between normal and editing states if it set for both states;
    /// use the `editingAccessoryType` property to set the accessory type for the cell during editing mode.
    /// If this property is not set for both states, the cell is animated to slide in or out, as necessary.
    public var accessoryType: UITableViewCellAccessoryType = .none

    /// A view that is used, typically as a control, on the right side of the cell (normal state).
    public var accessoryView: UIView?

    /// The type of standard accessory view the cell should use in the table view's editing state.
    public var editingAccessoryType: UITableViewCellAccessoryType = .none

    /// A view that is used typically as a control on the right side of the cell when it is in editing mode.
    public var editingAccessoryView: UIView?

    // MARK: - Managing Cell Selection and Highlighting

    /// The style of selection for a cell.
    public var selectionStyle: UITableViewCellSelectionStyle = .blue

    /// A Boolean value that indicates whether the cell is selected.
    public var isSelected: Bool = false

    /// Sets the selected state of the cell, optionally animating the transition between states.
    // NOTE(review): currently a no-op; selection visuals are not implemented yet.
    public func setSelected(_ selected: Bool, animated: Bool) { }

    /// A Boolean value that indicates whether the cell is highlighted.
    public var isHighlighted: Bool = false

    /// Sets the highlighted state of the cell, optionally animating the transition between states.
    // NOTE(review): currently a no-op; highlight visuals are not implemented yet.
    public func setHighlighted(_ highlighted: Bool, animated: Bool) { }

    // MARK: - Editing the Cell

    /// A Boolean value that indicates whether the cell is in an editable state.
    public var isEditing: Bool = false

    /// Toggles the receiver into and out of editing mode.
    // NOTE(review): currently a no-op; editing mode is not implemented yet.
    public func setEditing(_ editing: Bool, animated: Bool) { }

    /// The editing style of the cell.
    public internal(set) var editingStyle: UITableViewCellEditingStyle = .none

    /// Returns whether the cell is currently showing the delete-confirmation button.
    ///
    /// When users tap the deletion control (the red circle to the left of the cell),
    /// the cell displays a "Delete" button on the right side of the cell; this string is localized.
    public internal(set) var showingDeleteConfirmation: Bool = false

    /// A Boolean value that determines whether the cell shows the reordering control.
    ///
    /// The reordering control is gray, multiple horizontal bar control on the right side of the cell.
    /// Users can drag this control to reorder the cell within the table.
    /// The default value is `false`.
    /// If the value is `true`, the reordering control temporarily replaces any accessory view.
    public var showsReorderControl: Bool = false

    // MARK: - Managing Content Indentation

    /// The indentation level of the cell's content.
    ///
    /// The default value of the property is zero (no indentation).
    /// Assigning a positive value to this property indents the cell's content from
    /// the left edge of the cell separator. The amount of indentation is equal to
    /// the indentation level multiplied by the value in the `indentationWidth` property.
    public var indentationLevel: Int = 0

    /// The width for each level of indentation of a cell's content.
    ///
    /// The default indentation width is 10.0 points.
    public var indentationWidth: CGFloat = 10

    /// A Boolean value that controls whether the cell background is indented when the table view is in editing mode.
    ///
    /// The default value is `true`.
    /// This property is unrelated to `indentationLevel`.
    /// The delegate can override this value in `tableView(_:shouldIndentWhileEditingRowAt:)`.
    /// This property has an effect only on table views created in the grouped style (`.grouped`);
    /// it has no effect on plain table views.
    public var shouldIndentWhileEditing: Bool = true

    /// The inset values for the cell's content.
    ///
    /// You can use this property to add space between the current cell's contents and the left and right edges of the table.
    /// Positive inset values move the cell content and cell separator inward and away from the table edges.
    /// Negative values are treated as if the inset is set to `0`.
    ///
    /// Only the left and right inset values are respected; the top and bottom inset values are ignored.
    /// The value assigned to this property takes precedence over any default separator insets set on the table view.
    public var separatorInset: UIEdgeInsets = UITableView.defaultSeparatorInset

    // MARK: - Overriden Methods

    open override func layoutSubviews() {

        super.layoutSubviews()

        // layoutFloatingContentView()

        // Mirrors the call chain observed in UIKit:
        // UIKit`-[UITableViewCellLayoutManager layoutSubviewsOfCell:]:
        // UIKit`-[UITableViewCell layoutSubviews]:
        UITableViewCellLayoutManager.layoutSubviews(of: self)
    }

    // MARK: - Private

    // 320x44 — matches the frame UIKit gives cells created via init(style:reuseIdentifier:).
    internal static let defaultSize = CGSize(width: 320, height: UITableView.defaultRowHeight)

    // Set by the owning table view; weak to avoid a retain cycle.
    internal weak var tableView: UITableView?

    @_versioned
    internal let style: UITableViewCellStyle

    // added as subview in `init()`
    fileprivate lazy var separatorView: UITableViewCellSeparatorView = UITableViewCellSeparatorView()

    // Called by the table view to apply its separator style/color to this cell.
    internal func configureSeparator(style: UITableViewCellSeparatorStyle, color: UIColor?) {

        separatorView.style = style
        separatorView.color = color
    }

    private func setupTableViewCellCommon() {

        // add mandatory subviews
        self.addSubview(separatorView)
        self.addSubview(contentView)

        // layout subviews
        self.layoutIfNeeded()
    }

    /// Arranges the subviews in the proper order
    // NOTE(review): not called from anywhere visible in this file — confirm callers.
    private func orderSubviews() {

        if let view = selectedBackgroundView { sendSubview(toBack: view) }
        if let view = backgroundView { sendSubview(toBack: view) }
        bringSubview(toFront: contentView)
        if let view = accessoryView { bringSubview(toFront: view) }
        bringSubview(toFront: separatorView)
    }

    /// Called after default text label's text is changed
    fileprivate func contentViewLabelTextDidChange(_ label: UITableViewLabel) {

        setNeedsLayout()
    }
}

// MARK: - Supporting Types

public enum UITableViewCellAccessoryType: Int {

    case none
    case disclosureIndicator
    case detailDisclosureButton
    case checkmark
}

public enum UITableViewCellSeparatorStyle: Int {

    case none
    case singleLine
    case singleLineEtched
}

public enum UITableViewCellStyle: Int {

    case `default`
    case value1
    case value2
    case subtitle
}

public enum UITableViewCellSelectionStyle: Int {

    case none
    case blue
    case gray
}

public enum UITableViewCellEditingStyle: Int {

    case none
    case delete
    case insert
}

// MARK: - Private Supporting Types

/// Hosts the cell's predefined content (text label, detail label, image view)
/// and lays them out according to the cell's style.
internal final class UITableViewCellContentView: UIView {

    // Owning cell; implicitly-unwrapped weak because the cell outlives its content view.
    private(set) weak var cell: UITableViewCell!

    /// Returns the label used for the main textual content of the table cell.
    private(set) weak var textLabel: UITableViewLabel?

    /// Returns the secondary label of the table cell if one exists.
    private(set) weak var detailTextLabel: UITableViewLabel?

    /// Returns the image view of the table cell.
    private(set) weak var imageView: UIImageView?

    var isLayoutEngineSuspended: Bool = false

    required init(frame: CGRect, cell: UITableViewCell) {

        super.init(frame: frame)

        self.cell = cell

        // setup predefined views
        self.tableViewCellContentViewCommonSetup()
    }

    #if os(iOS)
    public required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    #endif

    private func tableViewCellContentViewCommonSetup() {

        // should only be called once
        assert(self.textLabel == nil)

        guard let cell = self.cell
            else { fatalError("No cell configured") }

        // The layout manager decides which predefined views exist for the style.
        let textLabel = UITableViewCellLayoutManager.textLabel(for: cell)
        let detailTextLabel = UITableViewCellLayoutManager.detailTextLabel(for: cell)
        let imageView = UITableViewCellLayoutManager.imageView(for: cell)

        // add subviews
        let contentSubviews: [UIView?] = [textLabel, detailTextLabel, imageView]

        // add as subviews to content view
        contentSubviews.forEach { if let view = $0 { self.addSubview(view) } }

        // set properties
        self.textLabel = textLabel
        self.detailTextLabel = detailTextLabel
        self.imageView = imageView
    }

    override func layoutSubviews() {

        let contentFrame = self.frame
        let style = self.cell.style

        // layout default subviews
        switch style {

        case .default:

            // Image (if any) pinned to the leading edge; text fills the rest.
            let imageWidth = imageView?.image?.size.width ?? 0.0

            let imageViewFrame = CGRect(x: 5, y: 0, width: imageWidth, height: contentFrame.size.height)

            let textLabelX = imageViewFrame.origin.x + 15

            let textLabelFrame = CGRect(x: textLabelX, y: 0, width: contentFrame.size.width - textLabelX, height: contentFrame.size.height)

            imageView?.frame = imageViewFrame
            textLabel?.frame = textLabelFrame

            assert(detailTextLabel == nil, "No detail text label for \(style)")

        case .subtitle:
            // FIXME: subtitle layout
            break

        case .value1:
            // FIXME: value1 layout
            break

        case .value2:
            // FIXME: value2 layout
            break
        }
    }
}

/// Actual name is `_UITableViewCellSeparatorView`
internal typealias UITableViewCellSeparatorView = _UITableViewCellSeparatorView

/// Draws the one-point separator line at the bottom of a cell.
internal final class _UITableViewCellSeparatorView: UIView {

    var style: UITableViewCellSeparatorStyle = .none {
        didSet {
            setNeedsDisplay()
            // `.none` hides the view entirely rather than drawing nothing.
            isHidden = style == .none
        }
    }

    var color: UIColor? {
        didSet { setNeedsDisplay() }
    }

    override init(frame: CGRect) {
        super.init(frame: frame)

        self.isHidden = true
    }

    #if os(iOS)
    public required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    #endif

    override func draw(_ rect: CGRect) {

        guard let context = UIGraphicsGetCurrentContext(),
            let color = self.color,
            style != .none
            else { return }

        color.setStroke()

        // Single horizontal line along the top edge of this view's bounds.
        let line = UIBezierPath()
        line.move(to: .zero)
        line.addLine(to: CGPoint(x: bounds.size.width, y: 0))
        line.lineWidth = 1

        switch style {

        case .none:
            break

        case .singleLine:

            line.stroke()

        case .singleLineEtched:

            // Same line, but dashed; dash state is scoped with save/restore.
            context.saveGState()
            context.setLineDash(phase: 0, lengths: [2, 2])
            line.stroke()
            context.restoreGState()
        }
    }
}

internal final class UITableViewCellSelectedBackground: UIView { }

/// Stateless helpers that perform cell geometry and create the predefined
/// subviews appropriate for each `UITableViewCellStyle`.
internal struct UITableViewCellLayoutManager {

    // Lays out the accessory, background, content and separator views of `cell`.
    static func layoutSubviews(of cell: UITableViewCell) {

        let bounds = cell.bounds

        let isSeparatorVisible = cell.separatorView.isHidden == false

        // Content stops 1pt short of the bottom when the separator is shown.
        var contentFrame = CGRect(x: 0, y: 0, width: bounds.size.width, height: bounds.size.height - (isSeparatorVisible ? 1 : 0))

        if let accessoryView = cell.accessoryView {

            /// calculate frame
            var frame = CGRect(x: bounds.size.width, y: 0, width: 0, height: 0)
            frame.size = accessoryView.sizeThatFits(bounds.size)
            frame.origin.x = bounds.size.width - frame.size.width
            frame.origin.y = round( 0.5 * (bounds.size.height - frame.size.height))

            // set accessory frame
            accessoryView.frame = frame

            // adjust content frame based on accessory
            contentFrame.size.width = frame.origin.x - 1
        }

        // set content frames
        cell.backgroundView?.frame = contentFrame
        cell.selectedBackgroundView?.frame = contentFrame;
        cell.contentView.frame = contentFrame;

        // set separator frame
        let separatorFrame = CGRect(x: cell.separatorInset.left, y: bounds.size.height - 1, width: bounds.size.width - (cell.separatorInset.right + cell.separatorInset.left), height: 1)

        cell.separatorView.frame = isSeparatorVisible ? separatorFrame : .zero
    }

    @inline(__always)
    static private func createLabel(for cell: UITableViewCell) -> UITableViewLabel {

        return UITableViewLabel(frame: .zero, cell: cell)
    }

    // Every style gets a primary text label.
    static func textLabel(for cell: UITableViewCell) -> UITableViewLabel {

        let style = cell.style
        let label = createLabel(for: cell)

        switch style {
        case .default: break
        case .subtitle: break
        case .value1: break
        case .value2: break
        }

        return label
    }

    // Only non-`.default` styles get a detail label.
    static func detailTextLabel(for cell: UITableViewCell) -> UITableViewLabel? {

        let style = cell.style

        switch style {

        case .default:
            return nil

        case .subtitle:
            let label = createLabel(for: cell)
            return label

        case .value1:
            let label = createLabel(for: cell)
            return label

        case .value2:
            let label = createLabel(for: cell)
            return label
        }
    }

    // `.value1`/`.value2` styles have no image view.
    static func imageView(for cell: UITableViewCell) -> UIImageView? {

        let style = cell.style

        switch style {

        case .default:
            let imageView = UIImageView()
            return imageView

        case .subtitle:
            let imageView = UIImageView()
            return imageView

        case .value1,
             .value2:
            return nil
        }
    }

    /* static func backgroundEndingRect(for cell: UITableViewCell) -> CGRect {


    }*/
}

/// A label that notifies its owning cell when its text changes, so the cell
/// can invalidate layout.
internal class UITableViewLabel: UILabel {

    private(set) weak var cell: UITableViewCell!

    override var text: String? {
        didSet {
            guard text != oldValue else { return }

            // inform cell of text change
            cell?.contentViewLabelTextDidChange(self)
        }
    }

    required init(frame: CGRect, cell: UITableViewCell) {
        super.init(frame: frame)

        self.cell = cell
    }

    #if os(iOS)
    public required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    #endif
}

// MARK: - ReusableView Protocol

internal protocol ReusableView {

    var reuseIdentifier: String? { get }

    func prepareForReuse()
}

extension UITableViewCell: ReusableView { }
```
```java package com.eventyay.organizer.data; import android.os.SystemClock; import androidx.collection.ArrayMap; import org.threeten.bp.Duration; import java.util.Map; /** * Utility class that decides whether we should fetch some data or not. */ public class RateLimiter<K> { private final Map<K, Long> timestamps = new ArrayMap<>(); private final long timeout; public RateLimiter(Duration duration) { this.timeout = duration.toMillis(); } public synchronized boolean shouldFetch(K key) { Long lastFetched = timestamps.get(key); long now = now(); if (lastFetched == null) { timestamps.put(key, now); return true; } if (now - lastFetched > timeout) { timestamps.put(key, now); return true; } return false; } private long now() { return SystemClock.uptimeMillis(); } public synchronized void reset(K key) { timestamps.remove(key); } } ```
Bout It is the second studio album by American singer Jesse Powell. It was released by Silas Records on September 8, 1998 in the United States. It was Powell's first album to chart on the US Billboard 200, peaking at number 63. For sales in excess of 500,000 copies, Bout It was certified gold by the Recording Industry Association of America (RIAA) on June 8, 1999. Three singles were released from the album, including "I Wasn't with It", "You" and "'Bout It, 'Bout It". "You" is Powell's only hit to date on the US Billboard Hot 100, peaking at number 10 in 1999. Critical reception Allmusic editor Craig Lytle found that Bout It "isn't a gem but is generally decent. The singer doesn't incorporate an abundance of rapping (something many jeep soulsters were doing), but his producers do favor a very hip-hop-ish production style on "Up and Down," "I Wasn't with It," and other selections. Powell doesn't provide anything for the dancefloor – most of the time, he gets into a comfortable medium-tempo groove, whether he's being romantic or overtly sexual. Not a remarkable CD, but competent and usually likable." Track listing Notes denotes co-producer Personnel Gerald Albright – saxophone Carter Bradley – make-up James "Chip" Bunton – production coordination LaShawn Daniels – engineer, vocal producer Felipe Darrell – associate executive producer Kevin "KD" Davis – mixing Ken Deranteriasian – engineer Joey Elias – backing vocals, producer Paul "PE" Elliot – engineer Paul Erickson – engineer Mark J. Feist – producer, engineer, mixing Ben Garrison – mixing Jeff Gibbs – engineer Rawle Gittens – engineer Reno Greenfield – drum programming Susan Herndon – engineer Ashley Ingram – multi-instruments, producer Jhane Isaacs – stylist Eric Jackson – guitar Fred Jenkins III – multi-instruments, producer, mixing Rodney Jerkins – producer, mixing, vocal producer Jon-John – multi-instruments, producer Kenny "K-Smoove" Kornegay – producer L.D.J. 
– multi instruments, producer Tim "Flash" Mariner – engineer Manny Marroquin – engineer, mixing Meire Murakami – design Kenji Nakai – engineer, mixing Ashley Pigford – design Catrina Powell – backing vocals Jacob Powell – backing vocals Jesse Powell – vocals, backing vocals, producer Tamara Powell – backing vocals Chris Puram – mixing Jon-John Robinson – producer Pete Rock – producer, remixing Carl Roland – programming, multi-instruments, producer Sauce Money – rap Louis Silas Jr. – executive producer Dwight Sills – acoustic guitar, producer Dexter Simmons – mixing Michael Stokes – engineer, mixing, production supervisor Joe Warlick – assistant engineer, mixing assistant Emerald Williams – backing vocals Darryl Young – producer Charts Weekly charts Year-end charts Certifications References External links 1998 albums Albums produced by Louis Silas Jr. Jesse Powell albums Silas Records albums
Red Dress Day, or Red Dress Campaign, is an annual event held by the REDress Project in memory of the lives of Missing and Murdered Indigenous Women and Girls across Canada. This event was originally held on May 5, 2010, and continues annually. The event is sometimes held on other dates throughout the year to coincide with other days of action, such as National Indigenous Peoples Day. Associated names with this event include National Day of Awareness For Missing and Murdered Indigenous Women And Girls. Red Dress Day is one of several campaigns started by activists to call attention to disproportionate rates of violence against Indigenous women. Background Inspired by the work of Métis artist Jaime Black that would go on to spark the REDress Project, this day draws attention to the more than 1,000 Missing and Murdered Indigenous Women and Girls (MMIWG) in Canada. The project was started in 2010 after Black displayed an installation at the University of Winnipeg that included a series of empty red dresses to honour and symbolize the lost lives of Indigenous women at the hands of violence. On this day, participants are encouraged to display empty red dresses in public spaces or wear red dresses to show support for the lives of MMIWG. Additional activities taking place on this day also include marches, memorials, and walks across Canada. Installations of red dresses are displayed in museums, university campuses, and exhibits. The origins of this day began with the Walking With Our Sisters – K'omoks event, where a public memorial art installation had taken place in honour of Missing and Murdered Indigenous Women. May 5th was the birthday of Lisa Marie Young, a 21-year-old Tla-o-qui-aht woman who disappeared under suspicious circumstances from Nanaimo, BC on Canada Day 2002. Despite hundreds of tips from the public, RCMP investigators have made no progress in the case. 
In 2016, the first Red Dress Awareness Campaign and Installation were organised and generated higher volumes of public attention on a national scale in both Canada and the United States. Although the REDress Project was not created as a call for a National Inquiry into Missing and Murdered Indigenous Women and Girls, many supporters of the project and subsequent REDress Day are in favour of such an investigation, and used the event to put pressure on the Canadian government. Legacy Alberta Staff from the Awo Taan Native Healing Lodge, a shelter for Indigenous women and children in Calgary, Alberta, organized a local Red Dress Day event on November 19, 2019 at Calgary's Central Library. Attendees of the event had the chance to learn more about Missing and Murdered Indigenous Women and Girls, and were also invited to make a small felt doll wearing a red dress, inspired by the original project's installation of red dresses. Ontario Students at Cardinal Leger Secondary School, in Brampton, Ontario, paid tribute to Missing and Murdered Indigenous Women and Girls on May 3, 2017, during an event inspired by Red Dress Day. The students wrote names taken from the Missing and Murdered Indigenous Women and Girls list on approximately 1,200 red feathers, which were then placed in trees near the school. See also Am I Next Walking With Our Sisters Idle No More Sisters in Spirit References 2010 establishments in Canada Remembrance days Missing and Murdered Indigenous Women and Girls movement
```smalltalk
using System.Drawing;
using System.Drawing.Drawing2D;

namespace Svg
{
    /// <summary>
    /// An element used to group SVG shapes.
    /// </summary>
    [SvgElement("g")]
    public class SvgGroup : SvgVisualElement
    {
        /// <summary>
        /// Gets the <see cref="GraphicsPath"/> for this element.
        /// </summary>
        public override System.Drawing.Drawing2D.GraphicsPath Path(ISvgRenderer renderer)
        {
            return GetPaths(this, renderer);
        }

        /// <summary>
        /// Gets the bounds of the element: the union of the bounds of all
        /// visual children.
        /// </summary>
        /// <value>The bounds.</value>
        public override System.Drawing.RectangleF Bounds
        {
            get
            {
                var result = new RectangleF();
                foreach (var child in this.Children)
                {
                    var visual = child as SvgVisualElement;
                    if (visual == null)
                    {
                        continue;
                    }
                    // While the accumulated rectangle is still empty, take the
                    // child's bounds directly: Union on an empty rectangle would
                    // behave as if it started at X=0, Y=0 and skew the result.
                    if (result.IsEmpty)
                    {
                        result = visual.Bounds;
                    }
                    else
                    {
                        var childBounds = visual.Bounds;
                        if (!childBounds.IsEmpty)
                        {
                            result = RectangleF.Union(result, childBounds);
                        }
                    }
                }
                return result;
            }
        }

        // A group draws no geometry of its own; only its children render.
        protected override bool Renderable
        {
            get { return false; }
        }

        public override SvgElement DeepCopy()
        {
            return DeepCopy<SvgGroup>();
        }

        public override SvgElement DeepCopy<T>()
        {
            var copy = base.DeepCopy<T>() as SvgGroup;
            if (this.Fill != null)
            {
                copy.Fill = this.Fill.DeepCopy() as SvgPaintServer;
            }
            return copy;
        }
    }
}
```
The Twelve Mile Square Reservation, also called the Twelve Mile Square Reserve, was a tract of land in Ohio ceded by Indians to the United States of America in the Treaty of Greenville in 1795. This particular area of land immediately surrounding Fort Miami was considered to be of strategic importance by the United States government representatives. It was subsequently surveyed in a manner different from surrounding land, and lots sold, or granted, to settlers. History In 1680, Frontenac, the French Governor of Canada, established Fort Miami on the west bank of the Maumee River. It was the first fortification in Ohio built by non-indigenous people. The fort was used as a trading post for a short time, then abandoned. During the Northwest Indian War the British rebuilt Fort Miami to assist the Indians fighting the Americans. The Americans won the Battle of Fallen Timbers nearby in 1794. As a result of the battle, the Treaty of Greenville was signed, which ceded much of southern and eastern Ohio to the United States. In addition, Article 3 ceded a number of other tracts, including #8: "One piece twelve miles square, at the British fort on the Miami of the Lake, at the foot of the rapids." At that time, the Maumee river was called the Miami of the lake, and the foot of the rapids is today between the towns of Maumee and Perrysburg. In 1795, Jay's Treaty led to the fort being abandoned in 1796. Survey and settlement While the French and British occupied the region, many white people settled near the fort. The United States wished to give legal title to these people, and sell the rest of the tract. To accomplish this, on March 3, 1805, Congress arranged for a special indiscriminate location survey for the reserve. Deputy surveyor Elias Glover subdivided the tract into four townships of six miles (10 km) square each in 1805, with the southwest township being number one, the northwest number two, the northeast number three, and the southeast number four. 
The tract has no ranges, and is an original survey, unrelated to later 1821 Congress Lands surveys that surround the reserve, known as North and East of First Principal Meridian. In 1807, Congress directed that every person in the actual possession of any tract of land, in his own right, and settled, occupied and improved by him prior to the first day of July, 1796, or by some other person under whom he claimed the right to its occupancy or possession, should be confirmed in his title as an estate of inheritance in fee simple, and be entitled to a land patent for it. Each township was subdivided into 36 sections numbered boustrophedonically, as established by the Act of May 18, 1796. Under the Act of 1816, Joseph Wampler surveyed the riverfront into long lots of about each, numbered 1 to 93, and officially called "River Tracts". The private claims of the British and French era settlers were surveyed in 1817 by deputy surveyor S. Carpenter. The partial sections left after the River Tracts were called "Fractional Sections". The Act of 1816 set aside section 16 of each township as School Lands for benefit of schools in each township. These sections were eventually sold. Town lots in Perrysburg were also laid out in 1816, providing less than two sections of land in lots less than each. Land sales were through the Wooster Land Office in the Canton Land District. Modern times The tract encompasses portions of Wood and Lucas counties, including Perrysburg, Maumee, and south suburban Toledo, Ohio. The corners of the Reservation are at (NW of the intersection of Dorr and Crissey roads, forming part of the boundary with Spencer and Springfield townships), (NE of the MLK Bridge within the Maumee River), (the intersection of Tracy and Dowling roads, forming the SE corner of Perrysburg Township), (NE of the intersection of Neowash and Noward roads in Waterville Township). 
See also Ohio Lands Historic regions of the United States Notes References External links Former regions and territories of the United States Geography of Ohio History of Ohio Lucas County, Ohio Wood County, Ohio 1795 establishments in the Northwest Territory
Ernst-Lothar von Knorr (2 January 1896 – 30 October 1973) was a German composer, music educator and civil servant. The years until 1933 Born in Eitorf, Knorr grew up in Bonn. His parents were the pharmacist Dr. chem. Karl Ferdinand von Knorr and Eugenie Sophie Merten. From 1902 he had his first violin lessons. In 1907 he was admitted to the Cologne Conservatory. After graduating from high school, conservatory examination and military service, he became a violin teacher at the Heidelberg Music Academy in 1919, and in 1920 he founded the Heidelberg Chamber Orchestra Association with P. Gies. On 6 October 1923 he married in Gummersbach Elise Siebel, a granddaughter of , the co-founder of the paper mill . In the same year he became concertmaster with the orchestra of the Diaghilev Ballet in Munich, in 1924 followed the establishment and direction of the Volks- und Jugendmusikschule-Süd in Berlin. In 1925 and 1928 his son Friedrich-Carl and his daughter Ellen were born. A third child (Angelika, *1944) died shortly after birth. Professional career during the Third Reich In 1937 von Knorr became a teacher at the Staatliche Hochschule für Musik in Berlin, where he received a professorship in 1939. From 1937 until August 31, 1941 he was additionally music adviser of the Oberkommando des Heeres and was promoted to Captain, then to Major of the Wehrmacht. Towards the end of his term of office as music adviser of the OKH, in 1941, together with Eduard Wagner, the general and later resistance fighter of the 20 July plot, he compiled a list of various music creators, which was signed by Adolf Hitler and meant an exemption from active military service for 360 musicians. Knorr also appointed some musicians as teachers at the army music schools, exempting them from active military service. 
After his term of office as music consultant of the OKH, he was appointed on 31 August 1941, against the resistance of Herbert Gerigk and the Amt Rosenberg, deputy director of the Hochschule für Musik und Darstellende Kunst Frankfurt am Main and the military music school in Frankfurt. In 1942 he joined the NSDAP (member number: 8.995.057). The years since 1945 As late as 1944 von Knorr began building up the , where he was also director. In 1945 he was removed from his post as a senior civil servant, but in 1948 he received a certificate stating that he had only been "slightly [Nazi] burdened" and was able to continue his work. However, the university was constantly threatened with closure; von Knorr therefore soon applied for other positions. In 1952 he became director of the Hochschule für Musik, Theater und Medien Hannover. From 1955 he was also military music advisor to Theodor Blank, the first Minister of Defence of the Federal Republic of Germany. In 1956, his wife Elise died after a long illness. Two years later, von Knorr married the Swedish music student Britt-Gun Lidin. After his retirement in 1961, von Knorr took over the direction of the University of Music and Theatre in his home town of Heidelberg until 1969. In 1961 Knorr received the Grand Order of Merit of the Federal Republic of Germany. Legacy In 2014, Knorr's written estate was transferred to the Badische Landesbibliothek as a gift. It contains documents of various kinds, including compositions, extensive correspondence, manuscripts, photographs, etc., from the period from 1944 until his death in 1973 at the age of 77. Earlier documents do not exist, since Knorr's apartment at the time was destroyed in a bombing raid on Frankfurt in 1944, and his documents, including the manuscripts of his compositions up to that time, were almost completely destroyed. Literature Jasmin Hambsch: „Einleitung“, in dies.: Nachlass Ernst-Lothar von Knorr – Findbuch. 
Karlsruhe 2018, online Ernst-Lothar von Knorr: Lebenserinnerungen. Erlebtes musikalisches Geschehen in Deutschland. from the estate published by the Ernst-Lothar von Knorr-Stiftung, with an introduction by Thomas Schipperges. P. J. Tonger Musikverlag, KΓΆln-Rodenkirchen 1996, . Ernst Lothar von Knorr, Munzinger in Internationales Biographisches Archiv. 45/1960, 31 October 1960 References External links Digitalisierte Dokumente aus dem Nachlass in den Digitalen Sammlungen der Badische Landesbibliothek Lied-Portal Nachlass von Ernst-Lothar von Knorr auf der Webseite der Badische Landesbibliothek 1896 births 1973 deaths North Rhine-Westphalia 20th-century classical composers 20th-century German composers Concertmasters Nazi Party members Commanders Crosses of the Order of Merit of the Federal Republic of Germany
```go
//
// Last.Backend LLC CONFIDENTIAL
// __________________
//
// [2014] - [2019] Last.Backend LLC
// All Rights Reserved.
//
// NOTICE: All information contained herein is, and remains
// the property of Last.Backend LLC and its suppliers,
// if any. The intellectual and technical concepts contained
// herein are proprietary to Last.Backend LLC
// and its suppliers and may be covered by Russian Federation and Foreign Patents,
// patents in process, and are protected by trade secret or copyright law.
// Dissemination of this information or reproduction of this material
// is strictly forbidden unless prior written permission is obtained
// from Last.Backend LLC.
//

package views

// NOTE(review): the swagger:model IDs in this file all read "views_ingress*"
// although the types describe an exporter — this looks like a copy/paste from
// the ingress views. Left unchanged because generated API docs and clients may
// reference the current IDs; confirm and rename to "views_exporter*".

// Exporter - default exporter view structure: metadata plus current status.
// swagger:model views_ingress
type Exporter struct {
	Meta   ExporterMeta   `json:"meta"`
	Status ExporterStatus `json:"status"`
}

// ExporterList - exporter map list, keyed by string identifier.
// swagger:model views_ingress_list
type ExporterList map[string]*Exporter

// ExporterMeta - exporter metadata structure; embeds the shared views Meta
// and adds the exporter version.
// swagger:model views_ingress_meta
type ExporterMeta struct {
	Meta
	Version string `json:"version"`
}

// ExporterStatus - exporter state struct (readiness flag only).
// swagger:model views_ingress_status
type ExporterStatus struct {
	Ready bool `json:"ready"`
}

// ExporterManifest - exporter manifest view (meta section only).
// swagger:model views_ingress_spec
type ExporterManifest struct {
	Meta ExporterManifestMeta `json:"meta"`
}

// ExporterManifestMeta - manifest metadata.
type ExporterManifestMeta struct {
	Initial bool `json:"initial"`
}
```
Elijah Alick (born 28 March 1996) is an Australian professional rugby league footballer who currently plays for the Brisbane Broncos in the National Rugby League (NRL). References Living people 1996 births Australian rugby league players People educated at Brisbane State High School Rugby league wingers Place of birth missing (living people)
```go package hooks import ( "context" dump "github.com/fsamin/go-dump" "github.com/rockbears/log" "github.com/ovh/cds/sdk" ) func (s *Service) doScheduledTaskExecution(ctx context.Context, t *sdk.TaskExecution) (*sdk.WorkflowNodeRunHookEvent, error) { log.Debug(ctx, "Hooks> Processing scheduled task %s", t.UUID) // Prepare a struct to send to CDS API h := sdk.WorkflowNodeRunHookEvent{ WorkflowNodeHookUUID: t.UUID, } //Prepare the payload //Anything can be pushed in the configuration, just avoid sending payloadValues := map[string]string{} if payload, ok := t.Config[sdk.Payload]; ok && payload.Value != "{}" { var payloadInt interface{} if err := sdk.JSONUnmarshal([]byte(payload.Value), &payloadInt); err == nil { e := dump.NewDefaultEncoder() e.Formatters = []dump.KeyFormatterFunc{dump.WithDefaultLowerCaseFormatter()} e.ExtraFields.DetailedMap = false e.ExtraFields.DetailedStruct = false e.ExtraFields.Len = false e.ExtraFields.Type = false m1, errm1 := e.ToStringMap(payloadInt) if errm1 != nil { log.Error(ctx, "Hooks> doScheduledTaskExecution> Cannot convert payload to map %s", errm1) } else { payloadValues = m1 } payloadValues["payload"] = payload.Value } else { log.Error(ctx, "Hooks> doScheduledTaskExecution> Cannot unmarshall payload %s", err) } } for k, v := range t.Config { switch k { case sdk.HookConfigProject, sdk.HookConfigWorkflow, sdk.SchedulerModelCron, sdk.SchedulerModelTimezone, sdk.Payload: default: payloadValues[k] = v.Value } } payloadValues["cds.triggered_by.username"] = sdk.SchedulerUsername payloadValues["cds.triggered_by.fullname"] = sdk.SchedulerFullname h.Payload = payloadValues return &h, nil } ```
```javascript module.exports = async function (results) { // wait 1ms await new Promise((resolve) => setTimeout(resolve, 1)) // return results as JSON return 'Async results:\n' + JSON.stringify(results) } ```
Indian Deep Farm is an historic home and farm which are located in Newlin Township, Chester County, Pennsylvania. This property was added to the National Register of Historic Places in 1985. History and architectural features The house consists of a two-story, five-bay, brick, main block which dates to the 1830s, with a two-story, two-bay, stone core section. Both the main block and core have gable roofs. Also located on the property are a stone and frame bank barn, shop, slaughterhouse, and one-and-one-half-story frame tenant house that was built over a spring. The property also includes a stone walled stockyard. It was added to the National Register of Historic Places in 1985. References Farms on the National Register of Historic Places in Pennsylvania Houses in Chester County, Pennsylvania National Register of Historic Places in Chester County, Pennsylvania
Fazel Darbandi (), also Fadil al-Darbandi (), also known as Molla Agha Darbandi (Persian: ) (died 1869/1870, Tehran), was an Iranian Shia cleric and scholar of the Qajar era. Biography Darbandi's surname implies an origin in the city of Darband (i.e. Derbent, now part of Dagestan, Russia), or its environs, although the exact whereabouts of his place and date of birth are unknown. Darbandi grew up to study in Najaf, in Ottoman Iraq, where he was a student of Molla Mohammad Sharif-ol-Olama Mazandarani (died 1829–1830). Darbandi later tried to teach in the city of Karbala, but the "peculiarities" of his persona made it "impossible" to retain any number of students. Afterwards, he moved to Iran, arriving in Tehran where prime minister (αΉ£adr-e aΚΏαΊ“am) Mirza Aqa Khan Nuri had just been dismissed. Darbandi was famous amongst his contemporaries for being eloquent in both Persian as well as Arabic, as well as for "erudition" in the science related to "Hadith transmitters". After his death, he was buried in Karbala. Notes References Sources 1869 deaths 1870 deaths Iranian Shia clerics Iranian scholars 19th-century Iranian people People of Qajar Iran
Walker Library may refer to: David S. Walker Library in Tallahassee, Florida, USA James E. Walker Library at the Middle Tennessee State University in Murfreesboro, Tennessee, USA Vol Walker Library at the University of Arkansas in Fayetteville, Arkansas, USA Walker Library (Minneapolis) in Minneapolis, Minnesota, USA The Walker Library of the History of Human Imagination, a private library in Ridgefield, Connecticut, USA Walker Management Library at Vanderbilt University in Nashville, Tennessee, USA Walker Memorial Library in Westbrook, Maine, USA
```smalltalk " Unit test for trait handling " Class { #name : 'TraitTest', #superclass : 'TraitAbstractTest', #category : 'Traits-Tests', #package : 'Traits-Tests' } { #category : 'tests' } TraitTest >> createT1 [ | t1 | t1 := self newTrait: #T1 with: #(a b c). t1 compile: 'setValues a := 1. b := 2. c := 3. '. t1 compile: 'getValues ^ a + b + c '. ^ t1 ] { #category : 'tests' } TraitTest >> createT2 [ | t2 | t2 := self newTrait: #T2 with: #(d e f). t2 compile: 'setValues2 d := 1. e := 2. f := 3. '. t2 compile: 'getValues2 ^ d + e + f + self getValues '. ^ t2 ] { #category : 'tests' } TraitTest >> createT3 [ | t3 | "This is a trait with a method with a pragma" t3 := self newTrait: #T3 with: #(). t3 compile: 'aMethod <aPragma> ^ 42 '. t3 class compile: 'aClassMethod <aPragma> ^ 42 '. ^ t3 ] { #category : 'tests' } TraitTest >> testAddingATraitToAClassWithSubclasses [ "This is a regression test when adding a trait to a class without traits and with subclasses was not possible." | c1 c2 t1 | c1 := self newClass: #C1. c2 := self newClass: #C2 superclass: c1 traits: { }. t1 := self newTrait: #T1. self shouldnt: [ self class classInstaller update: c1 to: [ :aBuilder | aBuilder traits: t1 ] ] raise: Error. self assert: (c1 includesTrait: t1). self deny: (c2 includesTrait: t1) ] { #category : 'tests' } TraitTest >> testClassHavingAnInstanceVariableUsersDifferenThanUsers [ | t1 aClass | t1 := self newTrait: #T1 with: #( users ). aClass := self newClass: #C1 superclass: Object traits: { }. aClass class setTraitComposition: { t1 }. self assert: (aClass class allSlots anySatisfy: [ :e | e name = #users ]). self assert: (aClass class slotNamed: #users) definingClass equals: t1 ] { #category : 'tests' } TraitTest >> testClassTraitThatHasAPragmaHasCorrectTraitSourceAfterRecompile [ | t3 aClass | t3 := self createT3. aClass := self newClass: #C1 superclass: Object traits: { t3 }. self assert: (aClass class >> #aClassMethod) traitSource equals: t3 class asTraitComposition. 
(aClass class >> #aClassMethod) recompile. self assert: (aClass class >> #aClassMethod) traitSource equals: t3 class asTraitComposition ] { #category : 'tests' } TraitTest >> testClassTraits [ | t1 t2 aClass | <ignoreNotImplementedSelectors: #(otherSelector otherSelector2 anAlias aSelector)> t1 := self newTrait: #T1 with: #(). t2 := self newTrait: #T2 with: #(). t1 classTrait compile: 'aSelector ^ 21'. t1 classTrait compile: 'otherSelector ^ 42'. t2 classTrait compile: 'otherSelector2 ^ 42'. aClass := self newClass: #C1 superclass: Object traits: {}. aClass class setTraitComposition: t1 classSide - { #aSelector }. self assert: aClass otherSelector equals: 42. aClass := self newClass: #C1 superclass: Object traits: {}. aClass class setTraitComposition: t1 classSide + t2 classSide. self assert: aClass otherSelector equals: 42. self assert: aClass otherSelector2 equals: 42. self assert: aClass aSelector equals: 21. aClass := self newClass: #C1 superclass: Object traits: {}. aClass class setTraitComposition: t1 classSide @ {#anAlias -> #aSelector}. self assert: aClass anAlias equals: 21. self assert: aClass aSelector equals: 21 ] { #category : 'tests' } TraitTest >> testClassUsesTrait [ | t1 superclass subclass | t1 := self newTrait: #T1 with: {}. superclass := self newClass: #Superclass with:#() traits: t1. subclass := self newClass: #Subclass superclass: superclass traits: {}. self assert: (superclass usesTrait: t1). self assert: (superclass usesTraitLocally: t1). self assert: (subclass usesTrait: t1). self assert: (subclass usesTraitLocally: t1) not ] { #category : 'tests' } TraitTest >> testClassUsingTraitsDoesNotHaveUsers [ | t1 aClass | t1 := self newTrait: #T1 with: #(). aClass := self newClass: #C1 superclass: Object traits: {t1}. self assert: (aClass class allSlots noneSatisfy: [:e | e name = #users]) ] { #category : 'tests' } TraitTest >> testComposedBy [ "tests the #isComposedBy: aTrait method" self assert: (Trait3 isComposedBy: Trait2). 
self deny: (Trait2 isComposedBy: Trait3). self deny: (Trait3 isComposedBy: Object) ] { #category : 'tests' } TraitTest >> testDefinedMethods [ [ Trait1 compile: 'extensionMethod ' classified: '*AGeneratedPackageForTest'. Trait3 compile: 'extensionMethod ' classified: '*AGeneratedPackageForTest'. MOPTestClassC compile: 'extensionMethod ' classified: '*AGeneratedPackageForTest'. "Test local methods of a trait standalone" self assertCollection: Trait1 localMethods hasSameElements: { (Trait1 >> #c1). (Trait1 >> #c). (Trait1 >> #extensionMethod) }. self assertCollection: Trait1 definedMethods hasSameElements: { (Trait1 >> #c1). (Trait1 >> #c) }. "Test local methods of a trait using a trait" self assertCollection: Trait3 localMethods hasSameElements: { (Trait3 >> #c3). (Trait3 >> #c). (Trait3 >> #extensionMethod) }. self assertCollection: Trait3 definedMethods hasSameElements: { (Trait3 >> #c3). (Trait3 >> #c) }. "Test local methods of a class using a trait" self assertCollection: MOPTestClassC localMethods hasSameElements: { (MOPTestClassC >> #c). (MOPTestClassC >> #extensionMethod)}. self assertCollection: MOPTestClassC definedMethods hasSameElements: { (MOPTestClassC >> #c) } ] ensure: [ self packageOrganizer removePackage: 'AGeneratedPackageForTest' ] ] { #category : 'tests' } TraitTest >> testDefinedSelectors [ [ Trait1 compile: 'extensionMethod ' classified: '*AGeneratedPackageForTest'. Trait3 compile: 'extensionMethod ' classified: '*AGeneratedPackageForTest'. MOPTestClassC compile: 'extensionMethod ' classified: '*AGeneratedPackageForTest'. "Test local selectors of a trait standalone" self assertCollection: Trait1 localSelectors hasSameElements: #( #c1 #c #extensionMethod ). self assertCollection: Trait1 definedSelectors hasSameElements: #( #c1 #c ). "Test local selectors of a trait using a trait" self assertCollection: Trait3 localSelectors hasSameElements: #( #c3 #c #extensionMethod ). self assertCollection: Trait3 definedSelectors hasSameElements: #( #c3 #c ). 
"Test local selectors of a class using a trait" self assertCollection: MOPTestClassC localSelectors hasSameElements: #( #c #extensionMethod ). self assertCollection: MOPTestClassC definedSelectors hasSameElements: #( #c ) ] ensure: [ self packageOrganizer removePackage: 'AGeneratedPackageForTest' ] ] { #category : 'tests - empty' } TraitTest >> testEmptyCompositionManagesTEmpty [ | t1 | t1 := self newTrait: #T1 traits: TEmpty. self assert: t1 hasEmptyComposition ] { #category : 'tests' } TraitTest >> testErrorClassCreation [ | trait aSubclass aClass | trait := self class classInstaller make: [ :aBuilder | aBuilder name: #TMyTrait; package: self packageNameForTests; beTrait ]. aClass := self class classInstaller make: [ :aClassBuilder | aClassBuilder name: #AClass; superclass: nil; package: self packageNameForTests ]. "----------------" aSubclass := self class classInstaller make: [ :aClassBuilder | aClassBuilder name: #AClass2; traitComposition: trait; superclass: aClass; package: self packageNameForTests ]. "----------------" "Change the superclass of AClass" aClass := self class classInstaller make: [ :aClassBuilder | aClassBuilder name: #AClass; superclass: Object; package: self packageNameForTests ]. self assert: trait traitUsers asArray equals: { aSubclass }. self assert: aSubclass traits asArray equals: { trait } ] { #category : 'tests' } TraitTest >> testForbidInstantiation [ | trait | trait := self class classInstaller make: [ :aBuilder | aBuilder name: #TMyTrait; package: self packageNameForTests; beTrait ]. self should: [ trait basicNew ] raise: Error ] { #category : 'tests' } TraitTest >> testIndirectSequence [ | t1 t2 t3 c1 obj | <ignoreNotImplementedSelectors: #(setValues setValues2 getValues getValues2)> t1 := self createT1. t2 := self createT2. t3 := self newTrait: #T3 traits: t1 + t2. c1 := self newClass: #C1 with: #(g h) traits: t3. obj := c1 new. obj setValues. obj setValues2. self assert: (TraitedClass basicUsers includesAll: { t1 class. 
t2 class. t3 class. c1 class}).
	self assert: obj getValues equals: 6.
	self assert: obj getValues2 equals: 12
]

{ #category : 'tests' }
TraitTest >> testIsUsed [
	"A trait with at least one user answers true to #isUsed, on both the instance side and the class side."

	self assert: Trait1 isUsed.
	self assert: Trait1 classTrait isUsed
]

{ #category : 'tests' }
TraitTest >> testLocalMethodWithSameCodeInTrait [
	"Test whether there are no unintentional overridden traits methods"

	SystemNavigation new allBehaviorsDo: [ :each |
		each hasTraitComposition ifTrue: [
			each selectorsDo: [ :selector |
				(each includesLocalSelector: selector) ifTrue: [
					(each traitComposition traitDefining: selector ifNone: [ nil ])
						ifNotNil: [ :trait |
							(trait selectors includes: selector) ifTrue: [
								self deny: (trait compiledMethodAt: selector) equals: each >> selector ] ] ] ] ] ]
]

{ #category : 'tests' }
TraitTest >> testLocalMethods [
	"Test local methods of a trait standalone"
	self assertCollection: Trait1 localMethods hasSameElements: {
		(Trait1 >> #c1).
		(Trait1 >> #c) }.
	"Test local methods of a trait using a trait"
	self assertCollection: Trait3 localMethods hasSameElements: {
		(Trait3 >> #c3).
		(Trait3 >> #c) }.
	"Test local methods of a class using a trait"
	self assertCollection: MOPTestClassC localMethods hasSameElements: { (MOPTestClassC >> #c) }
]

{ #category : 'tests' }
TraitTest >> testLocalSelectors [
	"Test local selectors of a trait standalone"
	self assertCollection: Trait1 localSelectors hasSameElements: #( #c1 #c ).
	"Test local selectors of a trait using a trait"
	self assertCollection: Trait3 localSelectors hasSameElements: #( #c3 #c ).
	"Test local selectors of a class using a trait"
	self assertCollection: MOPTestClassC localSelectors hasSameElements: #( #c )
]

{ #category : 'tests' }
TraitTest >> testMethodsAddedInMetaclass [
	"The class side of a traited class carries exactly the TraitedClass selectors."

	| t1 c1 |
	t1 := self createT1.
	c1 := self newClass: #C1 with: #(g h) traits: t1.
	self assertCollection: c1 class selectors sorted equals: TraitedClass selectors sorted
]

{ #category : 'tests' }
TraitTest >> testMethodsAddedInMetaclassNotPresentInSubclasses [
	"Subclasses of a traited class do not get local copies of the TraitedClass methods."

	| t1 c1 c2 |
	t1 := self createT1.
	c1 := self newClass: #C1 with: #( g h ) traits: t1.
	c2 := self newClass: #C2 superclass: c1 traits: { }.
	self assertCollection: c2 class localSelectors sorted equals: #( )
]

{ #category : 'tests' }
TraitTest >> your_sha256_hashclass [
	| t1 c1 c2 t2 |
	t1 := self createT1.
	t2 := self createT2.
	c1 := self newClass: #C1 with: #(g h) traits: t1.
	c2 := self newClass: #C2 superclass: c1 traits: {t2}.
	self assertCollection: c2 class localSelectors sorted equals: #().
	"Even without local copies, every TraitedClass selector must still be reachable via lookup."
	self assert: (TraitedClass selectors allSatisfy: [ :selector | (c2 class lookupSelector: selector) isNotNil ])
]

{ #category : 'tests' }
TraitTest >> testOrigin [
	"The origin of a method obtained through a trait (directly or transitively) is the trait that defines it."

	| tr1 tr2 tr3 tr23 |
	tr1 := self newTrait: #TTT1 traits: { }.
	tr2 := self newTrait: #TTT2 traits: { tr1 }.
	tr3 := self newTrait: #TTT3 traits: { tr1 }.
	tr23 := self newTrait: #TTT23 traits: { (tr3 + tr2) }.
	tr1 compile: 'foo ^ 4'.
	self assert: (tr1 >> #foo) origin identicalTo: tr1.
	self assert: (tr2 >> #foo) origin identicalTo: tr1.
	self assert: (tr3 >> #foo) origin identicalTo: tr1.
	"-----------"
	"For TR2"
	self assert: (tr2 traitComposition traitDefining: #foo) innerClass identicalTo: tr1.
	self assert: (tr2 >> #foo) origin identicalTo: tr1.
	"-----------"
	"-----------"
	"For TR23"
	self assert: (tr23 traitComposition traitDefining: #foo) innerClass identicalTo: tr1.
	self assert: (tr23 >> #foo) origin identicalTo: tr1
	"----------"
]

{ #category : 'tests' }
TraitTest >> testOriginWithRequiredMethod [
	| tr1 c1 |
	tr1 := self newTrait: #TTT1 traits: { }.
	tr1 compile: 'foo ^ self explicitRequirement'.
	self assert: (tr1 >> #foo) origin identicalTo: tr1.
	c1 := self newClass: #CTT1 superclass: Object traits: tr1.
	self assert: (c1 >> #foo) origin identicalTo: tr1
]

{ #category : 'tests' }
TraitTest >> testOriginWithRequiredMethodInTraitChain [
	"check that the origin of a explicit requirement is the trait defining this method, even if we check this from another *trait* using the first trait. Tr2 uses Tr1, Tr1 defines a explicit requirement #foo, the origin of Tr2>>foo is Tr1"

	| tr1 tr2 |
	tr1 := self newTrait: #TTT1 traits: { }.
	tr1 compile: 'foo ^ self explicitRequirement'.
	self assert: (tr1 >> #foo) origin identicalTo: tr1.
	tr2 := self newTrait: #TTT2 traits: { tr1 }.
	self assert: (tr2 >> #foo) origin identicalTo: tr1
]

{ #category : 'tests' }
TraitTest >> testPackageIsUpdatedInClassSide [
	"Removing a class-side method override must remove it from the package method count."

	| c1 t1 |
	t1 := self newTrait: #T1.
	t1 class compile: 'msg ^ 1'.
	c1 := self newClass: #C1 superclass: Object traits: t1.
	c1 class compile: 'msg ^ 12'.
	self assert: self packageNameForTests asPackage methods size equals: 2.
	(c1 class >> #msg) removeFromSystem.
	self assert: self packageNameForTests asPackage methods size equals: 1
]

{ #category : 'tests' }
TraitTest >> testPackageIsUpdatedInInstanceSide [
	| c1 t1 |
	t1 := self newTrait: #T1.
	t1 compile: 'msg ^ 1'.
	c1 := self newClass: #C1 superclass: Object traits: t1.
	c1 compile: 'msg ^ 12'.
	self assert: (c1 >> #msg) origin equals: c1.
	self assert: self packageNameForTests asPackage methods size equals: 2.
	(c1 >> #msg) removeFromSystem.
	self assert: self packageNameForTests asPackage methods size equals: 1
]

{ #category : 'tests' }
TraitTest >> testRecompilingTraitClassMethodRecompilesTheMethodInTheUsers [
	"Recompiling a class-side trait method yields fresh compiled methods in both the trait and its users."

	| trait class priorTraitMethod priorClassMethod |
	trait := self newTrait: #TTraitForTest.
	class := self newClass: #ClassUsingTTraitForTest traits: trait.
	trait class compile: 'test ^ #test'.
	priorTraitMethod := trait class >> #test.
	priorClassMethod := class class >> #test.
	priorTraitMethod recompile.
	self deny: trait class >> #test identicalTo: priorTraitMethod.
	self deny: class class >> #test identicalTo: priorClassMethod
]

{ #category : 'tests' }
TraitTest >> testRecompilingTraitMethodRecompilesTheMethodInTheUsers [
	| trait class priorTraitMethod priorClassMethod |
	trait := self newTrait: #TTraitForTest.
	class := self newClass: #ClassUsingTTraitForTest traits: trait.
	trait compile: 'test ^ #test'.
	priorTraitMethod := trait >> #test.
	priorClassMethod := class >> #test.
	priorTraitMethod recompile.
	self deny: trait >> #test identicalTo: priorTraitMethod.
	self deny: class >> #test identicalTo: priorClassMethod
]

{ #category : 'tests' }
TraitTest >> testRedefiningAClassAsTraitShouldRaiseError [
	self newClass: #C1.
	self should: [ self newTrait: #C1 ] raise: Error
]

{ #category : 'tests' }
TraitTest >> testRedefiningATraitAsAClassShouldRaiseError [
	self newTrait: #C1 with: #().
	self should: [ self newClass: #C1 ] raise: Error
]

{ #category : 'tests' }
TraitTest >> testRemakingATraitUsedByAnAnonymousClassKeepItAnonymous [
	"Regression test were an anonymous class became non anonymous if we updated a trait used by it."

	| t1 aClass |
	t1 := self newTrait: #T1 with: #( ).
	aClass := Smalltalk anonymousClassInstaller make: [ :builder | builder traitComposition: t1 ].
	self assert: aClass isAnonymous.
	t1 := self newTrait: #T1 with: #( aSlot ).
	self assert: aClass isAnonymous
]

{ #category : 'tests' }
TraitTest >> testRemoveFromSystem [
	| aClass aTrait |
	aTrait := self newTrait: #T1.
	aClass := self newClass: #AClassForTests traits: aTrait.
	aTrait removeFromSystem: false.
	self deny: aClass hasTraitComposition.
	self assert: aTrait isObsolete
]

{ #category : 'tests' }
TraitTest >> testRemovingTraitsDoesNotModifiyTraitedSubclasses [
	"Redefining c1 without traits must leave the composition of its subclass c2 untouched."

	| t1 t2 c2 c1 |
	t1 := self newTrait: #T1.
	t2 := self newTrait: #T2.
	c1 := self newClass: #C1 traits: t1.
	c2 := self newClass: #C2 superclass: c1 traits: t2.
	self assert: c1 hasTraitComposition.
	self assert: (c1 traits includes: t1).
	self assert: c2 hasTraitComposition.
	self assert: (c2 traits includes: t2).
	self assert: (t2 users includes: c2).
	c1 := self newClass: #C1 traits: { }.
	self deny: c1 hasTraitComposition.
	self deny: (c1 traits includes: t1).
	self assert: c2 hasTraitComposition.
	self assert: (c2 traits includes: t2).
	self assert: (t2 users includes: c2)
]

{ #category : 'tests' }
TraitTest >> testRemovingTraitsRemoveTraitedClassMethods [
	| t1 t2 c1 |
	t1 := self createT1.
	t2 := self createT2.
	c1 := self newClass: #C1 with: #(g h) traits: t1 + t2.
	self assert: (c1 class includesSelector: #traits).
	c1 := self newClass: #C1 with: #(g h) traits: {}.
	self deny: (c1 class includesSelector: #traits)
]

{ #category : 'tests' }
TraitTest >> testRemovingTraitsRemoveTraitedClassMethodsWithSubclasses [
	| t1 t2 c2 c1 c3 |
	t1 := self createT1.
	t2 := self createT2.
	c1 := self newClass: #C1 with: #(g h) traits: t1.
	c2 := self newClass: #C2 superclass: c1 traits: t2.
	c3 := self newClass: #C3 superclass: c1 traits: {}.
	self assert: (c1 class includesSelector: #traits).
	self deny: (c1 class includesLocalSelector: #traits).
	self assert: (c2 class lookupSelector: #traits) isNotNil.
	self deny: (c2 class includesLocalSelector: #traits).
	self assert: (c3 class lookupSelector: #traits) isNotNil.
	self deny: (c3 class includesLocalSelector: #traits).
	c1 := self newClass: #C1 with: #(g h) traits: {}.
	"Now c1 has no more traits, c2 remains unchanged"
	self deny: (c1 class includesSelector: #traits).
	self assert: (c2 class includesSelector: #traits).
	self deny: (c2 class includesLocalSelector: #traits).
	self assert: (c2 class includesSelector: #traits).
	self deny: (c2 class includesLocalSelector: #traits).
]

{ #category : 'tests' }
TraitTest >> testRemovingTraitsUpdatesCategories [
	| t1 t2 c1 |
	t1 := self createT1.
	t2 := self createT2.
	c1 := self newClass: #C1 with: #(g h) traits: t1 + t2.
	c1 := self newClass: #C1 with: #(g h) traits: { }.
	c1 selectors do: [ :selector | self assert: (c1 includesSelector: selector) ].
	c1 class selectors do: [ :selector | self assert: (c1 class includesSelector: selector) ]
]

{ #category : 'tests' }
TraitTest >> testSelectorsWithExplicitOrigin [
	"Obtain the subset of selectors that are defined either locally or inherited from traits. But, exclude selectors of methods from implicit traits such as TraitedClass"

	| t1 c1 |
	t1 := self newTrait: #T1 with: #().
	t1 compile: 'instanceSideMethod'.
	t1 class compile: 'classSideMethod'.
	c1 := self newClass: #C1 traits: t1.
	self assertCollection: c1 selectorsWithExplicitOrigin hasSameElements: #(instanceSideMethod).
	self assertCollection: c1 class selectorsWithExplicitOrigin hasSameElements: #(classSideMethod)
]

{ #category : 'tests' }
TraitTest >> testSelectorsWithExplicitOriginNoTrait [
	"Obtain the subset of selectors that are defined either locally or inherited from traits. But, exclude selectors of methods from implicit traits such as TraitedClass"

	| c1 |
	c1 := self newClass: #C1.
	c1 compile: 'instanceSideMethod'.
	c1 class compile: 'classSideMethod'.
	self assertCollection: c1 selectorsWithExplicitOrigin hasSameElements: #(instanceSideMethod).
	self assertCollection: c1 class selectorsWithExplicitOrigin hasSameElements: #(classSideMethod)
]

{ #category : 'tests' }
TraitTest >> testSequence [
	| t1 t2 c1 obj |
	<ignoreNotImplementedSelectors: #(setValues setValues2 getValues getValues2 )>
	t1 := self createT1.
	t2 := self createT2.
	c1 := self newClass: #C1 with: #(g h) traits: t1 + t2.
	obj := c1 new.
	obj setValues.
	obj setValues2.
	self assert: (TraitedClass basicUsers includesAll: { t1 class. t2 class. c1 class}).
	self assert: obj getValues equals: 6.
	self assert: obj getValues2 equals: 12
]

{ #category : 'tests' }
TraitTest >> testSettingAClassInAClassTraitCompositionShouldRaiseAnError [
	| t1 c1 |
	t1 := self newTrait: #T1 with: #(a).
	c1 := self newClass: #C1.
	self should: [ t1 traitComposition: c1 ] raise: Error.
	self should: [ t1 classTrait traitComposition: c1 ] raise: Error.
]

{ #category : 'tests' }
TraitTest >> testSettingEmptyTraitCompositionDoesNotModifiyTraitedSubclasses [
	| t1 t2 c2 c1 |
	t1 := self createT1.
	t2 := self createT2.
	c1 := self newClass: #C1 with: #( g h ) traits: t1.
	c2 := self newClass: #C2 superclass: c1 traits: t2.
	c1 setTraitComposition: { }.
	self assert: c2 hasTraitComposition.
	self assert: (c2 traits includes: t2).
	self assert: (t2 users includes: c2)
]

{ #category : 'tests' }
TraitTest >> testSettingEmptyTraitCompositionUpdatesMetaclass [
	| t1 t2 c2 c1 c3 |
	t1 := self createT1.
	t2 := self createT2.
	c1 := self newClass: #C1 with: #( g h ) traits: t1.
	c2 := self newClass: #C2 superclass: c1 traits: t2.
	c3 := self newClass: #C3 superclass: c1 traits: { }.
	self assert: (c1 class includesSelector: #traits).
	self deny: (c2 class includesSelector: #traits).
	self deny: (c3 class includesSelector: #traits).
	c1 setTraitComposition: { }.
	self deny: (c1 class includesSelector: #traits).
	self assert: (c2 class includesSelector: #traits).
	self deny: (c3 class includesSelector: #traits)
]

{ #category : 'tests' }
TraitTest >> testSlotsAreNotDuplicated [
	"A slot reaching a class through two paths of the composition must appear only once."

	| t1 t2 c1 |
	t1 := self newTrait: #T1 with: #(a).
	t2 := self newTrait: #T2 traits: t1.
	c1 := self newClass: #C1 traits: t1 + t2.
	self assert: c1 traitComposition slots size equals: c1 traitComposition slots asSet size.
	self assert: c1 traitComposition slots size equals: 1
]

{ #category : 'tests' }
TraitTest >> testSubclasses [
	| t1 t2 |
	t1 := self createT1.
	t2 := self newTrait: #T2 with: #(aa bb) traits: t1.
	self deny: t1 hasSubclasses.
	self deny: t2 hasSubclasses.
	self assert: t1 subclasses isEmpty.
	self assert: t2 subclasses isEmpty.
	self deny: t1 hasSubclasses.
	self deny: t2 hasSubclasses.
]

{ #category : 'tests' }
TraitTest >> testTraitHaveUsersInstanceVariable [
	| t1 aClass |
	t1 := self newTrait: #T1 with: #(users).
	aClass := self newClass: #C1 superclass: Object traits: {t1}.
	self assert: (aClass allSlots anySatisfy: [:e | e name = #users]).
	self assert: (aClass slotNamed: #users) definingClass equals: t1
]

{ #category : 'tests' }
TraitTest >> testTraitRemoval [
	| aClass aTrait |
	aTrait := self newTrait: #T1.
	aClass := self newClass: #AClassForTests traits: aTrait.
	self assertEmpty: aClass localSelectors.
	aClass removeFromComposition: aTrait.
	self assertEmpty: aClass localSelectors.
	self assertEmpty: aClass selectors.
	self deny: aClass hasTraitComposition.
	self deny: (aTrait traitUsers includes: aClass)
]

{ #category : 'tests' }
TraitTest >> testTraitSource [
	"Locally defined methods have no trait source; methods obtained from a trait record that trait's composition."

	self assert: (MOPTestClassC >> #c) traitSource isNil.
	self assert: (MOPTestClassC >> #c2) traitSource equals: Trait2 asTraitComposition
]

{ #category : 'tests' }
TraitTest >> testTraitSourceIsPersistedWithRecompilation [
	[
	Trait2 compile: 'traitMethod ^ 1' classified: '*GeneratedPackageForTest'.
	self assert: (Trait2 >> #traitMethod) traitSource isNil.
	self assert: (MOPTestClassC >> #traitMethod) traitSource equals: Trait2 asTraitComposition.
	(Trait2 >> #traitMethod) recompile.
	self assert: (Trait2 >> #traitMethod) traitSource isNil.
	self assert: (MOPTestClassC >> #traitMethod) traitSource equals: Trait2 asTraitComposition ]
		ensure: [ self packageOrganizer removePackage: 'GeneratedPackageForTest' ]
]

{ #category : 'tests' }
TraitTest >> testTraitSourceIsPersistedWithRemovalOfMetalinks [
	[
	| metalink |
	Trait2 compile: 'traitMethod ^ 1' classified: '*GeneratedPackageForTest'.
	self assert: (Trait2 >> #traitMethod) traitSource isNil.
	self assert: (MOPTestClassC >> #traitMethod) traitSource equals: Trait2 asTraitComposition.
	metalink := MetaLink new.
	(MOPTestClassC >> #traitMethod) ast link: metalink.
	self assert: (Trait2 >> #traitMethod) traitSource isNil.
	self assert: (MOPTestClassC >> #traitMethod) traitSource equals: Trait2 asTraitComposition.
	metalink uninstall.
	self assert: (Trait2 >> #traitMethod) traitSource isNil.
	self assert: (MOPTestClassC >> #traitMethod) traitSource equals: Trait2 asTraitComposition ]
		ensure: [ self packageOrganizer removePackage: 'GeneratedPackageForTest' ]
]

{ #category : 'tests' }
TraitTest >> testTraitThatHasAPragmaHasCorrectTraitSourceAfterRecompile [
	| t3 aClass |
	t3 := self createT3.
	aClass := self newClass: #C1 superclass: Object traits: {t3}.
	self assert: (aClass >> #aMethod) traitSource equals: t3 asTraitComposition.
	(aClass >> #aMethod) recompile.
	self assert: (aClass >> #aMethod) traitSource equals: t3 asTraitComposition
]

{ #category : 'tests' }
TraitTest >> testTraitUsingTraitsPreserveSourceCode [
	| t1 t2 source |
	t1 := self createT1.
	t2 := self newTrait: #T2 traits: t1.
	source := 'aMethod: aString ^ aMethod'.
	t1 compile: source.
	self assert: (t1 >> #aMethod:) sourceCode equals: source.
	self assert: (t2 >> #aMethod:) sourceCode equals: source
]

{ #category : 'tests' }
TraitTest >> testTraitUsingTraitsPreserveSourceCodeOnClassSide [
	| t1 t2 source |
	t1 := self createT1.
	t2 := self newTrait: #T2 traits: t1.
	source := 'aMethod: aString ^ aMethod'.
	t1 class compile: source.
	self assert: (t1 class >> #aMethod:) sourceCode equals: source.
	self assert: (t2 class >> #aMethod:) sourceCode equals: source
]

{ #category : 'tests' }
TraitTest >> testTraitsMethodClassSanity [
	(Smalltalk globals allTraits flatCollect: #traitUsers) asSet do: [ :trait |
		trait selectorsDo: [ :selector | self assert: (trait >> selector) methodClass identicalTo: trait ] ]
]

{ #category : 'tests' }
TraitTest >> testTraitsUsersSanity [
	"This documents bug path_to_url"

	Smalltalk allClassesAndTraits do: [ :each |
		self assert: (each traits allSatisfy: [ :t | t traitUsers includes: each ]) ].
	Smalltalk globals allTraits do: [ :each |
		self assert: (each traitUsers allSatisfy: [ :b | b traits includes: each ]) ]
]

{ #category : 'tests' }
TraitTest >> testUsingTraitInAnonymousSubClassAndRedefiningIt [
	| t1 aClass |
	t1 := self newTrait: #T1 with: #().
	aClass := Smalltalk anonymousClassInstaller make: [ :builder |
		builder superclass: Object.
		builder traitComposition: t1. ].
	self deny: (Object subclasses includes: aClass).
	t1 := t1 classInstaller
		update: t1
		to: [ :aBuilder |
			aBuilder
				slots: #(aSlot);
				package: self packageNameForTests;
				beTrait ].
	self assert: (aClass hasSlotNamed: #aSlot).
	self deny: (Object subclasses includes: aClass)
]
```
Arnold Rodrigues is a former footballer who played as a midfielder for Salgaocar and the India football team. Born in Tanzania, he represented India internationally, after making his debut in 1977. Early life Rodrigues was born in Dar es Salaam, Tanzania, to Goan parents, before being sent to a boarding school Monte de Guirim School in Bardez, Goa, India. His family hailed from Orlim, Salcete, Goa. Career Rodrigues played for the club Salgaocar and the Goa football team at the Santosh Trophy. In 1989, Rodrigues signed with Croydon of Isthmian Football League. He also appeared with English Non-League football outfit Simba, a team having Goan players. Style of play Rodrigues was known for his speed and dribbling ability. Personal life Rodrigues is married, has lived in London, England, is a British citizen, and is a supporter of English Premier League side Liverpool. Honours India King's Cup third place: 1977 Salgaocar Puttiah Memorial Trophy: 1978 Goa Santosh Trophy: 1983–84 Panvel SC Bandodkar Gold Trophy: 1974 Individual Santosh Trophy Best Player: 1983–84 See also Goans in football History of Indian football References Living people Indian men's footballers India men's international footballers Sportspeople from Dar es Salaam Footballers from Goa Salgaocar FC players Indian expatriate sportspeople in England Indian emigrants to England British men's footballers British people of Goan descent British sportspeople of Indian descent Men's association football midfielders Croydon F.C. players Year of birth missing (living people)
The men's doubles badminton event at the 2015 Pan American Games will be held from July 11–16 at the Atos Markham Pan Am Centre in Toronto. The defending Pan American Games champions are Howard Bach and Tony Gunawan of the United States. The athletes will be drawn into an elimination stage draw. Once a team loses a match, it will no longer be able to compete. Each match will be contested as the best of three games. Schedule All times are Central Standard Time (UTC-6). Seeds (champions) (quarterfinals) Results Finals Top half Bottom half References Men's doubles draw with results Men's doubles
```cmake
# *******************************************************************************
#
#
# path_to_url
#
# Unless required by applicable law or agreed to in writing, software
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# *******************************************************************************

# Include guard: this script may be pulled in from several places.
if(blas_cmake_included)
    return()
endif()
set(blas_cmake_included true)
include("cmake/options.cmake")

# Retains existing functionality of _DNNL_USE_MKL
if(_DNNL_USE_MKL)
    set(DNNL_BLAS_VENDOR "MKL")
endif()

# No external BLAS requested -- nothing to configure.
if(DNNL_BLAS_VENDOR STREQUAL "NONE")
    return()
endif()

# Reject anything outside the known vendor set.
if (NOT "${DNNL_BLAS_VENDOR}" MATCHES "^(NONE|MKL|OPENBLAS|ARMPL|ACCELERATE|ANY)$")
    message(FATAL_ERROR "Unsupported DNNL_BLAS_VENDOR: ${DNNL_BLAS_VENDOR}.")
endif()

if (NOT "${DNNL_BLAS_VENDOR}" MATCHES "^(NONE|ARMPL|ACCELERATE)$")
    message(WARNING "Use of DNNL_BLAS_VENDOR=${DNNL_BLAS_VENDOR} is not "
        "recommended. This vendor is supported for performance analysis purposes only.")
endif()

# Fail configuration unless DNNL_TARGET_ARCH matches the given arch or is ARCH_GENERIC.
macro(expect_arch_or_generic arch)
    if(NOT "${DNNL_TARGET_ARCH}" MATCHES "^(${arch}|ARCH_GENERIC)$")
        message(FATAL_ERROR "DNNL_BLAS_VENDOR=${DNNL_BLAS_VENDOR} is not supported "
            "for DNNL_TARGET_ARCH=${DNNL_TARGET_ARCH}.")
        return()
    endif()
endmacro()

# Check chosen DNNL_BLAS_VENDOR is supported and set BLA_VENDOR accordingly
# (BLA_VENDOR drives CMake's FindBLAS module); also pick the CBLAS header name.
set(CBLAS_HEADERS "cblas.h")
if(DNNL_BLAS_VENDOR STREQUAL "MKL")
    expect_arch_or_generic("X64")
    set(BLA_VENDOR "Intel10_64_dyn")
    set(CBLAS_HEADERS "mkl_cblas.h")
elseif(DNNL_BLAS_VENDOR STREQUAL "OPENBLAS")
    set(BLA_VENDOR "OpenBLAS")
elseif(DNNL_BLAS_VENDOR STREQUAL "ACCELERATE")
    set(BLA_VENDOR "Apple")
    set(CBLAS_HEADERS "Accelerate.h")
elseif(DNNL_BLAS_VENDOR STREQUAL "ARMPL")
    set(CBLAS_HEADERS "armpl.h")
    expect_arch_or_generic("AARCH64")
    if(DNNL_CPU_RUNTIME STREQUAL "OMP")
        set(BLA_VENDOR "Arm_mp")
    else()
        set(BLA_VENDOR "Arm")
    endif()
endif()

find_package(BLAS REQUIRED)
if(BLAS_FOUND)
    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${BLAS_LINKER_FLAGS}")
    list(APPEND EXTRA_SHARED_LIBS BLAS::BLAS)

    # Check that the BLAS library supports the CBLAS interface.
    set(CMAKE_REQUIRED_LIBRARIES "${BLAS_LINKER_FLAGS};${BLAS_LIBRARIES}")
    set(CMAKE_REQUIRED_FLAGS "${BLAS_COMPILER_FLAGS}")

    # Find and include accompanying cblas.h
    # (searched relative to the first library in BLAS_LIBRARIES and $CPATH).
    list(GET BLAS_LIBRARIES 0 FIRST_BLAS_LIB)
    get_filename_component(BLAS_LIB_DIR ${FIRST_BLAS_LIB} PATH)
    find_path(BLAS_INCLUDE_DIR ${CBLAS_HEADERS}
        $ENV{CPATH}
        ${BLAS_LIB_DIR}/../include
        ${BLAS_LIB_DIR}/../../include)
    include_directories(${BLAS_INCLUDE_DIR})

    # Check we have a working CBLAS interface
    # NOTE(review): check_function_exists needs include(CheckFunctionExists);
    # presumably provided by an earlier script -- confirm.
    unset(CBLAS_WORKS CACHE)
    check_function_exists(cblas_sgemm CBLAS_WORKS)
    if(NOT CBLAS_WORKS)
        message(FATAL_ERROR "BLAS library does not support CBLAS interface.")
    endif()

    # Optional bfloat16 GEMM entry point (only some BLAS builds provide it).
    check_function_exists(cblas_sbgemm BLAS_HAS_SBGEMM)
    if(BLAS_HAS_SBGEMM)
        add_definitions(-DBLAS_HAS_SBGEMM)
    endif()

    message(STATUS "Found CBLAS: ${BLAS_LIBRARIES}")
    message(STATUS "CBLAS include path: ${BLAS_INCLUDE_DIR}")
    add_definitions(-DUSE_CBLAS)
    if (DNNL_BLAS_VENDOR STREQUAL "MKL")
        add_definitions(-DUSE_MKL)
    endif()
    if (DNNL_BLAS_VENDOR STREQUAL "ACCELERATE")
        add_definitions(-DUSE_ACCELERATE)
    endif()
endif()
```
The Lebombi River is a river of Gabon. It is one of the tributaries of the OgoouΓ©. References Lerique Jacques. 1983. Hydrographie-Hydrologie. in Geographie et Cartographie du Gabon, Atlas IllustrΓ© led by The MinistΓ¨re de l'Education Nationale de la Republique Gabonaise. Pg 14–15. Paris, France: Edicef. Perusset AndrΓ©. 1983. Oro-Hydrographie (Le Relief) in Geographie et Cartographie du Gabon, Atlas IllustrΓ© led by The MinistΓ¨re de l'Education Nationale de la Republique Gabonaise. Pg 10–13. Paris, France: Edicef. Rivers of Gabon
East Main Street Historic District is a national historic district located at Brevard, Transylvania County, North Carolina. It encompasses 14 contributing buildings, 1 contributing structure, and 1 contributing site in a predominantly residential section of Brevard. The district developed between about 1900 and 1959 and includes notable examples of Colonial Revival and Bungalow / American Craftsman style architecture. Located in the district are the separately listed St. Philip's Episcopal Church, Silvermont, William Breese, Jr., House, Charles E. Orr House, Royal and Louise Morrow House, and Max and Claire Brombacher House. Other notable buildings include the Lankford-Cleveland House (c. 1858, 1900), Brevard-Davidson River Presbyterian Church (1956, 1965, 1996), White House (c. 1900), Wyke-Barclay House (1905), and Carrier-Plummer House (1914). It was listed on the National Register of Historic Places in 2009. Gallery References Historic districts on the National Register of Historic Places in North Carolina Colonial Revival architecture in North Carolina Buildings and structures in Transylvania County, North Carolina National Register of Historic Places in Transylvania County, North Carolina
```php
<?php
/*
 *
 *
 * path_to_url
 *
 * Unless required by applicable law or agreed to in writing, software
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 */

namespace Google\Service\DataLabeling;

/**
 * Generated API model class: operation metadata whose only field, `basicConfig`,
 * is a GoogleCloudDatalabelingV1p2alpha1HumanAnnotationConfig. Field storage and
 * (de)serialization are handled by the \Google\Model base class.
 */
class your_sha256_hashionMetadata extends \Google\Model
{
  // Model class used by \Google\Model to hydrate the `basicConfig` field.
  protected $basicConfigType = GoogleCloudDatalabelingV1p2alpha1HumanAnnotationConfig::class;
  // Empty string means the field is a single object, not a collection.
  protected $basicConfigDataType = '';

  /**
   * @param GoogleCloudDatalabelingV1p2alpha1HumanAnnotationConfig
   */
  public function setBasicConfig(GoogleCloudDatalabelingV1p2alpha1HumanAnnotationConfig $basicConfig)
  {
    $this->basicConfig = $basicConfig;
  }
  /**
   * @return GoogleCloudDatalabelingV1p2alpha1HumanAnnotationConfig
   */
  public function getBasicConfig()
  {
    return $this->basicConfig;
  }
}

// Adding a class alias for backwards compatibility with the previous class name.
// NOTE(review): the alias string below appears to have lost its opening quote
// during identifier redaction -- confirm against the code generator's output.
class_alias(your_sha256_hashionMetadata::class, your_sha256_hashelVideoObjectDetectionOperationMetadata');
```
```xml
// See LICENSE in the project root for license information.

import { PrimaryButton } from '@fluentui/react/lib/Button';
import * as React from 'react';
import { useCallback } from 'react';

import { sendMessageToExtension } from '../Message/toExtension';
import { useAppSelector } from '../store/hooks';
import { useParameterArgs } from '../store/slices/parameter';

/**
 * Button that, when clicked, runs the currently selected command: it first
 * awaits the form's registered async validator and only sends the
 * `commandInfo` message to the extension host when validation passes.
 */
export const RunButton = (): JSX.Element => {
  // Name of the command currently selected in the parameter state.
  const commandName: string = useAppSelector((state) => state.parameter.commandName);
  // Async validator registered by the parameter form, if one exists yet.
  const formValidateAsync: (() => Promise<boolean>) | undefined = useAppSelector(
    (state) => state.ui.formValidateAsync
  );
  const args: string[] = useParameterArgs();

  const onClickRunButton: () => void = useCallback(async (): Promise<void> => {
    // eslint-disable-next-line no-console
    console.log('onCLickRun', commandName, formValidateAsync);

    // Nothing to run until both a command and a validator are available.
    if (!commandName || !formValidateAsync) {
      return;
    }

    const validationPassed: boolean = await formValidateAsync();
    // eslint-disable-next-line no-console
    console.log('isValid', validationPassed);
    if (!validationPassed) {
      return;
    }

    sendMessageToExtension({ command: 'commandInfo', commandName, args });
  }, [args, commandName, formValidateAsync]);

  return <PrimaryButton allowDisabledFocus text="Run" onClick={onClickRunButton} />;
};
```
```objective-c
/* Ticker.h - esp32 library that calls functions periodically

  Based on the original work of:
  The original version is part of the esp8266 core for Arduino environment.

  This library is free software; you can redistribute it and/or
  modify it under the terms of the GNU Lesser General Public

  This library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU

  You should have received a copy of the GNU Lesser General Public
  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/

#ifndef TICKER_H
#define TICKER_H

extern "C" {
#include "esp_timer.h"
}
#include <functional>

// Software timer wrapper around the ESP-IDF esp_timer API.
// attach*() members arm a repeating timer; once*() members arm a one-shot.
// All variants ultimately funnel into the private _attach_us().
class Ticker {
public:
  Ticker();
  ~Ticker();

  // Raw C callback receiving one opaque pointer-sized argument.
  typedef void (*callback_with_arg_t)(void *);
  // Argument-less std::function callback (dispatched via _static_callback).
  typedef std::function<void(void)> callback_function_t;

  // Repeating timer, interval in seconds.
  // NOTE(review): the interval is computed as float * 1000000ULL, so very long
  // intervals lose precision to float rounding -- confirm this is acceptable.
  void attach(float seconds, callback_function_t callback) {
    _callback_function = std::move(callback);
    _attach_us(1000000ULL * seconds, true, _static_callback, this);
  }

  // Repeating timer, interval in milliseconds.
  void attach_ms(uint64_t milliseconds, callback_function_t callback) {
    _callback_function = std::move(callback);
    _attach_us(1000ULL * milliseconds, true, _static_callback, this);
  }

  // Repeating timer, interval in microseconds.
  void attach_us(uint64_t micros, callback_function_t callback) {
    _callback_function = std::move(callback);
    _attach_us(micros, true, _static_callback, this);
  }

  // Repeating timer with a C callback and a pointer-sized argument.
  template<typename TArg> void attach(float seconds, void (*callback)(TArg), TArg arg) {
    static_assert(sizeof(TArg) <= sizeof(void *), "attach() callback argument size must be <= sizeof(void*)");
    // C-cast serves two purposes:
    // static_cast for smaller integer types,
    // reinterpret_cast + const_cast for pointer types
    _attach_us(1000000ULL * seconds, true, reinterpret_cast<callback_with_arg_t>(callback), reinterpret_cast<void *>(arg));
  }

  template<typename TArg> void attach_ms(uint64_t milliseconds, void (*callback)(TArg), TArg arg) {
    static_assert(sizeof(TArg) <= sizeof(void *), "attach() callback argument size must be <= sizeof(void*)");
    _attach_us(1000ULL * milliseconds, true, reinterpret_cast<callback_with_arg_t>(callback), reinterpret_cast<void *>(arg));
  }

  template<typename TArg> void attach_us(uint64_t micros, void (*callback)(TArg), TArg arg) {
    static_assert(sizeof(TArg) <= sizeof(void *), "attach() callback argument size must be <= sizeof(void*)");
    _attach_us(micros, true, reinterpret_cast<callback_with_arg_t>(callback), reinterpret_cast<void *>(arg));
  }

  // One-shot timer, delay in seconds.
  void once(float seconds, callback_function_t callback) {
    _callback_function = std::move(callback);
    _attach_us(1000000ULL * seconds, false, _static_callback, this);
  }

  // One-shot timer, delay in milliseconds.
  void once_ms(uint64_t milliseconds, callback_function_t callback) {
    _callback_function = std::move(callback);
    _attach_us(1000ULL * milliseconds, false, _static_callback, this);
  }

  // One-shot timer, delay in microseconds.
  void once_us(uint64_t micros, callback_function_t callback) {
    _callback_function = std::move(callback);
    _attach_us(micros, false, _static_callback, this);
  }

  // One-shot timer with a C callback and a pointer-sized argument.
  template<typename TArg> void once(float seconds, void (*callback)(TArg), TArg arg) {
    static_assert(sizeof(TArg) <= sizeof(void *), "attach() callback argument size must be <= sizeof(void*)");
    _attach_us(1000000ULL * seconds, false, reinterpret_cast<callback_with_arg_t>(callback), reinterpret_cast<void *>(arg));
  }

  template<typename TArg> void once_ms(uint64_t milliseconds, void (*callback)(TArg), TArg arg) {
    static_assert(sizeof(TArg) <= sizeof(void *), "attach() callback argument size must be <= sizeof(void*)");
    _attach_us(1000ULL * milliseconds, false, reinterpret_cast<callback_with_arg_t>(callback), reinterpret_cast<void *>(arg));
  }

  template<typename TArg> void once_us(uint64_t micros, void (*callback)(TArg), TArg arg) {
    static_assert(sizeof(TArg) <= sizeof(void *), "attach() callback argument size must be <= sizeof(void*)");
    _attach_us(micros, false, reinterpret_cast<callback_with_arg_t>(callback), reinterpret_cast<void *>(arg));
  }

  // Stops and disarms the timer.
  void detach();
  // True while the timer is armed.
  bool active() const;

protected:
  // Trampoline used for the std::function variants; `arg` is the Ticker itself.
  static void _static_callback(void *arg);

  callback_function_t _callback_function = nullptr;
  esp_timer_handle_t _timer;

private:
  // Arms the underlying esp_timer: `repeat` selects periodic vs one-shot.
  void _attach_us(uint64_t micros, bool repeat, callback_with_arg_t callback, void *arg);
};

#endif  // TICKER_H
```
Eric Hope (17 January 1915 – 2 August 1999) was a British pianist. Born in Stratford-upon-Avon, Warwickshire, of Baltic descent, he was a pupil at Warwick School, 1931-34. He studied piano playing in London under Kathleen Arnold. He died in Nottingham in 1999. Publications Handbook of Piano Playing, Students' Music Library, Dobson Books Ltd, December 1955, Aids to technique: Muscular development exercises for pianists and other instrumentalists, Edwin Ashdown, 1962 Basic Piano Exercises, Edwin Ashdown, 1963 References Obituary in The Portcullis, the Chronicle of Warwick School, October 1999. 1915 births 1999 deaths People educated at Warwick School British classical pianists British conscientious objectors 20th-century British pianists 20th-century British musicians
The Found Centennial 100 is a Canadian six-seat cabin monoplane produced by Found Brothers Aviation. Design and development The Centennial 100 was developed as an improved version of the Found FBA-2. Design work started in October 1966 and the prototype first flew on 7 April 1967. The aircraft is powered by a 290Β hp (216Β kW) Avco Lycoming IO-540-G1D5 engine. Three prototypes and two production aircraft were built and were used to gain certification for the type in July 1968. No further aircraft were built as the company went out of business. Specifications (Centennial) References Citations Bibliography Taylor, John W.R. Jane's All The World's Aircraft 1969-70. London:Jane's Yearbooks, 1969. The Illustrated Encyclopedia of Aircraft (Part Work 1982-1985). London: Orbis Publishing. See also Found Aircraft Found FBA-2 1960s Canadian civil utility aircraft High-wing aircraft Single-engined tractor aircraft Aircraft first flown in 1967
```java /* * * * path_to_url * * Unless required by applicable law or agreed to in writing, software * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package io.camunda.zeebe.model.bpmn.validation.zeebe; import io.camunda.zeebe.model.bpmn.instance.EventBasedGateway; import io.camunda.zeebe.model.bpmn.instance.EventDefinition; import io.camunda.zeebe.model.bpmn.instance.FlowNode; import io.camunda.zeebe.model.bpmn.instance.IntermediateCatchEvent; import io.camunda.zeebe.model.bpmn.instance.MessageEventDefinition; import io.camunda.zeebe.model.bpmn.instance.SequenceFlow; import io.camunda.zeebe.model.bpmn.instance.SignalEventDefinition; import io.camunda.zeebe.model.bpmn.instance.TimerEventDefinition; import io.camunda.zeebe.model.bpmn.util.ModelUtil; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; import org.camunda.bpm.model.xml.validation.ModelElementValidator; import org.camunda.bpm.model.xml.validation.ValidationResultCollector; public class EventBasedGatewayValidator implements ModelElementValidator<EventBasedGateway> { private static final List<Class<? 
extends EventDefinition>> SUPPORTED_EVENTS = Arrays.asList( TimerEventDefinition.class, MessageEventDefinition.class, SignalEventDefinition.class); private static final String ERROR_UNSUPPORTED_TARGET_NODE = "Event-based gateway must not have an outgoing sequence flow to other elements than message/timer/signal intermediate catch events."; @Override public Class<EventBasedGateway> getElementType() { return EventBasedGateway.class; } @Override public void validate( final EventBasedGateway element, final ValidationResultCollector validationResultCollector) { final Collection<SequenceFlow> outgoingSequenceFlows = element.getOutgoing(); if (outgoingSequenceFlows.size() < 2) { validationResultCollector.addError( 0, "Event-based gateway must have at least 2 outgoing sequence flows."); } final boolean isValid = outgoingSequenceFlows.stream().allMatch(this::isValidOutgoingSequenceFlow); if (!isValid) { validationResultCollector.addError(0, ERROR_UNSUPPORTED_TARGET_NODE); } final List<MessageEventDefinition> messageEventDefinitions = getMessageEventDefinitions(outgoingSequenceFlows).collect(Collectors.toList()); ModelUtil.verifyNoDuplicatedEventDefinition( messageEventDefinitions, error -> validationResultCollector.addError(0, error)); final List<SignalEventDefinition> signalEventDefinitions = getSignalEventDefinitions(outgoingSequenceFlows).collect(Collectors.toList()); ModelUtil.verifyNoDuplicatedEventDefinition( signalEventDefinitions, error -> validationResultCollector.addError(0, error)); if (!succeedingNodesOnlyHaveEventBasedGatewayAsIncomingFlows(element)) { validationResultCollector.addError( 0, "Target elements of an event gateway must not have any additional incoming sequence flows other than that from the event gateway."); } } private boolean isValidOutgoingSequenceFlow(final SequenceFlow flow) { final FlowNode targetNode = flow.getTarget(); if (targetNode instanceof IntermediateCatchEvent) { return isValidEvent((IntermediateCatchEvent) targetNode); } else { 
return false; } } private boolean isValidEvent(final IntermediateCatchEvent event) { final Collection<EventDefinition> eventDefinitions = event.getEventDefinitions(); if (eventDefinitions.size() != 1) { return false; } else { final EventDefinition eventDefinition = eventDefinitions.iterator().next(); return SUPPORTED_EVENTS.stream() .anyMatch(e -> e.isAssignableFrom(eventDefinition.getClass())); } } private Stream<MessageEventDefinition> getMessageEventDefinitions( final Collection<SequenceFlow> outgoingSequenceFlows) { return outgoingSequenceFlows.stream() .map(SequenceFlow::getTarget) .filter(t -> t instanceof IntermediateCatchEvent) .map(IntermediateCatchEvent.class::cast) .flatMap(e -> e.getEventDefinitions().stream()) .filter(e -> e instanceof MessageEventDefinition) .map(MessageEventDefinition.class::cast); } private Stream<SignalEventDefinition> getSignalEventDefinitions( final Collection<SequenceFlow> outgoingSequenceFlows) { return outgoingSequenceFlows.stream() .map(SequenceFlow::getTarget) .filter(t -> t instanceof IntermediateCatchEvent) .map(IntermediateCatchEvent.class::cast) .flatMap(e -> e.getEventDefinitions().stream()) .filter(e -> e instanceof SignalEventDefinition) .map(SignalEventDefinition.class::cast); } private boolean succeedingNodesOnlyHaveEventBasedGatewayAsIncomingFlows( final EventBasedGateway element) { return element.getSucceedingNodes().stream() .flatMap(flowNode -> flowNode.getPreviousNodes().stream()) .allMatch(element::equals); } } ```
```xml <?xml version="1.0" encoding="utf-8"?> <shape xmlns:android="path_to_url" android:shape="oval"> <size android:width="6dp" android:height="6dp"/> <solid android:color="@android:color/white"/> </shape> ```
```javascript /** * @fileoverview A rule to warn against using arrow functions when they could be * confused with comparisions * @author Jxck <path_to_url */ "use strict"; const astUtils = require("../util/ast-utils.js"); //your_sha256_hash-------------- // Helpers //your_sha256_hash-------------- /** * Checks whether or not a node is a conditional expression. * @param {ASTNode} node - node to test * @returns {boolean} `true` if the node is a conditional expression. */ function isConditional(node) { return node && node.type === "ConditionalExpression"; } //your_sha256_hash-------------- // Rule Definition //your_sha256_hash-------------- module.exports = { meta: { type: "suggestion", docs: { description: "disallow arrow functions where they could be confused with comparisons", category: "ECMAScript 6", recommended: false, url: "path_to_url" }, fixable: "code", schema: [{ type: "object", properties: { allowParens: { type: "boolean" } }, additionalProperties: false }], messages: { confusing: "Arrow function used ambiguously with a conditional expression." } }, create(context) { const config = context.options[0] || {}; const sourceCode = context.getSourceCode(); /** * Reports if an arrow function contains an ambiguous conditional. * @param {ASTNode} node - A node to check and report. * @returns {void} */ function checkArrowFunc(node) { const body = node.body; if (isConditional(body) && !(config.allowParens && astUtils.isParenthesised(sourceCode, body))) { context.report({ node, messageId: "confusing", fix(fixer) { // if `allowParens` is not set to true dont bother wrapping in parens return config.allowParens && fixer.replaceText(node.body, `(${sourceCode.getText(node.body)})`); } }); } } return { ArrowFunctionExpression: checkArrowFunc }; } }; ```
```java /* * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * * path_to_url * * Unless required by applicable law or agreed to in writing, software * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.SQLParserTestCase; /** * Drop domain statement test case. */ public final class DropDomainStatementTestCase extends SQLParserTestCase { } ```
```smalltalk
using System.Collections.Concurrent;
using System.Dynamic;

namespace System.Collections.Generic;

/// <summary>
/// Extension methods for Dictionary.
/// </summary>
public static class AbpDictionaryExtensions
{
    /// <summary>
    /// This method is used to try to get a value in a dictionary if it does exists.
    /// Unlike the built-in TryGetValue, this overload also checks that the stored
    /// object is of type <typeparamref name="T"/>.
    /// </summary>
    /// <typeparam name="T">Type of the value</typeparam>
    /// <param name="dictionary">The collection object</param>
    /// <param name="key">Key</param>
    /// <param name="value">Value of the key (or default value if key not exists)</param>
    /// <returns>True if key does exists in the dictionary and its value is a <typeparamref name="T"/></returns>
    internal static bool TryGetValue<T>(this IDictionary<string, object> dictionary, string key, out T? value)
    {
        object? valueObj;
        if (dictionary.TryGetValue(key, out valueObj) && valueObj is T)
        {
            value = (T)valueObj;
            return true;
        }

        value = default;
        return false;
    }

    /// <summary>
    /// Gets a value from the dictionary with given key. Returns default value if can not find.
    /// </summary>
    /// <param name="dictionary">Dictionary to check and get</param>
    /// <param name="key">Key to find the value</param>
    /// <typeparam name="TKey">Type of the key</typeparam>
    /// <typeparam name="TValue">Type of the value</typeparam>
    /// <returns>Value if found, default if can not found.</returns>
    public static TValue? GetOrDefault<TKey, TValue>(this Dictionary<TKey, TValue> dictionary, TKey key)
        where TKey : notnull
    {
        TValue? obj;
        return dictionary.TryGetValue(key, out obj) ? obj : default;
    }

    /// <summary>
    /// Gets a value from the dictionary with given key. Returns default value if can not find.
    /// </summary>
    /// <param name="dictionary">Dictionary to check and get</param>
    /// <param name="key">Key to find the value</param>
    /// <typeparam name="TKey">Type of the key</typeparam>
    /// <typeparam name="TValue">Type of the value</typeparam>
    /// <returns>Value if found, default if can not found.</returns>
    public static TValue? GetOrDefault<TKey, TValue>(this IDictionary<TKey, TValue> dictionary, TKey key)
    {
        return dictionary.TryGetValue(key, out var obj) ? obj : default;
    }

    /// <summary>
    /// Gets a value from the dictionary with given key. Returns default value if can not find.
    /// </summary>
    /// <param name="dictionary">Dictionary to check and get</param>
    /// <param name="key">Key to find the value</param>
    /// <typeparam name="TKey">Type of the key</typeparam>
    /// <typeparam name="TValue">Type of the value</typeparam>
    /// <returns>Value if found, default if can not found.</returns>
    public static TValue? GetOrDefault<TKey, TValue>(this IReadOnlyDictionary<TKey, TValue> dictionary, TKey key)
    {
        return dictionary.TryGetValue(key, out var obj) ? obj : default;
    }

    /// <summary>
    /// Gets a value from the dictionary with given key. Returns default value if can not find.
    /// </summary>
    /// <param name="dictionary">Dictionary to check and get</param>
    /// <param name="key">Key to find the value</param>
    /// <typeparam name="TKey">Type of the key</typeparam>
    /// <typeparam name="TValue">Type of the value</typeparam>
    /// <returns>Value if found, default if can not found.</returns>
    public static TValue? GetOrDefault<TKey, TValue>(this ConcurrentDictionary<TKey, TValue> dictionary, TKey key)
        where TKey : notnull
    {
        return dictionary.TryGetValue(key, out var obj) ? obj : default;
    }

    /// <summary>
    /// Gets a value from the dictionary with given key. If the key is not found, the value is
    /// created by <paramref name="factory"/>, stored in the dictionary and returned.
    /// </summary>
    /// <param name="dictionary">Dictionary to check and get</param>
    /// <param name="key">Key to find the value</param>
    /// <param name="factory">A factory method used to create the value if not found in the dictionary</param>
    /// <typeparam name="TKey">Type of the key</typeparam>
    /// <typeparam name="TValue">Type of the value</typeparam>
    /// <returns>The existing value if found, otherwise the newly created and stored value.</returns>
    public static TValue GetOrAdd<TKey, TValue>(this IDictionary<TKey, TValue> dictionary, TKey key, Func<TKey, TValue> factory)
    {
        TValue? obj;
        if (dictionary.TryGetValue(key, out obj))
        {
            return obj;
        }

        // Assignment expression both stores and returns the freshly created value.
        return dictionary[key] = factory(key);
    }

    /// <summary>
    /// Gets a value from the dictionary with given key. If the key is not found, the value is
    /// created by <paramref name="factory"/>, stored in the dictionary and returned.
    /// </summary>
    /// <param name="dictionary">Dictionary to check and get</param>
    /// <param name="key">Key to find the value</param>
    /// <param name="factory">A factory method used to create the value if not found in the dictionary</param>
    /// <typeparam name="TKey">Type of the key</typeparam>
    /// <typeparam name="TValue">Type of the value</typeparam>
    /// <returns>The existing value if found, otherwise the newly created and stored value.</returns>
    public static TValue GetOrAdd<TKey, TValue>(this IDictionary<TKey, TValue> dictionary, TKey key, Func<TValue> factory)
    {
        return dictionary.GetOrAdd(key, k => factory());
    }

    /// <summary>
    /// Gets a value from the concurrent dictionary with given key. If the key is not found, the
    /// value is created by <paramref name="factory"/>, stored in the dictionary and returned
    /// (delegates to ConcurrentDictionary.GetOrAdd, which is atomic per key).
    /// </summary>
    /// <param name="dictionary">Concurrent dictionary to check and get</param>
    /// <param name="key">Key to find the value</param>
    /// <param name="factory">A factory method used to create the value if not found in the dictionary</param>
    /// <typeparam name="TKey">Type of the key</typeparam>
    /// <typeparam name="TValue">Type of the value</typeparam>
    /// <returns>The existing value if found, otherwise the newly created and stored value.</returns>
    public static TValue GetOrAdd<TKey, TValue>(this ConcurrentDictionary<TKey, TValue> dictionary, TKey key, Func<TValue> factory)
        where TKey : notnull
    {
        return dictionary.GetOrAdd(key, k => factory());
    }

    /// <summary>
    /// Converts a &lt;string,object&gt; dictionary to a dynamic object, so members can be
    /// added and removed at run time.
    /// </summary>
    /// <param name="dictionary">The collection object</param>
    /// <returns>An ExpandoObject whose members mirror the dictionary's key/value pairs</returns>
    public static dynamic ConvertToDynamicObject(this Dictionary<string, object> dictionary)
    {
        var expandoObject = new ExpandoObject();
        // ExpandoObject implements ICollection<KeyValuePair<string, object>>; adding pairs
        // through that view creates the corresponding dynamic members.
        var expendObjectCollection = (ICollection<KeyValuePair<string, object>>)expandoObject!;

        foreach (var keyValuePair in dictionary)
        {
            expendObjectCollection.Add(keyValuePair);
        }

        return expandoObject;
    }
}
```
J. Otis Humphrey (December 30, 1850 – June 14, 1918) was an American lawyer and jurist who served as a United States district judge of the United States District Court for the Southern District of Illinois from 1901 to 1918. Education and career Born in Morgan County, Illinois, Humphrey attended Shurtleff College, and read law to enter the bar in 1880. He was a legal clerk for the Illinois State Office of Railroad and Warehouse Commissioners from 1880 to 1883. He was in private practice in Springfield, Illinois from 1883 to 1897. He was then the United States Attorney for the Southern District of Illinois from 1897 to 1901. Federal judicial service On March 7, 1901, Humphrey was nominated by President William McKinley to a seat on the United States District Court for the Southern District of Illinois vacated by Judge William J. Allen. Humphrey was confirmed by the United States Senate on March 8, 1901, and received his commission the same day. Humphrey served in that capacity until his death on June 14, 1918. References Sources External links 1850 births 1918 deaths United States Attorneys for the Southern District of Illinois Judges of the United States District Court for the Southern District of Illinois United States federal judges appointed by William McKinley 19th-century American politicians United States federal judges admitted to the practice of law by reading law
```swift // // SshApprovalsViewController.swift // MacBox // // Created by Strongbox on 17/07/2023. // import Cocoa class SshApprovalsViewController: NSViewController, NSTableViewDataSource, NSTableViewDelegate { @IBOutlet var tableView: NSTableView! var datasource: [SSHAgentApproval] = [] override func viewDidLoad() { super.viewDidLoad() tableView.register(NSNib(nibNamed: NSNib.Name(GenericAutoLayoutTableViewCell.NibIdentifier.rawValue), bundle: nil), forIdentifier: GenericAutoLayoutTableViewCell.NibIdentifier) datasource = SSHAgentRequestHandler.shared.approvals tableView.dataSource = self tableView.delegate = self } func numberOfRows(in _: NSTableView) -> Int { datasource.count } func tableView(_ tableView: NSTableView, viewFor tableColumn: NSTableColumn?, row: Int) -> NSView? { let cell = tableView.makeView(withIdentifier: GenericAutoLayoutTableViewCell.NibIdentifier, owner: nil) as! GenericAutoLayoutTableViewCell guard let approval = datasource[safe: row] else { return cell } if tableColumn?.identifier.rawValue == "Process" { cell.title.stringValue = String(format: "%@", approval.processName) } else { if case let .timed(time: timestamp) = approval.expiry { cell.title.stringValue = (timestamp as NSDate).friendlyDateTimeStringPrecise } else { cell.title.stringValue = NSLocalizedString("ssh_agent_remember_approval_until_strongbox_quits", comment: "until Strongbox quits") } } return cell } } ```
Tenby Museum and Art Gallery, located in Tenby, Pembrokeshire, South West Wales, is the oldest independent museum in Wales. Established in 1878, the museum has a collection of local geology, biology, archaeological and maritime artifacts. Accompanying the regular exhibitions since 1976 is a collection of images and crafts by local and national artists such as Augustus and Gwen John. The Tenby Museum building is a Grade II listed building. Origins At the beginning of January 1878, a meeting of a like-minded group of would-be trustees of a proposed museum was held at 10 The Norton, Tenby, the home of one of their members, Charles Allen. The group also included Edward Laws (Hons. Secretary), Dr Frederick Dyser, Rev. George Huntington, James T. Hawkesley, E. Rawdon Power, Dr. John G. Lock and Frederick Walker. It was the wish of the group to house and display the collection of valuable geological specimens formerly belonging to the late Rev. Gilbert N. Smith, an amateur archaeologist and geologist, who had been the Rector of nearby Gumfreston. The collection had been purchased by the town of Tenby for £100 and was to form the basis of a museum collection together with other promised items of natural history and of scientific books. The trustees wished for these collections to be permanently housed in the town for the benefit of the community. From the beginning, they decided upon a purely local museum with no ‘cosmopolitan’ collections being accepted. This rule has been adhered to ever since and material unconnected with the area is not collected. The National school building on Castle Hill had recently been vacated – a new school having been built in the centre of the town. In February 1878, the Tenby Town Council, presided over by Alderman Charles Allen, passed a resolution to grant the trustees permission to rent the school building at the cost of one shilling per annum for the purpose of forming a permanent museum.
The council had no wish to become involved in any other expenditure and therefore the costs of refurbishing the building and administering a museum were to be met by the trustees. The trustees immediately set about obtaining an estimate for the necessary rebuilding work. Lewis John, a local builder, estimated a total cost of £44 12s 11d for initial repair work to the building. His estimate being accepted, John set about repairing the roof, guttering, windows, doorways, and staircases, together with plastering and some redecoration. His work was regarded as highly satisfactory and he was later employed in further work to the building. Some display cases were also purchased at the time. It was obvious that, to provide a community museum, financial help was going to be necessary from within the community itself. Early in February, Edward Laws prepared two hundred letters which were circulated to selected residents of the town, setting out the aims of the proposed museum and outlining the finances required to achieve those aims. Soon, a number of donations were received. E. J. Reed, the Member of Parliament for the Pembroke Boroughs gave £100 and wrote to Laws: ‘I need hardly say that the establishment at Tenby of such a museum as that contemplated would be to me, as to many others, a source of great satisfaction.’ There were other similar favourable responses from the community and elsewhere to the idea of forming a museum collection. Yet, a letter to the Tenby Observer of 3 January 1878 written by one ‘SGP’ of Bristol (subsequently identified as one Samuel Purchase) indicates a note of caution was being urged with regard to the administration of the proposed museum and the safeguarding of its collection. History 17 June 1976 saw the official opening of the Wilfred Harrison Art Gallery. In 1995, the New Art Gallery was opened at Tenby Museum.
In the 20th century, the museum was affiliated to the National Museum and Galleries of Wales and received increasing recognition and support from a number of professional museum organisations. In the 1990s, the museum was the recipient of national awards: the Prince of Wales Award 1993 and the Shoestring Award (Museum of the Year Award 1996) for the UK museum which achieved the most with the least available financial resources. In 2000, national and regional awards were presented for high standards of professional training. On 31 July 2003, Prince Charles visited Tenby and the Tenby Museum and Art Gallery to mark the 125th anniversary of the museum's opening. Notable people associated with the museum William Lyons (1776–1849) — lived in Tenby in early 19th century and was a collector and researcher of specimens of natural history. The Lyons shell collection was donated to the Tenby Local Museum by his daughters in 1878. Dr. Frederick Daniel Dyster (1810–1893) — a founder member of the museum, he had for many years been interested in marine biology and was the friend and correspondent of T. H. Huxley. Dyster contributed to the early collections of the museum by donating a number of scientific books to the museum's library. Charles Allen (1807–1884) — it was at his home at 10 The Norton, Tenby, that the first meeting was held to discuss the formation of a Local Museum for Tenby. Allen came from a well known Pembrokeshire family. After retirement, he was a member of the Town Council and Mayor in 1865 and 1871. Edward Laws (1837–1913) — the prime mover in the establishment of a local museum for Tenby and became the first Hon. Secretary of the museum's trustees. Laws was an amateur historian and archaeologist, who was the author of several works including ‘The History of Little England beyond Wales’, ‘The church Book of St Mary the Virgin’ and ‘The Civil War in Pembrokeshire’. In 1877 he had also excavated in the Tenby area with Professor George Rolleston.
Professor George Rolleston (1829–1881) — Professor of Anatomy and Physiology at Oxford in 1860, he pioneered the teaching of Zoology. He undertook anthropological excavations with Edward Laws in the Tenby area in 1877, and in the following year was invited to perform the opening ceremony of the Tenby Local Museum, which took place on 26 July. Edward Rawdon Bingham Power (1811–1896) — a native of Surrey, Power had retired with his family to Tenby after a long career in the civil service in Ceylon. He was a J.P. and served as Mayor of Tenby in 1872. For the remainder of his life, he was one of the most regular members at museum committee meetings and actively supported the museum's interests. He undertook considerable work on behalf of charitable organisations in the town. These included the Tenby Cottage Hospital, of which he was the Hon. Secretary and Treasurer. Exhibits The museum features exhibits about local history, geology, archaeology, area maritime history and piracy. The New Gallery features changing exhibits of contemporary art and crafts in various media, and the permanent art collection includes works by such Welsh artists as Gwen and Augustus John, Kyffin Williams, John Piper, Nina Hamnett, Claudia Williams, John Uzzell Edwards and John Knapp Fisher. References Tenby Museums established in 1878 1878 establishments in Wales Museums in Pembrokeshire Art museums and galleries in Wales History museums in Wales Grade II listed buildings in Pembrokeshire Local museums in Wales
St. Mary's Catholic Church is a historic Catholic church at 701 Church in Brenham, Texas. It was built in 1935 and added to the National Register of Historic Places in 1990. See also National Register of Historic Places listings in Washington County, Texas References External links Roman Catholic churches in Texas Churches on the National Register of Historic Places in Texas National Register of Historic Places in Washington County, Texas Renaissance Revival architecture in Texas Churches in Washington County, Texas Roman Catholic churches completed in 1935 Buildings and structures in Brenham, Texas 20th-century Roman Catholic church buildings in the United States