text
stringlengths
1
22.8M
Milton John Helmick (1885–1954) was Attorney General of New Mexico from 1923 to 1925, a judge in Albuquerque from 1925 to 1934, and the judge of the United States Court for China from 1934 to 1943. Early life Milton John Helmick was a native of Colorado. Helmick attended Stanford University and then took a law degree from the University of Denver in 1910. Career Milton John Helmick served as Attorney General of New Mexico from 1923 to 1925 and from 1925 to 1934 as judge for the 2nd District of Albuquerque. In 1934, Helmick was appointed to a 10-year term as the Judge for the United States Court for China in Shanghai, China replacing Milton D. Purdy. On December 8, 1941, Japanese troops occupied the United States consulate in Shanghai where the court was based. Helmick was interned for about half a year before being repatriated to America. His appointment as judge formally came to an end in May 1943 after the Treaty for Relinquishment of Extraterritorial Rights in China was ratified. Helmick returned to China in 1944 to study the new Chinese legal system to prepare for dealing with the system after the defeat of Japan. Helmick then worked for the Standard Vacuum Oil Company in Shanghai from 1945 to 1951. In 1953, Helmick was appointed Judge of the United States Consular Court for Casablanca and Tangiers where he tried one of the few cases of piracy against an American citizen in the 20th Century. Retirement and death Helmick retired in January 1954 and died in San Francisco in October 1954 at the age of 69. Further reading , Vol. 1: ; Vol. 2: ; Vol. 3: References 1954 deaths Judges of the United States Court for China New Mexico Attorneys General 20th-century American judges Stanford University alumni University of Denver alumni 1885 births United States district court judges appointed by Franklin D. Roosevelt
```ts
/**
 * Test suite for the RxDB local-documents plugin:
 * covers insert/get/upsert/remove of local documents on both
 * collections and databases, their reactive observables
 * (.$ / .get$ / .getLocal$), multi-instance event streaming,
 * and regression tests for reported issues.
 */
import assert from 'assert';
import AsyncTestUtil, {
    wait,
    waitUntil,
    randomString
} from 'async-test-util';

import {
    schemaObjects,
    schemas,
    humansCollection,
    isNode
} from '../../plugins/test-utils/index.mjs';
import {
    createRxDatabase,
    randomCouchString,
    addRxPlugin,
    RxJsonSchema,
    ensureNotFalsy,
    RxLocalDocument,
    RxCollection
} from '../../plugins/core/index.mjs';
import { RxDBLocalDocumentsPlugin } from '../../plugins/local-documents/index.mjs';
// register the plugin before any database/collection is created below
addRxPlugin(RxDBLocalDocumentsPlugin);
import config, { describeParallel } from './config.ts';
import { filter, first, map } from 'rxjs/operators';
import { firstValueFrom } from 'rxjs';

// payload shape used by most local documents in this suite
declare type TestDocType = {
    foo: string;
};

describeParallel('local-documents.test.ts', () => {
    describe('.insertLocal()', () => {
        describe('positive', () => {
            it('should create a local document', async () => {
                const c = await humansCollection.create(0);
                const doc = await c.insertLocal('foobar', {
                    foo: 'bar'
                });
                assert.ok(doc);
                c.database.destroy();
            });
            it('should not find the doc because its local', async () => {
                const c = await humansCollection.create(0);
                await c.insertLocal('foobar', {
                    foo: 'bar'
                });
                // local documents must not appear in normal queries
                const doc2 = await c.findOne().exec();
                assert.strictEqual(doc2, null);
                c.database.destroy();
            });
        });
        describe('negative', () => {
            it('should throw if already exists', async () => {
                const c = await humansCollection.create(0);
                const doc = await c.insertLocal('foobar', {
                    foo: 'bar'
                });
                assert.ok(doc);
                let thrown = false;
                try {
                    await c.insertLocal('foobar', {
                        foo: 'bar2'
                    });
                } catch (err) {
                    thrown = true;
                }
                assert.ok(thrown);
                c.database.destroy();
            });
        });
    });
    describe('.getLocal()', () => {
        describe('positive', () => {
            it('should find the document', async () => {
                const c = await humansCollection.create(0);
                await c.insertLocal('foobar', {
                    foo: 'bar'
                });
                const doc = await c.getLocal('foobar');
                assert.ok(doc);
                assert.strictEqual(doc.get('foo'), 'bar');
                c.database.destroy();
            });
            it('should find the document twice (doc-cache)', async () => {
                const c = await humansCollection.create(0);
                await c.insertLocal('foobar', {
                    foo: 'bar'
                });
                const doc = await c.getLocal('foobar');
                const doc2 = await c.getLocal('foobar');
                assert.ok(doc);
                // same id must resolve to the identical cached instance
                assert.ok(doc === doc2);
                c.database.destroy();
            });
        });
        describe('negative', () => {
            it('should not find non-existing', async () => {
                const c = await humansCollection.create(0);
                const doc = await c.getLocal('foobar');
                assert.strictEqual(doc, null);
                c.database.destroy();
            });
        });
    });
    describe('.$', () => {
        it('should return the full RxLocaDocument, not just the data', async () => {
            const c = await humansCollection.create(0);
            const doc = await c.insertLocal('foobar', {
                foo: 'bar'
            });
            const emitted: RxLocalDocument<any, any>[] = [];
            doc.$.subscribe(fullDoc => {
                emitted.push(fullDoc);
            });
            await waitUntil(() => emitted.length === 1);
            await doc.incrementalPatch({ foo: 'bar2' });
            await waitUntil(() => emitted.length === 2);
            emitted.forEach(fullDoc => {
                // ensure it is a full RxLocalDocument instance
                assert.ok(fullDoc.primary);
            });
            // 2nd must have updated data
            assert.strictEqual(emitted[1].get('foo'), 'bar2');
            c.database.destroy();
        });
    });
    describe('incremental mutation functions', () => {
        // payload with an optional field that the mutations add
        type LocalDocType = {
            foo: string;
            added?: string;
        };
        describe('.incrementalPatch()', () => {
            it('should modify the data', async () => {
                const c = await humansCollection.create(0);
                let doc = await c.upsertLocal<LocalDocType>(
                    'foobar',
                    { foo: 'bar' }
                );
                doc = await doc.incrementalPatch({ added: 'foo' });
                // patch must merge, not replace: old field survives
                assert.strictEqual(doc.get('foo'), 'bar');
                assert.strictEqual(doc.get('added'), 'foo');
                c.database.destroy();
            });
        });
        describe('.incrementalModify()', () => {
            it('should modify the data', async () => {
                const c = await humansCollection.create(0);
                let doc: RxLocalDocument<RxCollection<any>, LocalDocType> = await c.upsertLocal<LocalDocType>('foobar', { foo: 'bar' });
                doc = await doc.incrementalModify(data => {
                    data.added = 'foo';
                    return data;
                });
                assert.strictEqual(doc.get('foo'), 'bar');
                assert.strictEqual(doc.get('added'), 'foo');
                c.database.destroy();
            });
        });
    });
    describe('.getLocal$()', () => {
        const id = 'foo';
        it('should emit null when not exists', async () => {
            const c = await humansCollection.create(0);
            const cData = await c.getLocal$(id).pipe(first()).toPromise();
            const dbData = await c.database.getLocal$(id).pipe(first()).toPromise();
            assert.strictEqual(cData, null);
            assert.strictEqual(dbData, null);
            c.database.destroy();
        });
        it('should emit the document when exists', async () => {
            const c = await humansCollection.create(0);
            await c.insertLocal(id, { foo: 'bar' });
            await c.database.insertLocal(id, { foo: 'bar' });
            const cDoc = await c.getLocal$(id).pipe(first()).toPromise();
            const dbDoc = await c.database.getLocal$(id).pipe(first()).toPromise();
            assert.strictEqual(ensureNotFalsy(cDoc).get('foo'), 'bar');
            assert.strictEqual(ensureNotFalsy(dbDoc).get('foo'), 'bar');
            c.database.destroy();
        });
        it('collection: should emit again when state changed', async () => {
            const c = await humansCollection.create(0);
            const cEmits: any[] = [];
            const sub = c.getLocal$(id).subscribe((x: any) => {
                cEmits.push(x ? x.toJSON() : null);
            });
            await waitUntil(() => cEmits.length === 1);
            assert.strictEqual(cEmits[0], null);
            // insert
            await c.insertLocal(id, { foo: 'bar' });
            await waitUntil(() => cEmits.length === 2);
            assert.strictEqual(cEmits[1].data.foo, 'bar');
            // update
            await c.upsertLocal(id, { foo: 'bar2' });
            await waitUntil(() => cEmits.length === 3);
            assert.strictEqual(cEmits[2].data.foo, 'bar2');
            sub.unsubscribe();
            c.database.destroy();
        });
        it('database: should emit again when state changed', async () => {
            const c = await humansCollection.create(0);
            const db = c.database;
            const cEmits: any[] = [];
            const sub = db.getLocal$(id).subscribe((x) => {
                cEmits.push(x ? x.toJSON() : null);
            });
            await waitUntil(() => cEmits.length === 1);
            assert.strictEqual(cEmits[0], null);
            // insert
            await db.insertLocal(id, { foo: 'bar' });
            await waitUntil(() => cEmits.length === 2);
            assert.strictEqual(cEmits[1].data.foo, 'bar');
            // update
            await db.upsertLocal(id, { foo: 'bar2' });
            await waitUntil(() => cEmits.length === 3);
            assert.strictEqual(cEmits[2].data.foo, 'bar2');
            sub.unsubscribe();
            c.database.destroy();
        });
    });
    describe('.upsertLocal()', () => {
        describe('positive', () => {
            it('should insert when not exists', async () => {
                const c = await humansCollection.create(0);
                const doc: RxLocalDocument<any, { foo: string; }> = await c.upsertLocal<{ foo: string; }>('foobar', {
                    foo: 'bar'
                });
                assert.ok(doc);
                assert.strictEqual(doc.get('foo'), 'bar');
                c.database.destroy();
            });
            it('should update if the document already exists', async () => {
                const c = await humansCollection.create(0);
                const doc = await c.upsertLocal('foobar', { foo: 'bar' });
                const doc2 = await c.upsertLocal('foobar', { foo: 'bar2' });
                assert.strictEqual(doc2.get('foo'), 'bar2');
                assert.ok(doc !== doc2);
                c.database.destroy();
            });
            /**
             * @link path_to_url
             */
            it('should invoke subscription once', async () => {
                const c = await humansCollection.create(0);
                const emitted: RxLocalDocument<any, { foo: string; }>[] = [];
                const doc = await c.upsertLocal<{ foo: string; }>('foobar', {
                    foo: 'barOne',
                });
                await wait(50);
                const docSub = doc.$.subscribe(x => {
                    emitted.push(x as any);
                });
                await waitUntil(() => emitted.length === 1);
                await c.upsertLocal('foobar', {
                    foo: 'barTwo',
                });
                assert.strictEqual(emitted.length, 2);
                // first 'barOne' is emitted because.$ is a BehaviorSubject
                assert.strictEqual(emitted[0].get('foo'), 'barOne');
                // second after the change, barTwo is emitted
                assert.strictEqual(emitted[1].get('foo'), 'barTwo');
                docSub.unsubscribe();
                c.database.destroy();
            });
        });
        describe('negative', () => { });
    });
    describe('.remove()', () => {
        it('should remove the document', async () => {
            const c = await humansCollection.create(0);
            const doc = await c.upsertLocal('foobar', { foo: 'bar' });
            await doc.remove();
            const doc2 = await c.getLocal('foobar');
            assert.ok(ensureNotFalsy(doc2).deleted);
            c.database.destroy();
        });
    });
    describe('with database', () => {
        it('should be able to use local documents directly on the database', async () => {
            const c = await humansCollection.create(0);
            const db = c.database;
            const doc1 = await db.insertLocal('foobar', { foo: 'bar' });
            const doc2 = await db.getLocal('foobar');
            assert.strictEqual(doc1, doc2);
            db.destroy();
        });
    });
    describe('multi-instance', () => {
        // these tests require a storage with multi-instance support
        if (!config.storage.hasMultiInstance) {
            return;
        }
        it('should stream events over multi-instance', async () => {
            const name = randomCouchString(10);
            const db = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                localDocuments: true
            });
            const db2 = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                ignoreDuplicate: true,
                localDocuments: true
            });
            const doc1 = await db.insertLocal('foobar', { foo: 'bar' });
            let doc2: RxLocalDocument<any, any> | null;
            await waitUntil(async () => {
                doc2 = await db2.getLocal('foobar');
                return !!doc2;
            });
            await doc1.incrementalPatch({ foo: 'bar2' });
            await waitUntil(() => {
                return ensureNotFalsy(doc2).getLatest().get('foo') === 'bar2';
            }, 1000, 50);
            db.destroy();
            db2.destroy();
        });
        it('should emit deleted', async () => {
            const name = randomCouchString(10);
            const db = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                localDocuments: true
            });
            const db2 = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                ignoreDuplicate: true,
                localDocuments: true
            });
            const doc1 = await db.insertLocal('foobar', { foo: 'bar' });
            let doc2: RxLocalDocument<any, any> | null = undefined as any;
            await waitUntil(async () => {
                doc2 = await db2.getLocal('foobar');
                return !!doc2;
            });
            // resolves once the delete event reaches the second instance
            const hasEmitted = firstValueFrom(
                ensureNotFalsy(doc2).deleted$
                    .pipe(
                        map(x => {
                            return x;
                        }),
                        filter(d => d === true),
                        first()
                    )
            );
            await doc1.remove();
            await hasEmitted;
            db.destroy();
            db2.destroy();
        });
        it('should emit changes (database)', async () => {
            const name = randomCouchString(10);
            const db = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                localDocuments: true
            });
            const db2 = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                ignoreDuplicate: true,
                localDocuments: true
            });
            const doc1 = await db.insertLocal('foobar', { foo: 'bar' });
            await doc1.incrementalPatch({ foo: 'bar2' });
            await waitUntil(async () => {
                const doc2 = await db2.getLocal<TestDocType>('foobar');
                return doc2 && doc2.toJSON().data.foo === 'bar2';
            });
            db.destroy();
            db2.destroy();
        });
        it('should emit changes (collection)', async () => {
            const name = randomCouchString(10);
            const db = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
            });
            const db2 = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                ignoreDuplicate: true
            });
            const c1 = await db.addCollections({
                humans: {
                    schema: schemas.primaryHuman,
                    localDocuments: true
                }
            });
            const c2 = await db2.addCollections({
                humans: {
                    schema: schemas.primaryHuman,
                    localDocuments: true
                }
            });
            // insert on instance #1
            const doc1 = await c1.humans.insertLocal('foobar', { foo: 'bar' });
            const emitted: any[] = [];
            const sub = c1.humans.getLocal$('foobar').subscribe((x: any) => {
                emitted.push(x ? x.toJSON(true) : null);
            });
            await waitUntil(() => emitted.length === 1);
            // update on instance #2
            const doc2 = await c2.humans.getLocal<TestDocType>('foobar');
            await doc1.incrementalPatch({ foo: 'bar2' });
            await waitUntil(() => doc2 && doc2.getLatest().toJSON().data.foo === 'bar2');
            await waitUntil(() => {
                return emitted.length >= 2;
            });
            sub.unsubscribe();
            db.destroy();
            db2.destroy();
        });
        it('BUG insertLocal not send to other instance', async () => {
            const name = randomCouchString(10);
            const db = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                localDocuments: true
            });
            const db2 = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                ignoreDuplicate: true,
                localDocuments: true
            });
            const emitted: any[] = [];
            const sub = db2.getLocal$<TestDocType>('foobar').subscribe(x => {
                emitted.push(x);
            });
            /**
             * Before inserting, we must await that the empty result set
             * was emitted. Otherwise we might miss the initial emit
             * because creating the db2 can take a long time
             * on some storages. So not awaiting here would make the test
             * timing dependent.
             */
            await waitUntil(() => emitted.length === 1);
            await db.insertLocal<TestDocType>('foobar', { foo: 'bar' });
            await waitUntil(() => {
                return emitted.length === 2;
            }, 2000, 50);
            assert.ok(emitted.pop());
            const doc = await db2.getLocal<TestDocType>('foobar');
            assert.strictEqual(doc && doc.toJSON().data.foo, 'bar');
            sub.unsubscribe();
            db.destroy();
            db2.destroy();
        });
        it('should not conflict with non-local-doc that has same id', async () => {
            const name = randomCouchString(10);
            const db = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                localDocuments: true
            });
            const c1 = await db.addCollections({
                humans: {
                    schema: schemas.primaryHuman,
                    localDocuments: true
                }
            });
            const db2 = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
                ignoreDuplicate: true,
                localDocuments: true
            });
            const c2 = await db2.addCollections({
                humans: {
                    schema: schemas.primaryHuman,
                    localDocuments: true
                }
            });
            // a normal document and a local document share the id 'foobar'
            const docData = schemaObjects.humanData();
            docData.passportId = 'foobar';
            docData.age = 40;
            const doc = await c1.humans.insert(docData);
            const localDoc = await c1.humans.insertLocal('foobar', {
                foo: 'bar',
                age: 10
            });
            let doc2: RxLocalDocument<any, any> | null = undefined as any;
            await waitUntil(async () => {
                doc2 = await c2.humans.findOne().exec();
                return !!doc2;
            });
            let localDoc2: RxLocalDocument<any, any> | null = undefined as any;
            await waitUntil(async () => {
                localDoc2 = await c2.humans.getLocal('foobar');
                return !!localDoc2;
            });
            // updating the normal doc must not leak into the local doc …
            await doc.incrementalPatch({ age: 50 });
            await AsyncTestUtil.waitUntil(() => (doc2 as any).getLatest().age === 50);
            await AsyncTestUtil.wait(20);
            assert.strictEqual(ensureNotFalsy(localDoc2).get('age'), 10);
            // … and vice versa
            await localDoc.incrementalPatch({
                age: 66,
                foo: 'bar'
            });
            await AsyncTestUtil.waitUntil(() => ensureNotFalsy(localDoc2).getLatest().get('age') === 66);
            await AsyncTestUtil.wait(20);
            assert.strictEqual(ensureNotFalsy(doc2).getLatest().get('age'), 50);
            db.destroy();
            db2.destroy();
        });
    });
    describe('issues', () => {
        it('#661 LocalDocument Observer field error', async () => {
            const myCollection = await humansCollection.create(0);
            await myCollection.upsertLocal(
                'foobar',
                { foo: 'bar' }
            );
            const emitted: any[] = [];
            const localDoc = await myCollection.getLocal('foobar');
            ensureNotFalsy(localDoc).get$('foo').subscribe((val: any) => {
                emitted.push(val);
            });
            await AsyncTestUtil.waitUntil(() => emitted.length === 1);
            assert.strictEqual(emitted[0], 'bar');
            myCollection.database.destroy();
        });
        it('#663 Document conflicts with LocalDocument in the same Collection', async () => {
            const name = randomCouchString(10);
            const db = await createRxDatabase({
                name,
                storage: config.storage.getStorage(),
            });
            type DocData = {
                id: string;
                boundariesGrp: {
                    bndrPlnId: string;
                    bndrPlnNm: string;
                }[];
            };
            const boundaryMgmtSchema: RxJsonSchema<DocData> = {
                version: 0,
                type: 'object',
                primaryKey: 'id',
                properties: {
                    id: {
                        type: 'string',
                        maxLength: 100
                    },
                    boundariesGrp: {
                        type: 'array',
                        uniqueItems: false,
                        items: {
                            type: 'object',
                            properties: {
                                bndrPlnId: {
                                    type: 'string',
                                },
                                bndrPlnNm: {
                                    type: 'string',
                                }
                            }
                        },
                        default: [],
                    },
                }
            };
            const boundaryMgmtCols = await db.addCollections({
                human: {
                    schema: boundaryMgmtSchema,
                    localDocuments: true
                }
            });
            const boundaryMgmtCol = boundaryMgmtCols.human;
            const groups = {
                bndrPlnId: 'mygroup',
                bndrPlnNm: 'other'
            };
            // insert non-local
            await boundaryMgmtCol.insert({
                id: randomCouchString(12),
                boundariesGrp: [groups]
            });
            await boundaryMgmtCol.insertLocal('metadata', {
                userData: {},
                selectedBndrPlnId: 'foobar1',
                actionRev: 0,
                bndrId: 'foobar2',
                direction: 'foobar3',
            });
            // save localgrpId
            const grpId = 'foobar';
            const metadata = await boundaryMgmtCol.getLocal('metadata');
            await ensureNotFalsy(metadata).incrementalModify(docData => {
                docData.selectedBndrPlnId = grpId;
                return docData;
            });
            // mutating the local doc must not have touched the normal doc
            const data = await boundaryMgmtCol.findOne().exec(true);
            const json = data.toJSON();
            assert.deepStrictEqual(json.boundariesGrp[0], groups);
            db.destroy();
        });
        it('local documents not persistent on db restart', async () => {
            if (!config.storage.hasPersistence) {
                return;
            }
            if (!isNode) {
                return;
            }
            const dbName: string = randomCouchString(10);
            const localDocId = 'foobar';
            const localDocData = {
                foo: 'bar'
            };
            const db = await createRxDatabase({
                name: dbName,
                storage: config.storage.getStorage(),
                multiInstance: false,
                localDocuments: true
            });
            const cols = await db.addCollections({
                humans: {
                    schema: schemas.human,
                    localDocuments: true
                }
            });
            await db.insertLocal(localDocId, localDocData);
            await cols.humans.insertLocal(localDocId, localDocData);
            await db.destroy();
            // reopen the same database and expect the local docs to survive
            const db2 = await createRxDatabase({
                name: dbName,
                storage: config.storage.getStorage(),
                multiInstance: false,
                localDocuments: true
            });
            const col2 = await db2.addCollections({
                humans: {
                    schema: schemas.human,
                    localDocuments: true
                }
            });
            const docDb = await db2.getLocal(localDocId);
            const docCol = await col2.humans.getLocal(localDocId);
            assert.ok(docDb);
            assert.ok(docCol);
            assert.strictEqual(docDb.get('foo'), 'bar');
            assert.strictEqual(docCol.get('foo'), 'bar');
            await db2.destroy();
        });
        it('doing many upsertLocal() can cause a 404 document not found', async () => {
            if (!isNode) {
                return;
            }
            const dbName: string = randomCouchString(10);
            const db = await createRxDatabase({
                name: dbName,
                storage: config.storage.getStorage(),
                multiInstance: false,
                localDocuments: true
            });
            const key = 'foobar';
            let doc = await db.getLocal(key);
            doc = await db.insertLocal(key, {
                foo: 'bar'
            });
            assert.ok(doc);
            // hammer the same key with upserts; must not throw
            let t = 0;
            while (t < 50) {
                await db.upsertLocal(key, {
                    foo: randomString(10)
                });
                t++;
            }
            db.destroy();
        });
    });
});
```
```ruby
# Adds an index on articles.collection_id to speed up lookups of
# articles by their parent collection.
class AddIndexToArticlesCollectionId < ActiveRecord::Migration[6.0]
  # Concurrent index creation cannot run inside a transaction,
  # so the migration's automatic DDL transaction must be disabled.
  disable_ddl_transaction!

  def change
    # algorithm: :concurrently builds the index without locking the
    # table against writes (supported on PostgreSQL).
    add_index :articles, :collection_id, algorithm: :concurrently
  end
end
```
John Sham Kin-Fun (born 1952) is a Hong Kong actor and film producer. His English name is sometimes written as John Shum. Whilst known primarily for his comedic acting roles in Hong Kong cinema, he also spent time as a political activist. Biography Shum was educated in Hong Kong, the UK and the US. Upon his return to Hong Kong, he co-founded "City Magazine" with John Chan, and worked as its editor. At the same time, he began working in television and radio. He was also a student activist in the 1970s back in his youth and was a member of a Trotskyist vanguard party the Revolutionary Marxist League. In 1983, he set up the film production company D&B Films, along with Sammo Hung and Dickson Poon. He later founded another film company with John Chan, Maverick Films Ltd. Sham's most prolific period working as an actor was during the 1980s. Of the 45 films he has appeared in, 33 were during this period. Notable appearances include Sammo Hung's Lucky Stars films Winners and Sinners (1983) and Twinkle, Twinkle Lucky Stars (1985), and starring roles alongside comedy partner Richard Ng in the Pom Pom series (1984–1986). Sham is credited as a producer on over 20 films, including Hong Kong 1941 (1984), The Lunatics (1986), Legacy of Rage (1986) and The Banquet (1991). He also worked as an assistant director on the 1987 Michelle Yeoh film Magnificent Warriors and also has credits on a number of other films, with roles such as planning, executive production and presentation. In 1992, Sham co-hosted the Hong Kong Film Awards ceremony. Throughout much of the 1990s, films took a back seat as Shum was heavily involved with the Pro-Democracy movement in Hong Kong. In 2005, he was appointed the Executive Secretary of the Federation of Hong Kong Filmmakers, by the government-sponsored Film Development Committee (FDC). In December 2007, he was enrolled as an executive committee member of the Hong Kong Performing Artistes Guild. 
His latest role is as executive producer, alongside Jackie Chan, for the film Wushu (2008), which was directed by Antony Szeto and starred Sammo Hung. Filmography Films Yes, Madam 一皇家師姐 (1985), actor A Complicated Story 一個複雜故事 (2013), actor A Simple Life 桃姐 (2011), actor Bodyguards and Assassins 十月圍城 (2009), actor Mr. Cinema 老港正傳 (2007), actor McDull, the Alumni 春田花花同學會(2006), actor New Police Story 新警察故事 (2004), actor The Miracle Box 天作之盒 (2004), actor References External links 1952 births Hong Kong male actors Living people Hong Kong film producers Chinese Trotskyists
```javascript
'use strict'
// Tests for the remix solidity-decoder: compiles fixture contracts with
// solc, extracts their state-variable definitions via astHelper, and
// checks that decodeInfo.parseType reports the expected storage layout
// (slot count, byte size, type name) for each variable.
var tape = require('tape')
var compiler = require('solc')
var astHelper = require('../../src/solidity-decoder/astHelper')
var decodeInfo = require('../../src/solidity-decoder/decodeInfo')
var stateDecoder = require('../../src/solidity-decoder/stateDecoder')
var contracts = require('./contracts/miscContracts')
var simplecontracts = require('./contracts/simpleContract')
var remixLib = require('remix-lib')
var compilerInput = remixLib.helpers.compiler.compilerInput
var util = require('../../src/solidity-decoder/types/util')

tape('solidity', function (t) {
  t.test('astHelper, decodeInfo', function (st) {
    // compile the misc fixture contracts and pull out their state defs
    var output = compiler.compile(compilerInput(contracts))
    output = JSON.parse(output)
    var state = astHelper.extractStateDefinitions('test.sol:contractUint', output.sources)
    var states = astHelper.extractStatesDefinitions(output.sources)
    var stateDef = state.stateDefinitions
    // unsigned integers and fixed-size bytes
    var parsedType = decodeInfo.parseType(stateDef[0].attributes.type, states, 'contractUint', util.extractLocationFromAstVariable(stateDef[0]))
    checkDecodeInfo(st, parsedType, 1, 1, 'uint8')
    parsedType = decodeInfo.parseType(stateDef[2].attributes.type, states, 'contractUint', util.extractLocationFromAstVariable(stateDef[2]))
    checkDecodeInfo(st, parsedType, 1, 32, 'uint256')
    parsedType = decodeInfo.parseType(stateDef[3].attributes.type, states, 'contractUint', util.extractLocationFromAstVariable(stateDef[3]))
    checkDecodeInfo(st, parsedType, 1, 32, 'uint256')
    parsedType = decodeInfo.parseType(stateDef[4].attributes.type, states, 'contractUint', util.extractLocationFromAstVariable(stateDef[4]))
    checkDecodeInfo(st, parsedType, 1, 16, 'bytes16')
    // structs and arrays of structs / fixed-size bytes
    state = astHelper.extractStateDefinitions('test.sol:contractStructAndArray', output.sources)
    stateDef = state.stateDefinitions
    parsedType = decodeInfo.parseType(stateDef[1].attributes.type, states, 'contractStructAndArray', util.extractLocationFromAstVariable(stateDef[1]))
    checkDecodeInfo(st, parsedType, 2, 32, 'struct contractStructAndArray.structDef')
    parsedType = decodeInfo.parseType(stateDef[2].attributes.type, states, 'contractStructAndArray', util.extractLocationFromAstVariable(stateDef[2]))
    checkDecodeInfo(st, parsedType, 6, 32, 'struct contractStructAndArray.structDef[3]')
    parsedType = decodeInfo.parseType(stateDef[3].attributes.type, states, 'contractStructAndArray', util.extractLocationFromAstVariable(stateDef[3]))
    checkDecodeInfo(st, parsedType, 2, 32, 'bytes12[4]')
    // static, dynamic and nested arrays
    state = astHelper.extractStateDefinitions('test.sol:contractArray', output.sources)
    stateDef = state.stateDefinitions
    parsedType = decodeInfo.parseType(stateDef[0].attributes.type, states, 'contractArray', util.extractLocationFromAstVariable(stateDef[0]))
    checkDecodeInfo(st, parsedType, 1, 32, 'uint32[5]')
    parsedType = decodeInfo.parseType(stateDef[1].attributes.type, states, 'contractArray', util.extractLocationFromAstVariable(stateDef[1]))
    checkDecodeInfo(st, parsedType, 1, 32, 'int8[]')
    parsedType = decodeInfo.parseType(stateDef[2].attributes.type, states, 'contractArray', util.extractLocationFromAstVariable(stateDef[2]))
    checkDecodeInfo(st, parsedType, 4, 32, 'int16[][3][][4]')
    // enums
    // NOTE(review): this is the only parseType call without the location
    // argument — presumably intentional for enums, but worth confirming.
    state = astHelper.extractStateDefinitions('test.sol:contractEnum', output.sources)
    stateDef = state.stateDefinitions
    parsedType = decodeInfo.parseType(stateDef[1].attributes.type, states, 'contractEnum')
    checkDecodeInfo(st, parsedType, 1, 2, 'enum')
    // small-width variables that pack into a single slot
    state = astHelper.extractStateDefinitions('test.sol:contractSmallVariable', output.sources)
    stateDef = state.stateDefinitions
    parsedType = decodeInfo.parseType(stateDef[0].attributes.type, states, 'contractSmallVariable', util.extractLocationFromAstVariable(stateDef[0]))
    checkDecodeInfo(st, parsedType, 1, 1, 'int8')
    parsedType = decodeInfo.parseType(stateDef[1].attributes.type, states, 'contractSmallVariable', util.extractLocationFromAstVariable(stateDef[1]))
    checkDecodeInfo(st, parsedType, 1, 1, 'uint8')
    parsedType = decodeInfo.parseType(stateDef[2].attributes.type, states, 'contractSmallVariable', util.extractLocationFromAstVariable(stateDef[2]))
    checkDecodeInfo(st, parsedType, 1, 2, 'uint16')
    parsedType = decodeInfo.parseType(stateDef[3].attributes.type, states, 'contractSmallVariable', util.extractLocationFromAstVariable(stateDef[3]))
    checkDecodeInfo(st, parsedType, 1, 4, 'int32')
    parsedType = decodeInfo.parseType(stateDef[4].attributes.type, states, 'contractSmallVariable', util.extractLocationFromAstVariable(stateDef[4]))
    checkDecodeInfo(st, parsedType, 1, 32, 'uint256')
    parsedType = decodeInfo.parseType(stateDef[5].attributes.type, states, 'contractSmallVariable', util.extractLocationFromAstVariable(stateDef[5]))
    checkDecodeInfo(st, parsedType, 1, 2, 'int16')
    // second fixture file: simpleContract and cross-contract types
    output = compiler.compile(compilerInput(simplecontracts))
    output = JSON.parse(output)
    state = astHelper.extractStateDefinitions('test.sol:simpleContract', output.sources)
    states = astHelper.extractStatesDefinitions(output.sources)
    stateDef = state.stateDefinitions
    parsedType = decodeInfo.parseType(stateDef[2].attributes.type, states, 'simpleContract', util.extractLocationFromAstVariable(stateDef[2]))
    checkDecodeInfo(st, parsedType, 2, 32, 'struct simpleContract.structDef')
    parsedType = decodeInfo.parseType(stateDef[3].attributes.type, states, 'simpleContract', util.extractLocationFromAstVariable(stateDef[3]))
    checkDecodeInfo(st, parsedType, 6, 32, 'struct simpleContract.structDef[3]')
    parsedType = decodeInfo.parseType(stateDef[4].attributes.type, states, 'simpleContract', util.extractLocationFromAstVariable(stateDef[4]))
    checkDecodeInfo(st, parsedType, 1, 1, 'enum')
    // a struct declared in contract test1 but used from test2
    state = astHelper.extractStateDefinitions('test.sol:test2', output.sources)
    stateDef = state.stateDefinitions
    parsedType = decodeInfo.parseType(stateDef[0].attributes.type, states, 'test1', util.extractLocationFromAstVariable(stateDef[0]))
    checkDecodeInfo(st, parsedType, 1, 32, 'struct test1.str')
    // NOTE(review): 'state' below is never inspected — this re-asserts the
    // same parsedType as above; the extractStateVariables result itself
    // looks unverified. Confirm whether an assertion on 'state' is missing.
    state = stateDecoder.extractStateVariables('test.sol:test2', output.sources)
    checkDecodeInfo(st, parsedType, 1, 32, 'struct test1.str')
    st.end()
  })
})

// Asserts that a parsed type description reports the expected number of
// storage slots, byte width within a slot, and canonical type name.
function checkDecodeInfo (st, decodeInfo, storageSlots, storageBytes, typeName) {
  st.equal(decodeInfo.storageSlots, storageSlots)
  st.equal(decodeInfo.storageBytes, storageBytes)
  st.equal(decodeInfo.typeName, typeName)
}
```
Charles Wolf may refer to: Charles Wolf (astronomer) (1827–1918), French astronomer Charles Wolf (basketball) (born 1926), American basketball coach Charles Wolf Jr. (1924–2016), senior economic advisor at the RAND Corporation Charlie Wolf (born 1959), British radio DJ See also Charles Wolfe (disambiguation) Charles de Wolff (1932–2011), Dutch organist and conductor
The 2019 Antrim Senior Football Championship is the 117th official edition of Antrim GAA's premier club gaelic football tournament for senior clubs in County Antrim. 13 teams compete with the winners receiving the Padraig McNamee Cup and representing Antrim in the Ulster Senior Club Football Championship. The Antrim championship has a straight knock-out format. Naomh Éanna and Gort na Móna returned to the S.F.C. this year after claiming the 2018 I.F.C. and I.F.L. titles respectively. Erin's Own Cargin were the defending champions after they defeated Creggan Kickhams in the 2018 final. St Mary's Ahoghill and St Teresa's were relegated after finishing 11th and 12th respectively in the S.F.L. They were replaced in 2020 by St James' (I.F.C. Champions) and O'Donovan Rossa (I.F.L. Champions). On 12 October 2019, Erin's Own Cargin claimed their 9th S.F.C. and successfully defended their crown when defeating Lámh Dhearg 3-16 to 0-23 after extra time in the final replay at Corrigan Park. Team Changes Promoted from 2018 I.F.C. Naomh Éanna - (I.F.C. Champions) Gort na Móna - (I.F.L. Champions) Relegated to 2019 I.F.C. St Joseph's Glenavy - (12th in S.F.L.) Preliminary round Aghagallon 2-11, 1-8 Naomh Éanna, Glenavy, 17/8/2019, Round 1 7 of the 13 senior clubs play in this round. The 4 winners and the 4 teams who received byes compete in the quarter-finals. The 4 losing teams exit the championship. Lámh Dhearg 3-13, 1-6 Ahoghill, Corrigan Park, 24/8/2019, Aghagallon 2-15, 0-13 St Teresa's, Corrigan Park, 24/8/2019, Roger Casements Portglenone 1-10, 0-9 St Brigid's, Glenravel, 25/8/2019, O'Donovan Rossa 0-16, 0-9 Gort na Móna, Springfield Road, 25/9/2019, Quarter-finals The four 2018 S.F.C. Semi-Finalists received a bye to this year's Quarter-Finals. 
St Gall's 1-16, 1-9 Aghagallon, Creggan, 7/9/2019, Erin's Own Cargin 1-14, 0-12 O'Donovan Rossa, Creggan, 7/9/2019, Roger Casements Portglenone 0-12, 1-9 St John's, Ahoghill, 8/9/2019, Lámh Dhearg 1-13, 0-11 Creggan Kickhams, Glenavy, 8/9/2019, Roger Casements Portglenone 1-12, 2-6 St John's, Coláiste Feirste, 11/9/2019, (Replay), Semi-finals After extra-time had ended in a deadlock, a shootout was required to decide who would proceed to the final. With the score standing at 10-10 in sudden death and Lámh Dhearg forward and former county star Paddy Cunningham about to attempt to put his side back in front, Antrim chairman Ciaran McCavana ran onto the pitch and told him that that would be it for tonight. Another replay was called by the chairman after consenting both managers. Final Ulster Senior Club Football Championship References Antrim SFC Antrim Senior Football Championship Antrim Senior Football Championship
The Diedrich Busch House is a historic building located on the east side of Davenport, Iowa, United States. It was individually listed on the National Register of Historic Places, and as a contributing property in the McClellan Heights Historic District in 1984. Diedrich Busch Diedrich Busch (1827–1893) was born in the town of Hamminkeln in Prussia. He was apprenticed to a shoemaker, which became his trade for most of his life. He immigrated to the United States in 1853, landing in New York on July 3, and then made his way to Davenport. Busch was married twice. His first wife died young and then he married Emma Balcke, whose father was the pastor at the German Methodist Episcopal Church. Busch owned and operated a grocery store in the Village of East Davenport and the family lived above the store for several years before this house was built in 1877. Over the years he had made a sizeable investment in real estate on the east side of Davenport where he developed many houses and commercial structures. Architecture While this house sits geographically in the McClellan Heights neighborhood, architecturally it fits more properly in the neighboring Village of East Davenport. East Davenport was an industrial town that began in the 1850s and was annexed into the city of Davenport by the time the decade ended. The house follows a popular Vernacular style of architecture from the mid to late 19th-century Davenport known as the McClelland style. It shows influences from other architectural styles as well, such as the bracketed eaves and the polygonal window bay on the east side that suggests the Italianate style. The round arch windows in the attic are another popular feature in Davenport homes built in this era. The veranda that wraps around from the front of the house to the west side was probably added in the early 20th-century. The same is true for the sunroom in the re-entrant angle between the house's main block and the pavilion on the west side. 
References Houses completed in 1877 Houses in Davenport, Iowa Vernacular architecture in Iowa Houses on the National Register of Historic Places in Iowa National Register of Historic Places in Davenport, Iowa Individually listed contributing properties to historic districts on the National Register in Iowa 1877 establishments in Iowa
```c++
/* All rights reserved. Use is subject to license terms.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */

// Error-reporting implementations for the CVerify* wrapper classes declared
// in VerifyNdbApi.hpp.  Each Verify*Error method prints the name of the NDB
// API method that failed, the value it returned, and the associated
// NdbError code and message to ndbout.  All printing is bracketed by
// VerifyBegin()/VerifyEnd() so reports from concurrent threads do not
// interleave on the output stream.

#include <NdbApi.hpp>
#include <NdbOut.hpp>
#include <NdbMutex.h>

#include "VerifyNdbApi.hpp"

// Global mutex serializing all error-report output; created lazily.
NdbMutex* g_pNdbMutexVerify = 0;

// Acquire the report mutex, creating it on first use.
// NOTE(review): the lazy creation itself is unsynchronized — two threads
// racing through their first call could each run NdbMutex_Create().
// Presumably the first call happens before worker threads start; confirm.
void VerifyBegin(void)
{
  if(!g_pNdbMutexVerify)
  {
    g_pNdbMutexVerify = NdbMutex_Create();
  }
  NdbMutex_Lock(g_pNdbMutexVerify);
}

// Release the report mutex acquired by VerifyBegin().
void VerifyEnd(void)
{
  NdbMutex_Unlock(g_pNdbMutexVerify);
}

// Report a failing NdbSchemaOp method that returned integer code i.
void CVerifyNdbSchemaOp::VerifyIntError(const int i, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbSchemaOp::" << szMethod << " returned " << dec << i;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing NdbSchemaCon method that returned integer code i.
void CVerifyNdbSchemaCon::VerifyIntError(const int i, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbSchemaCon::" << szMethod << " returned " << dec << i;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing NdbSchemaCon method that returned pointer p.
// NOTE(review): casting the pointer to Uint32 truncates addresses on
// 64-bit platforms; the printed value may be wrong there.  The same cast
// appears in every VerifyPtrError below.
void CVerifyNdbSchemaCon::VerifyPtrError(void* p, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbSchemaCon::" << szMethod << " returned " << hex << (Uint32)p;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report an unexpected isNULL() result from an NdbRecAttr accessor.
void CVerifyNdbRecAttr::VerifyValueError(const int iNull, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbRecAttr::" << szMethod << " : isNULL() returned "
         << dec << iNull;
  ndbout << endl;
  VerifyEnd();
}

// Report a failing NdbOperation method that returned integer code i.
void CVerifyNdbOperation::VerifyIntError(const int i, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbOperation::" << szMethod << " returned " << dec << i;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing NdbOperation method that returned pointer p.
void CVerifyNdbOperation::VerifyPtrError(void* p, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbOperation::" << szMethod << " returned " << hex << (Uint32)p;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing NdbIndexOperation method that returned integer code i.
void CVerifyNdbIndexOperation::VerifyIntError(const int i, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbIndexOperation::" << szMethod << " returned " << dec << i;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing NdbIndexOperation method that returned pointer p.
void CVerifyNdbIndexOperation::VerifyPtrError(void* p, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbIndexOperation::" << szMethod << " returned "
         << hex << (Uint32)p;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing NdbConnection method that returned integer code i.
void CVerifyNdbConnection::VerifyIntError(const int i, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbConnection::" << szMethod << " returned " << dec << i;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing NdbConnection method that returned pointer p.
void CVerifyNdbConnection::VerifyPtrError(void* p, const char* szMethod)
{
  VerifyBegin();
  ndbout << "NdbConnection::" << szMethod << " returned " << hex << (Uint32)p;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a failing Ndb method that returned pointer p.
void CVerifyNdb::VerifyPtrError(void* p, const char* szMethod)
{
  VerifyBegin();
  ndbout << "Ndb::" << szMethod << " returned " << hex << (Uint32)p;
  ndbout << " : " << dec << getNdbError().code << " : "
         << getNdbError().message << endl;
  VerifyEnd();
}

// Report a void-returning Ndb method whose NdbError code iCode was non-OK.
void CVerifyNdb::VerifyVoidError(const int iCode, const char* szMethod)
{
  VerifyBegin();
  ndbout << "Ndb::" << szMethod << " : getNdbError().code returned "
         << dec << iCode;
  ndbout << " : " << getNdbError().message << endl;
  VerifyEnd();
}
```
```objective-c
/**
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

/**
 * This file is generated using the remodel generation script.
 * The name of the input file is KFVectorFeature.value
 */

#import "Compatibility.h"
#import "KFVectorGradientEffect.h"
#import "KFVectorPathTrim.h"

@class KFVectorFeature;

/**
 * Immutable value object describing one drawable feature of a Keyframes
 * vector animation.  All properties are readonly and populated through the
 * single initializer below; the class supports copying (NSCopying) and
 * archiving (NSCoding).
 *
 * NOTE(review): this header is generated from KFVectorFeature.value — edit
 * the .value source, not this file.
 */
@interface KFVectorFeature : NSObject <NSCopying, NSCoding>

/** Human-readable name of the feature. */
@property (nonatomic, readonly, copy) NSString *name;
/** Numeric identifier of this feature. */
@property (nonatomic, readonly) NSInteger featureId;
/** Size of the feature (presumably its design-time canvas size — confirm). */
@property (nonatomic, readonly) CGSize featureSize;
/** Identifier tying the feature to an animation group (semantics defined by the renderer). */
@property (nonatomic, readonly) NSInteger animationGroupId;
/** First frame index; presumably the frame at which the feature becomes active — confirm. */
@property (nonatomic, readonly) NSUInteger fromFrame;
/** Last frame index; presumably the frame at which the feature stops — confirm. */
@property (nonatomic, readonly) NSUInteger toFrame;
/** Fill color, or nil when the feature is not filled. */
@property (nonatomic, readonly, copy) UIColor *fillColor;
/** Stroke color, or nil when the feature is not stroked. */
@property (nonatomic, readonly, copy) UIColor *strokeColor;
/** Stroke line width. */
@property (nonatomic, readonly) CGFloat strokeWidth;
/** Stroke line-cap style name. */
@property (nonatomic, readonly, copy) NSString *strokeLineCap;
/** Key frames of the feature (element type not declared here — see generator input). */
@property (nonatomic, readonly, copy) NSArray *keyFrames;
/** Timing curves paired with the key frames (element type not declared here). */
@property (nonatomic, readonly, copy) NSArray *timingCurves;
/** Per-feature animations (element type not declared here). */
@property (nonatomic, readonly, copy) NSArray *featureAnimations;
/** Name of a backing image, or nil when the feature is purely vector. */
@property (nonatomic, readonly, copy) NSString *backedImage;
/** Another feature used as a mask for this one, or nil. */
@property (nonatomic, readonly, copy) KFVectorFeature *masking;
/** Optional gradient effect applied to the feature. */
@property (nonatomic, readonly, copy) KFVectorGradientEffect *gradientEffect;
/** Optional path-trim effect applied to the feature. */
@property (nonatomic, readonly, copy) KFVectorPathTrim *pathTrim;

/**
 * Designated initializer.  Each argument populates the correspondingly
 * named readonly property above.
 */
- (instancetype)initWithName:(NSString *)name featureId:(NSInteger)featureId featureSize:(CGSize)featureSize animationGroupId:(NSInteger)animationGroupId fromFrame:(NSUInteger)fromFrame toFrame:(NSUInteger)toFrame fillColor:(UIColor *)fillColor strokeColor:(UIColor *)strokeColor strokeWidth:(CGFloat)strokeWidth strokeLineCap:(NSString *)strokeLineCap keyFrames:(NSArray *)keyFrames timingCurves:(NSArray *)timingCurves featureAnimations:(NSArray *)featureAnimations backedImage:(NSString *)backedImage masking:(KFVectorFeature *)masking gradientEffect:(KFVectorGradientEffect *)gradientEffect pathTrim:(KFVectorPathTrim *)pathTrim;

@end
```
```emacs lisp
;;; tty-colors.el --- color support for character terminals

;; Author: Eli Zaretskii
;; Maintainer: emacs-devel@gnu.org
;; Keywords: terminals, faces

;; This file is part of GNU Emacs.

;; GNU Emacs is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; GNU Emacs is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GNU Emacs.  If not, see <https://www.gnu.org/licenses/>.

;;; Commentary:

;; Emacs support for colors evolved from the X Window System; color
;; support for character-based terminals came later.  Many Lisp
;; packages use color names defined by X and assume the availability
;; of certain functions that look up colors, convert them to pixel
;; values, etc.

;; This file provides a more or less useful emulation of the X color
;; functionality for character-based terminals, and thus relieves the
;; rest of Emacs from including special code for this case.

;; Here's how it works.  The support for terminal and MSDOS frames
;; maintains an alist, called `tty-defined-color-alist', which
;; associates colors supported by the terminal driver with small
;; integers.  (These small integers are passed to the library
;; functions which set the color, and are effectively indices of the
;; colors in the supported color palette.)  When Emacs needs to send a
;; color command to the terminal, the color name is first looked up in
;; `tty-defined-color-alist'.  If not found, functions from this file
;; can be used to map the color to one of the supported colors.
;; Specifically, the X RGB values of the requested color are extracted
;; from `color-name-rgb-alist' and then the supported color is found
;; with the minimal distance in the RGB space from the requested
;; color.
;; `tty-defined-color-alist' is created at startup by calling the ;; function `tty-register-default-colors', defined below, which in ;; turn calls `tty-color-define', passing it each supported color, its ;; index, and its RGB values. The standard list of colors supported ;; by many Unix color terminals, including xterm, FreeBSD, and ;; GNU/Linux, is supplied below in `tty-standard-colors'. Some ;; terminal-specific files in lisp/term define their own standard ;; colors. If your terminal supports different or additional colors, ;; call `tty-color-define' from your `.emacs' or `site-start.el'. For ;; more-or-less standard definitions of VGA text-mode colors, see ;; lisp/term/pc-win.el. ;;; Code: ;; The following list is taken from rgb.txt distributed with X. ;; ;; WARNING: Some colors, such as "lightred", do not appear in this ;; list. If you think it's a good idea to add them, don't! The ;; problem is that the X-standard definition of "red" actually ;; corresponds to "lightred" on VGA (that's why pc-win.el and ;; w32-fns.el define "lightred" with the same RGB values as "red" ;; below). Adding "lightred" here would therefore create confusing ;; and counter-intuitive results, like "red" and "lightred" being the ;; same color. A similar situation exists with other "light*" colors. ;; ;; Nevertheless, "lightred" and other similar color names *are* ;; defined for the MS-DOS and MS-Windows consoles, because the users ;; on those systems expect these colors to be available. ;; ;; For these reasons, package maintainers are advised NOT to use color ;; names such as "lightred" or "lightblue", because they will have ;; different effect on different displays. Instead, use "red1" and ;; "blue1", respectively. ;; ;; Note: the RGB values below are in the range 0-65535, but are derived ;; from the standard 8-bit X definitions (so the upper and lower bytes ;; of each value are actually identical). 
;; (defconst color-name-rgb-alist '(("snow" 65535 64250 64250) ("ghostwhite" 63736 63736 65535) ("whitesmoke" 62965 62965 62965) ("gainsboro" 56540 56540 56540) ("floralwhite" 65535 64250 61680) ("oldlace" 65021 62965 59110) ("linen" 64250 61680 59110) ("antiquewhite" 64250 60395 55255) ("papayawhip" 65535 61423 54741) ("blanchedalmond" 65535 60395 52685) ("bisque" 65535 58596 50372) ("peachpuff" 65535 56026 47545) ("navajowhite" 65535 57054 44461) ("moccasin" 65535 58596 46517) ("cornsilk" 65535 63736 56540) ("ivory" 65535 65535 61680) ("lemonchiffon" 65535 64250 52685) ("seashell" 65535 62965 61166) ("honeydew" 61680 65535 61680) ("mintcream" 62965 65535 64250) ("azure" 61680 65535 65535) ("aliceblue" 61680 63736 65535) ("lavender" 59110 59110 64250) ("lavenderblush" 65535 61680 62965) ("mistyrose" 65535 58596 57825) ("white" 65535 65535 65535) ("black" 0 0 0) ("darkslategray" 12079 20303 20303) ("darkslategrey" 12079 20303 20303) ("dimgray" 26985 26985 26985) ("dimgrey" 26985 26985 26985) ("slategray" 28784 32896 37008) ("slategrey" 28784 32896 37008) ("lightslategray" 30583 34952 39321) ("lightslategrey" 30583 34952 39321) ("gray" 48830 48830 48830) ("grey" 48830 48830 48830) ("lightgrey" 54227 54227 54227) ("lightgray" 54227 54227 54227) ("midnightblue" 6425 6425 28784) ("navy" 0 0 32896) ("navyblue" 0 0 32896) ("cornflowerblue" 25700 38293 60909) ("darkslateblue" 18504 15677 35723) ("slateblue" 27242 23130 52685) ("mediumslateblue" 31611 26728 61166) ("lightslateblue" 33924 28784 65535) ("mediumblue" 0 0 52685) ("royalblue" 16705 26985 57825) ("blue" 0 0 65535) ("dodgerblue" 7710 37008 65535) ("deepskyblue" 0 49087 65535) ("skyblue" 34695 52942 60395) ("lightskyblue" 34695 52942 64250) ("steelblue" 17990 33410 46260) ("lightsteelblue" 45232 50372 57054) ("lightblue" 44461 55512 59110) ("powderblue" 45232 57568 59110) ("paleturquoise" 44975 61166 61166) ("darkturquoise" 0 52942 53713) ("mediumturquoise" 18504 53713 52428) ("turquoise" 16448 57568 53456) 
("cyan" 0 65535 65535) ("lightcyan" 57568 65535 65535) ("cadetblue" 24415 40606 41120) ("mediumaquamarine" 26214 52685 43690) ("aquamarine" 32639 65535 54484) ("darkgreen" 0 25700 0) ("darkolivegreen" 21845 27499 12079) ("darkseagreen" 36751 48316 36751) ("seagreen" 11822 35723 22359) ("mediumseagreen" 15420 46003 29041) ("lightseagreen" 8224 45746 43690) ("palegreen" 39064 64507 39064) ("springgreen" 0 65535 32639) ("lawngreen" 31868 64764 0) ("green" 0 65535 0) ("chartreuse" 32639 65535 0) ("mediumspringgreen" 0 64250 39578) ("greenyellow" 44461 65535 12079) ("limegreen" 12850 52685 12850) ("yellowgreen" 39578 52685 12850) ("forestgreen" 8738 35723 8738) ("olivedrab" 27499 36494 8995) ("darkkhaki" 48573 47031 27499) ("khaki" 61680 59110 35980) ("palegoldenrod" 61166 59624 43690) ("lightgoldenrodyellow" 64250 64250 53970) ("lightyellow" 65535 65535 57568) ("yellow" 65535 65535 0) ("gold" 65535 55255 0) ("lightgoldenrod" 61166 56797 33410) ("goldenrod" 56026 42405 8224) ("darkgoldenrod" 47288 34438 2827) ("rosybrown" 48316 36751 36751) ("indianred" 52685 23644 23644) ("saddlebrown" 35723 17733 4883) ("sienna" 41120 21074 11565) ("peru" 52685 34181 16191) ("burlywood" 57054 47288 34695) ("beige" 62965 62965 56540) ("wheat" 62965 57054 46003) ("sandybrown" 62708 42148 24672) ("tan" 53970 46260 35980) ("chocolate" 53970 26985 7710) ("firebrick" 45746 8738 8738) ("brown" 42405 10794 10794) ("darksalmon" 59881 38550 31354) ("salmon" 64250 32896 29298) ("lightsalmon" 65535 41120 31354) ("orange" 65535 42405 0) ("darkorange" 65535 35980 0) ("coral" 65535 32639 20560) ("lightcoral" 61680 32896 32896) ("tomato" 65535 25443 18247) ("orangered" 65535 17733 0) ("red" 65535 0 0) ("hotpink" 65535 26985 46260) ("deeppink" 65535 5140 37779) ("pink" 65535 49344 52171) ("lightpink" 65535 46774 49601) ("palevioletred" 56283 28784 37779) ("maroon" 45232 12336 24672) ("mediumvioletred" 51143 5397 34181) ("violetred" 53456 8224 37008) ("magenta" 65535 0 65535) ("violet" 61166 33410 
61166) ("plum" 56797 41120 56797) ("orchid" 56026 28784 54998) ("mediumorchid" 47802 21845 54227) ("darkorchid" 39321 12850 52428) ("darkviolet" 38036 0 54227) ("blueviolet" 35466 11051 58082) ("purple" 41120 8224 61680) ("mediumpurple" 37779 28784 56283) ("thistle" 55512 49087 55512) ("snow1" 65535 64250 64250) ("snow2" 61166 59881 59881) ("snow3" 52685 51657 51657) ("snow4" 35723 35209 35209) ("seashell1" 65535 62965 61166) ("seashell2" 61166 58853 57054) ("seashell3" 52685 50629 49087) ("seashell4" 35723 34438 33410) ("antiquewhite1" 65535 61423 56283) ("antiquewhite2" 61166 57311 52428) ("antiquewhite3" 52685 49344 45232) ("antiquewhite4" 35723 33667 30840) ("bisque1" 65535 58596 50372) ("bisque2" 61166 54741 47031) ("bisque3" 52685 47031 40606) ("bisque4" 35723 32125 27499) ("peachpuff1" 65535 56026 47545) ("peachpuff2" 61166 52171 44461) ("peachpuff3" 52685 44975 38293) ("peachpuff4" 35723 30583 25957) ("navajowhite1" 65535 57054 44461) ("navajowhite2" 61166 53199 41377) ("navajowhite3" 52685 46003 35723) ("navajowhite4" 35723 31097 24158) ("lemonchiffon1" 65535 64250 52685) ("lemonchiffon2" 61166 59881 49087) ("lemonchiffon3" 52685 51657 42405) ("lemonchiffon4" 35723 35209 28784) ("cornsilk1" 65535 63736 56540) ("cornsilk2" 61166 59624 52685) ("cornsilk3" 52685 51400 45489) ("cornsilk4" 35723 34952 30840) ("ivory1" 65535 65535 61680) ("ivory2" 61166 61166 57568) ("ivory3" 52685 52685 49601) ("ivory4" 35723 35723 33667) ("honeydew1" 61680 65535 61680) ("honeydew2" 57568 61166 57568) ("honeydew3" 49601 52685 49601) ("honeydew4" 33667 35723 33667) ("lavenderblush1" 65535 61680 62965) ("lavenderblush2" 61166 57568 58853) ("lavenderblush3" 52685 49601 50629) ("lavenderblush4" 35723 33667 34438) ("mistyrose1" 65535 58596 57825) ("mistyrose2" 61166 54741 53970) ("mistyrose3" 52685 47031 46517) ("mistyrose4" 35723 32125 31611) ("azure1" 61680 65535 65535) ("azure2" 57568 61166 61166) ("azure3" 49601 52685 52685) ("azure4" 33667 35723 35723) ("slateblue1" 33667 28527 
65535) ("slateblue2" 31354 26471 61166) ("slateblue3" 26985 22873 52685) ("slateblue4" 18247 15420 35723) ("royalblue1" 18504 30326 65535) ("royalblue2" 17219 28270 61166) ("royalblue3" 14906 24415 52685) ("royalblue4" 10023 16448 35723) ("blue1" 0 0 65535) ("blue2" 0 0 61166) ("blue3" 0 0 52685) ("blue4" 0 0 35723) ("dodgerblue1" 7710 37008 65535) ("dodgerblue2" 7196 34438 61166) ("dodgerblue3" 6168 29812 52685) ("dodgerblue4" 4112 20046 35723) ("steelblue1" 25443 47288 65535) ("steelblue2" 23644 44204 61166) ("steelblue3" 20303 38036 52685) ("steelblue4" 13878 25700 35723) ("deepskyblue1" 0 49087 65535) ("deepskyblue2" 0 45746 61166) ("deepskyblue3" 0 39578 52685) ("deepskyblue4" 0 26728 35723) ("skyblue1" 34695 52942 65535) ("skyblue2" 32382 49344 61166) ("skyblue3" 27756 42662 52685) ("skyblue4" 19018 28784 35723) ("lightskyblue1" 45232 58082 65535) ("lightskyblue2" 42148 54227 61166) ("lightskyblue3" 36237 46774 52685) ("lightskyblue4" 24672 31611 35723) ("slategray1" 50886 58082 65535) ("slategray2" 47545 54227 61166) ("slategray3" 40863 46774 52685) ("slategray4" 27756 31611 35723) ("lightsteelblue1" 51914 57825 65535) ("lightsteelblue2" 48316 53970 61166) ("lightsteelblue3" 41634 46517 52685) ("lightsteelblue4" 28270 31611 35723) ("lightblue1" 49087 61423 65535) ("lightblue2" 45746 57311 61166) ("lightblue3" 39578 49344 52685) ("lightblue4" 26728 33667 35723) ("lightcyan1" 57568 65535 65535) ("lightcyan2" 53713 61166 61166) ("lightcyan3" 46260 52685 52685) ("lightcyan4" 31354 35723 35723) ("paleturquoise1" 48059 65535 65535) ("paleturquoise2" 44718 61166 61166) ("paleturquoise3" 38550 52685 52685) ("paleturquoise4" 26214 35723 35723) ("cadetblue1" 39064 62965 65535) ("cadetblue2" 36494 58853 61166) ("cadetblue3" 31354 50629 52685) ("cadetblue4" 21331 34438 35723) ("turquoise1" 0 62965 65535) ("turquoise2" 0 58853 61166) ("turquoise3" 0 50629 52685) ("turquoise4" 0 34438 35723) ("cyan1" 0 65535 65535) ("cyan2" 0 61166 61166) ("cyan3" 0 52685 52685) ("cyan4" 
0 35723 35723) ("darkslategray1" 38807 65535 65535) ("darkslategray2" 36237 61166 61166) ("darkslategray3" 31097 52685 52685) ("darkslategray4" 21074 35723 35723) ("aquamarine1" 32639 65535 54484) ("aquamarine2" 30326 61166 50886) ("aquamarine3" 26214 52685 43690) ("aquamarine4" 17733 35723 29812) ("darkseagreen1" 49601 65535 49601) ("darkseagreen2" 46260 61166 46260) ("darkseagreen3" 39835 52685 39835) ("darkseagreen4" 26985 35723 26985) ("seagreen1" 21588 65535 40863) ("seagreen2" 20046 61166 38036) ("seagreen3" 17219 52685 32896) ("seagreen4" 11822 35723 22359) ("palegreen1" 39578 65535 39578) ("palegreen2" 37008 61166 37008) ("palegreen3" 31868 52685 31868) ("palegreen4" 21588 35723 21588) ("springgreen1" 0 65535 32639) ("springgreen2" 0 61166 30326) ("springgreen3" 0 52685 26214) ("springgreen4" 0 35723 17733) ("green1" 0 65535 0) ("green2" 0 61166 0) ("green3" 0 52685 0) ("green4" 0 35723 0) ("chartreuse1" 32639 65535 0) ("chartreuse2" 30326 61166 0) ("chartreuse3" 26214 52685 0) ("chartreuse4" 17733 35723 0) ("olivedrab1" 49344 65535 15934) ("olivedrab2" 46003 61166 14906) ("olivedrab3" 39578 52685 12850) ("olivedrab4" 26985 35723 8738) ("darkolivegreen1" 51914 65535 28784) ("darkolivegreen2" 48316 61166 26728) ("darkolivegreen3" 41634 52685 23130) ("darkolivegreen4" 28270 35723 15677) ("khaki1" 65535 63222 36751) ("khaki2" 61166 59110 34181) ("khaki3" 52685 50886 29555) ("khaki4" 35723 34438 20046) ("lightgoldenrod1" 65535 60652 35723) ("lightgoldenrod2" 61166 56540 33410) ("lightgoldenrod3" 52685 48830 28784) ("lightgoldenrod4" 35723 33153 19532) ("lightyellow1" 65535 65535 57568) ("lightyellow2" 61166 61166 53713) ("lightyellow3" 52685 52685 46260) ("lightyellow4" 35723 35723 31354) ("yellow1" 65535 65535 0) ("yellow2" 61166 61166 0) ("yellow3" 52685 52685 0) ("yellow4" 35723 35723 0) ("gold1" 65535 55255 0) ("gold2" 61166 51657 0) ("gold3" 52685 44461 0) ("gold4" 35723 30069 0) ("goldenrod1" 65535 49601 9509) ("goldenrod2" 61166 46260 8738) ("goldenrod3" 
52685 39835 7453) ("goldenrod4" 35723 26985 5140) ("darkgoldenrod1" 65535 47545 3855) ("darkgoldenrod2" 61166 44461 3598) ("darkgoldenrod3" 52685 38293 3084) ("darkgoldenrod4" 35723 25957 2056) ("rosybrown1" 65535 49601 49601) ("rosybrown2" 61166 46260 46260) ("rosybrown3" 52685 39835 39835) ("rosybrown4" 35723 26985 26985) ("indianred1" 65535 27242 27242) ("indianred2" 61166 25443 25443) ("indianred3" 52685 21845 21845) ("indianred4" 35723 14906 14906) ("sienna1" 65535 33410 18247) ("sienna2" 61166 31097 16962) ("sienna3" 52685 26728 14649) ("sienna4" 35723 18247 9766) ("burlywood1" 65535 54227 39835) ("burlywood2" 61166 50629 37265) ("burlywood3" 52685 43690 32125) ("burlywood4" 35723 29555 21845) ("wheat1" 65535 59367 47802) ("wheat2" 61166 55512 44718) ("wheat3" 52685 47802 38550) ("wheat4" 35723 32382 26214) ("tan1" 65535 42405 20303) ("tan2" 61166 39578 18761) ("tan3" 52685 34181 16191) ("tan4" 35723 23130 11051) ("chocolate1" 65535 32639 9252) ("chocolate2" 61166 30326 8481) ("chocolate3" 52685 26214 7453) ("chocolate4" 35723 17733 4883) ("firebrick1" 65535 12336 12336) ("firebrick2" 61166 11308 11308) ("firebrick3" 52685 9766 9766) ("firebrick4" 35723 6682 6682) ("brown1" 65535 16448 16448) ("brown2" 61166 15163 15163) ("brown3" 52685 13107 13107) ("brown4" 35723 8995 8995) ("salmon1" 65535 35980 26985) ("salmon2" 61166 33410 25186) ("salmon3" 52685 28784 21588) ("salmon4" 35723 19532 14649) ("lightsalmon1" 65535 41120 31354) ("lightsalmon2" 61166 38293 29298) ("lightsalmon3" 52685 33153 25186) ("lightsalmon4" 35723 22359 16962) ("orange1" 65535 42405 0) ("orange2" 61166 39578 0) ("orange3" 52685 34181 0) ("orange4" 35723 23130 0) ("darkorange1" 65535 32639 0) ("darkorange2" 61166 30326 0) ("darkorange3" 52685 26214 0) ("darkorange4" 35723 17733 0) ("coral1" 65535 29298 22102) ("coral2" 61166 27242 20560) ("coral3" 52685 23387 17733) ("coral4" 35723 15934 12079) ("tomato1" 65535 25443 18247) ("tomato2" 61166 23644 16962) ("tomato3" 52685 20303 14649) 
("tomato4" 35723 13878 9766) ("orangered1" 65535 17733 0) ("orangered2" 61166 16448 0) ("orangered3" 52685 14135 0) ("orangered4" 35723 9509 0) ("red1" 65535 0 0) ("red2" 61166 0 0) ("red3" 52685 0 0) ("red4" 35723 0 0) ("deeppink1" 65535 5140 37779) ("deeppink2" 61166 4626 35209) ("deeppink3" 52685 4112 30326) ("deeppink4" 35723 2570 20560) ("hotpink1" 65535 28270 46260) ("hotpink2" 61166 27242 42919) ("hotpink3" 52685 24672 37008) ("hotpink4" 35723 14906 25186) ("pink1" 65535 46517 50629) ("pink2" 61166 43433 47288) ("pink3" 52685 37265 40606) ("pink4" 35723 25443 27756) ("lightpink1" 65535 44718 47545) ("lightpink2" 61166 41634 44461) ("lightpink3" 52685 35980 38293) ("lightpink4" 35723 24415 25957) ("palevioletred1" 65535 33410 43947) ("palevioletred2" 61166 31097 40863) ("palevioletred3" 52685 26728 35209) ("palevioletred4" 35723 18247 23901) ("maroon1" 65535 13364 46003) ("maroon2" 61166 12336 42919) ("maroon3" 52685 10537 37008) ("maroon4" 35723 7196 25186) ("violetred1" 65535 15934 38550) ("violetred2" 61166 14906 35980) ("violetred3" 52685 12850 30840) ("violetred4" 35723 8738 21074) ("magenta1" 65535 0 65535) ("magenta2" 61166 0 61166) ("magenta3" 52685 0 52685) ("magenta4" 35723 0 35723) ("orchid1" 65535 33667 64250) ("orchid2" 61166 31354 59881) ("orchid3" 52685 26985 51657) ("orchid4" 35723 18247 35209) ("plum1" 65535 48059 65535) ("plum2" 61166 44718 61166) ("plum3" 52685 38550 52685) ("plum4" 35723 26214 35723) ("mediumorchid1" 57568 26214 65535) ("mediumorchid2" 53713 24415 61166) ("mediumorchid3" 46260 21074 52685) ("mediumorchid4" 31354 14135 35723) ("darkorchid1" 49087 15934 65535) ("darkorchid2" 45746 14906 61166) ("darkorchid3" 39578 12850 52685) ("darkorchid4" 26728 8738 35723) ("purple1" 39835 12336 65535) ("purple2" 37265 11308 61166) ("purple3" 32125 9766 52685) ("purple4" 21845 6682 35723) ("mediumpurple1" 43947 33410 65535) ("mediumpurple2" 40863 31097 61166) ("mediumpurple3" 35209 26728 52685) ("mediumpurple4" 23901 18247 35723) 
("thistle1" 65535 57825 65535) ("thistle2" 61166 53970 61166) ("thistle3" 52685 46517 52685) ("thistle4" 35723 31611 35723) ("gray0" 0 0 0) ("grey0" 0 0 0) ("gray1" 771 771 771) ("grey1" 771 771 771) ("gray2" 1285 1285 1285) ("grey2" 1285 1285 1285) ("gray3" 2056 2056 2056) ("grey3" 2056 2056 2056) ("gray4" 2570 2570 2570) ("grey4" 2570 2570 2570) ("gray5" 3341 3341 3341) ("grey5" 3341 3341 3341) ("gray6" 3855 3855 3855) ("grey6" 3855 3855 3855) ("gray7" 4626 4626 4626) ("grey7" 4626 4626 4626) ("gray8" 5140 5140 5140) ("grey8" 5140 5140 5140) ("gray9" 5911 5911 5911) ("grey9" 5911 5911 5911) ("gray10" 6682 6682 6682) ("grey10" 6682 6682 6682) ("gray11" 7196 7196 7196) ("grey11" 7196 7196 7196) ("gray12" 7967 7967 7967) ("grey12" 7967 7967 7967) ("gray13" 8481 8481 8481) ("grey13" 8481 8481 8481) ("gray14" 9252 9252 9252) ("grey14" 9252 9252 9252) ("gray15" 9766 9766 9766) ("grey15" 9766 9766 9766) ("gray16" 10537 10537 10537) ("grey16" 10537 10537 10537) ("gray17" 11051 11051 11051) ("grey17" 11051 11051 11051) ("gray18" 11822 11822 11822) ("grey18" 11822 11822 11822) ("gray19" 12336 12336 12336) ("grey19" 12336 12336 12336) ("gray20" 13107 13107 13107) ("grey20" 13107 13107 13107) ("gray21" 13878 13878 13878) ("grey21" 13878 13878 13878) ("gray22" 14392 14392 14392) ("grey22" 14392 14392 14392) ("gray23" 15163 15163 15163) ("grey23" 15163 15163 15163) ("gray24" 15677 15677 15677) ("grey24" 15677 15677 15677) ("gray25" 16448 16448 16448) ("grey25" 16448 16448 16448) ("gray26" 16962 16962 16962) ("grey26" 16962 16962 16962) ("gray27" 17733 17733 17733) ("grey27" 17733 17733 17733) ("gray28" 18247 18247 18247) ("grey28" 18247 18247 18247) ("gray29" 19018 19018 19018) ("grey29" 19018 19018 19018) ("gray30" 19789 19789 19789) ("grey30" 19789 19789 19789) ("gray31" 20303 20303 20303) ("grey31" 20303 20303 20303) ("gray32" 21074 21074 21074) ("grey32" 21074 21074 21074) ("gray33" 21588 21588 21588) ("grey33" 21588 21588 21588) ("gray34" 22359 22359 22359) ("grey34" 
22359 22359 22359) ("gray35" 22873 22873 22873) ("grey35" 22873 22873 22873) ("gray36" 23644 23644 23644) ("grey36" 23644 23644 23644) ("gray37" 24158 24158 24158) ("grey37" 24158 24158 24158) ("gray38" 24929 24929 24929) ("grey38" 24929 24929 24929) ("gray39" 25443 25443 25443) ("grey39" 25443 25443 25443) ("gray40" 26214 26214 26214) ("grey40" 26214 26214 26214) ("gray41" 26985 26985 26985) ("grey41" 26985 26985 26985) ("gray42" 27499 27499 27499) ("grey42" 27499 27499 27499) ("gray43" 28270 28270 28270) ("grey43" 28270 28270 28270) ("gray44" 28784 28784 28784) ("grey44" 28784 28784 28784) ("gray45" 29555 29555 29555) ("grey45" 29555 29555 29555) ("gray46" 30069 30069 30069) ("grey46" 30069 30069 30069) ("gray47" 30840 30840 30840) ("grey47" 30840 30840 30840) ("gray48" 31354 31354 31354) ("grey48" 31354 31354 31354) ("gray49" 32125 32125 32125) ("grey49" 32125 32125 32125) ("gray50" 32639 32639 32639) ("grey50" 32639 32639 32639) ("gray51" 33410 33410 33410) ("grey51" 33410 33410 33410) ("gray52" 34181 34181 34181) ("grey52" 34181 34181 34181) ("gray53" 34695 34695 34695) ("grey53" 34695 34695 34695) ("gray54" 35466 35466 35466) ("grey54" 35466 35466 35466) ("gray55" 35980 35980 35980) ("grey55" 35980 35980 35980) ("gray56" 36751 36751 36751) ("grey56" 36751 36751 36751) ("gray57" 37265 37265 37265) ("grey57" 37265 37265 37265) ("gray58" 38036 38036 38036) ("grey58" 38036 38036 38036) ("gray59" 38550 38550 38550) ("grey59" 38550 38550 38550) ("gray60" 39321 39321 39321) ("grey60" 39321 39321 39321) ("gray61" 40092 40092 40092) ("grey61" 40092 40092 40092) ("gray62" 40606 40606 40606) ("grey62" 40606 40606 40606) ("gray63" 41377 41377 41377) ("grey63" 41377 41377 41377) ("gray64" 41891 41891 41891) ("grey64" 41891 41891 41891) ("gray65" 42662 42662 42662) ("grey65" 42662 42662 42662) ("gray66" 43176 43176 43176) ("grey66" 43176 43176 43176) ("gray67" 43947 43947 43947) ("grey67" 43947 43947 43947) ("gray68" 44461 44461 44461) ("grey68" 44461 44461 44461) 
("gray69" 45232 45232 45232) ("grey69" 45232 45232 45232) ("gray70" 46003 46003 46003) ("grey70" 46003 46003 46003) ("gray71" 46517 46517 46517) ("grey71" 46517 46517 46517) ("gray72" 47288 47288 47288) ("grey72" 47288 47288 47288) ("gray73" 47802 47802 47802) ("grey73" 47802 47802 47802) ("gray74" 48573 48573 48573) ("grey74" 48573 48573 48573) ("gray75" 49087 49087 49087) ("grey75" 49087 49087 49087) ("gray76" 49858 49858 49858) ("grey76" 49858 49858 49858) ("gray77" 50372 50372 50372) ("grey77" 50372 50372 50372) ("gray78" 51143 51143 51143) ("grey78" 51143 51143 51143) ("gray79" 51657 51657 51657) ("grey79" 51657 51657 51657) ("gray80" 52428 52428 52428) ("grey80" 52428 52428 52428) ("gray81" 53199 53199 53199) ("grey81" 53199 53199 53199) ("gray82" 53713 53713 53713) ("grey82" 53713 53713 53713) ("gray83" 54484 54484 54484) ("grey83" 54484 54484 54484) ("gray84" 54998 54998 54998) ("grey84" 54998 54998 54998) ("gray85" 55769 55769 55769) ("grey85" 55769 55769 55769) ("gray86" 56283 56283 56283) ("grey86" 56283 56283 56283) ("gray87" 57054 57054 57054) ("grey87" 57054 57054 57054) ("gray88" 57568 57568 57568) ("grey88" 57568 57568 57568) ("gray89" 58339 58339 58339) ("grey89" 58339 58339 58339) ("gray90" 58853 58853 58853) ("grey90" 58853 58853 58853) ("gray91" 59624 59624 59624) ("grey91" 59624 59624 59624) ("gray92" 60395 60395 60395) ("grey92" 60395 60395 60395) ("gray93" 60909 60909 60909) ("grey93" 60909 60909 60909) ("gray94" 61680 61680 61680) ("grey94" 61680 61680 61680) ("gray95" 62194 62194 62194) ("grey95" 62194 62194 62194) ("gray96" 62965 62965 62965) ("grey96" 62965 62965 62965) ("gray97" 63479 63479 63479) ("grey97" 63479 63479 63479) ("gray98" 64250 64250 64250) ("grey98" 64250 64250 64250) ("gray99" 64764 64764 64764) ("grey99" 64764 64764 64764) ("gray100" 65535 65535 65535) ("grey100" 65535 65535 65535) ("darkgrey" 43433 43433 43433) ("darkgray" 43433 43433 43433) ("darkblue" 0 0 35723) ("darkcyan" 0 35723 35723) ; no "lightmagenta", see 
;; comment above
("darkmagenta" 35723     0 35723)
("darkred"     35723     0     0) ; but no "lightred", see comment above
("lightgreen"  37008 61166 37008))
  "An alist of X color names and associated 16-bit RGB values.")

(defconst tty-standard-colors
  '(("black"   0     0     0     0)
    ("red"     1 65535     0     0)
    ("green"   2     0 65535     0)
    ("yellow"  3 65535 65535     0)
    ("blue"    4     0     0 65535)
    ("magenta" 5 65535     0 65535)
    ("cyan"    6     0 65535 65535)
    ("white"   7 65535 65535 65535))
  "An alist of 8 standard tty colors, their indices and RGB values.")

;; This is used by term.c
(defconst tty-color-mode-alist
  '((never . -1)
    (no . -1)
    (default . 0)
    (auto . 0)
    (ansi8 . 8)
    (always . 8)
    (yes . 8))
  "An alist of supported standard tty color modes and their aliases.")

(defun tty-color-alist (&optional _frame)
  "Return an alist of colors supported by FRAME's terminal.
FRAME defaults to the selected frame.
Each element of the returned alist is of the form:
 (NAME INDEX R G B)
where NAME is the name of the color, a string;
INDEX is the index of this color to be sent to the terminal driver
when the color should be displayed; it is typically a small integer;
R, G, and B are the intensities of, accordingly, red, green, and blue
components of the color, represented as numbers between 0 and 65535.
The file `etc/rgb.txt' in the Emacs distribution lists the standard
RGB values of the X colors.  If RGB is nil, this color will not be
considered by `tty-color-translate' as an approximation to another
color."
  tty-defined-color-alist)

(defun tty-modify-color-alist (elt &optional frame)
  "Put the association ELT into the alist of terminal colors for FRAME.
ELT should be of the form (NAME INDEX R G B) (see `tty-color-alist'
for details).
If the association for NAME already exists in the color alist, it is
modified to specify (INDEX R G B) as its cdr.  Otherwise, ELT is
appended to the end of the color alist.
If FRAME is unspecified or nil, it defaults to the selected frame.
Value is the modified color alist for FRAME."
  (let* ((entry (assoc (car elt) (tty-color-alist frame))))
    (if entry
        (setcdr entry (cdr elt))
      ;; Keep the colors in the order they are registered.
      (setq entry
            (list (append (list (car elt) (cadr elt))
                          (copy-sequence (cddr elt)))))
      (setq tty-defined-color-alist (nconc tty-defined-color-alist entry)))
    tty-defined-color-alist))

(defun tty-register-default-colors ()
  "Register the default set of colors for a character terminal."
  (let* ((colors tty-standard-colors)
         (color (car colors)))
    (while colors
      (tty-color-define (car color) (cadr color) (cddr color))
      (setq colors (cdr colors)
            color (car colors)))
    ;; Modifying color mappings means realized faces don't use the
    ;; right colors, so clear them, if we modified colors on a TTY frame.
    (or (display-graphic-p)
        (clear-face-cache))))

(defun tty-color-canonicalize (color)
  "Return COLOR in canonical form.
A canonicalized color name is all-lower case, with any blanks removed."
  (let ((case-fold-search nil))
    (if (string-match "[A-Z ]" color)
        (replace-regexp-in-string " +" "" (downcase color))
      color)))

(defun tty-color-24bit (rgb)
  "Return pixel value on 24-bit terminals.
Return nil if RGB is nil or not on 24-bit terminal."
  (when (and rgb (= (display-color-cells) 16777216))
    ;; Note: `ash' is used instead of the obsolete `lsh'; the two are
    ;; identical here because all operands are non-negative.
    (let ((r (ash (car rgb) -8))
          (g (ash (cadr rgb) -8))
          (b (ash (nth 2 rgb) -8)))
      (logior (ash r 16) (ash g 8) b))))

(defun tty-color-define (name index &optional rgb frame)
  "Specify a tty color by its NAME, terminal INDEX and RGB values.
NAME is a string, INDEX is typically a small integer used to send to
the terminal driver a command to switch this color on, and RGB is a
list of 3 numbers that specify the intensity of red, green, and blue
components of the color.
If specified, each one of the RGB components must be a number between
0 and 65535.  If RGB is omitted, the specified color will never be used
by `tty-color-translate' as an approximation to another color.
FRAME is the frame where the defined color should be used.
If FRAME is not specified or is nil, it defaults to the selected frame."
  (if (or (not (stringp name))
          (not (integerp index))
          (and rgb (or (not (listp rgb)) (/= (length rgb) 3))))
      (error "Invalid specification for tty color \"%s\"" name))
  (tty-modify-color-alist
   (append (list (tty-color-canonicalize name)
                 (or (tty-color-24bit rgb) index))
           rgb)
   frame))

(defun tty-color-clear (&optional _frame)
  "Clear the list of supported tty colors for frame FRAME.
If FRAME is unspecified or nil, it defaults to the selected frame."
  (setq tty-defined-color-alist nil))

(defun tty-color-off-gray-diag (r g b)
  "Compute the angle between the color given by R,G,B and the gray diagonal.
The gray diagonal is the diagonal of the 3D cube in RGB space which
connects the points corresponding to the black and white colors.  All the
colors whose RGB coordinates belong to this diagonal are various shades
of gray, thus the name."
  (let ((mag (sqrt (* 3 (+ (* r r) (* g g) (* b b))))))
    (if (< mag 1) 0 (acos (/ (+ r g b) mag)))))

(defun tty-color-approximate (rgb &optional frame)
  "Find the color in `tty-color-alist' that best approximates RGB.
Value is a list of the form (NAME INDEX R G B).
The argument RGB should be an rgb value, that is, a list of three
integers in the 0..65535 range.
FRAME defaults to the selected frame."
  (let* ((color-list (tty-color-alist frame))
         (candidate (car color-list))
         (best-distance 195076)         ;; 3 * 255^2 + 15
         (r (ash (car rgb) -8))
         (g (ash (cadr rgb) -8))
         (b (ash (nth 2 rgb) -8))
         best-color)
    (while candidate
      (let ((try-rgb (cddr candidate))
            ;; If the approximated color is not close enough to the
            ;; gray diagonal of the RGB cube, favor non-gray colors.
            ;; (The number 0.065 is an empirical ad-hoc'ery.)
            (favor-non-gray (>= (tty-color-off-gray-diag r g b) 0.065))
            try-r try-g try-b dif-r dif-g dif-b dist)
        ;; If the RGB values of the candidate color are unknown, we
        ;; never consider it for approximating another color.
        (if try-rgb
            (progn
              ;; `ash' rather than obsolete `lsh' (values non-negative).
              (setq try-r (ash (car try-rgb) -8)
                    try-g (ash (cadr try-rgb) -8)
                    try-b (ash (nth 2 try-rgb) -8))
              (setq dif-r (- r try-r)
                    dif-g (- g try-g)
                    dif-b (- b try-b))
              (setq dist (+ (* dif-r dif-r) (* dif-g dif-g) (* dif-b dif-b)))
              (if (and (< dist best-distance)
                       ;; The candidate color is on the gray diagonal
                       ;; if its RGB components are all equal.
                       (or (/= try-r try-g) (/= try-g try-b)
                           (not favor-non-gray)))
                  (setq best-distance dist
                        best-color candidate)))))
      (setq color-list (cdr color-list))
      (setq candidate (car color-list)))
    best-color))

(defun tty-color-standard-values (color)
  "Return standard RGB values of the color COLOR.
The result is a list of integer RGB values--(RED GREEN BLUE).
These values range from 0 to 65535; white is (65535 65535 65535).
The returned value reflects the standard X definition of COLOR,
regardless of whether the terminal can display it, so the return value
should be the same regardless of what display is being used."
  (let ((len (length color)))
    (cond ((and (>= len 4)              ;; X-style "#XXYYZZ" color spec
                (eq (aref color 0) ?#)
                (member (aref color 1)
                        '(?0 ?1 ?2 ?3 ?4 ?5 ?6 ?7 ?8 ?9
                          ?a ?b ?c ?d ?e ?f)))
           ;; Translate the string "#XXYYZZ" into a list
           ;; of numbers (XX YY ZZ).  If the primary colors
           ;; are specified with less than 4 hex digits,
           ;; the used digits represent the most significant
           ;; bits of the value (e.g. #XYZ = #X000Y000Z000).
           (let* ((ndig (/ (- len 1) 3))
                  (i1 1)
                  (i2 (+ i1 ndig))
                  (i3 (+ i2 ndig)))
             (list (ash (string-to-number (substring color i1 i2) 16)
                        (* 4 (- 4 ndig)))
                   (ash (string-to-number (substring color i2 i3) 16)
                        (* 4 (- 4 ndig)))
                   (ash (string-to-number (substring color i3) 16)
                        (* 4 (- 4 ndig))))))
          ((and (>= len 9)              ;; X-style RGB:xx/yy/zz color spec
                (string= (substring color 0 4) "rgb:"))
           ;; Translate the string "RGB:XX/YY/ZZ" into a list
           ;; of numbers (XX YY ZZ).  If fewer than 4 hex
           ;; digits are used, they represent the fraction
           ;; of the maximum value (RGB:X/Y/Z = #XXXXYYYYZZZZ).
           ;; BUG FIX: each fraction must be scaled to the full 16-bit
           ;; range 0..65535 (as the docstring above promises), not to
           ;; 0..255; scaling by 255 made these values 256 times too
           ;; small when compared against the 16-bit alist values.
           (let* ((ndig (/ (- len 3) 3))
                  (maxval (1- (ash 1 (* 4 (- ndig 1)))))
                  (i1 4)
                  (i2 (+ i1 ndig))
                  (i3 (+ i2 ndig)))
             (list (/ (* (string-to-number (substring color i1 (- i2 1)) 16)
                         65535)
                      maxval)
                   (/ (* (string-to-number (substring color i2 (- i3 1)) 16)
                         65535)
                      maxval)
                   (/ (* (string-to-number (substring color i3) 16)
                         65535)
                      maxval))))
          (t
           (cdr (assoc color color-name-rgb-alist))))))

(defun tty-color-translate (color &optional frame)
  "Given a color COLOR, return the index of the corresponding TTY color.
COLOR must be a string that is either the color's name, or its X-style
specification like \"#RRGGBB\" or \"RGB:rr/gg/bb\", where each primary
color can be given with 1 to 4 hex digits.
If COLOR is a color name that is found among supported colors in
`tty-color-alist', the associated index is returned.  Otherwise, the
RGB values of the color, either as given by the argument or from
looking up the name in `color-name-rgb-alist', are used to find the
supported color that is the best approximation for COLOR in the RGB
space.
If COLOR is neither a valid X RGB specification of the color, nor a
name of a color in `color-name-rgb-alist', the returned value is nil.
If FRAME is unspecified or nil, it defaults to the selected frame."
  (cadr (tty-color-desc color frame)))

(defun tty-color-by-index (idx &optional frame)
  "Given a numeric index of a tty color, return its description.
FRAME, if unspecified or nil, defaults to the selected frame.
Value is a list of the form (NAME INDEX R G B)."
  (and idx
       (let ((colors (tty-color-alist frame))
             desc found)
         (while colors
           (setq desc (car colors))
           (if (eq idx (car (cdr desc)))
               (setq found desc))
           (setq colors (cdr colors)))
         found)))

(defun tty-color-values (color &optional frame)
  "Return RGB values of the color COLOR on a termcap frame FRAME.
If COLOR is not directly supported by the display, return the RGB
values for a supported color that is its best approximation.
The value is a list of integer RGB values--(RED GREEN BLUE).
These values range from 0 to 65535; white is (65535 65535 65535).
If FRAME is omitted or nil, use the selected frame."
  (cddr (tty-color-desc color frame)))

(defun tty-color-desc (color &optional frame)
  "Return the description of the color COLOR for a character terminal.
Value is a list of the form (NAME INDEX R G B).  The returned NAME or
RGB value may not be the same as the argument COLOR, because the latter
might need to be approximated if it is not supported directly."
  (and (stringp color)
       (let ((color (tty-color-canonicalize color)))
         (or (assoc color (tty-color-alist frame))
             (let ((rgb (tty-color-standard-values color)))
               (and rgb
                    (let ((pixel (tty-color-24bit rgb)))
                      (or (and pixel (cons color (cons pixel rgb)))
                          (tty-color-approximate rgb frame)))))))))

(defun tty-color-gray-shades (&optional display)
  "Return the number of gray colors supported by DISPLAY's terminal.
A color is considered gray if the 3 components of its RGB value are equal."
  (let* ((frame (if (framep display) display
                  ;; FIXME: this uses an arbitrary frame from DISPLAY!
                  (car (frames-on-display-list display))))
         (colors (tty-color-alist frame))
         (count 0)
         desc r g b)
    (while colors
      (setq desc (cddr (car colors))
            r (car desc)
            g (cadr desc)
            b (car (cddr desc)))
      (and (numberp r)
           (eq r g) (eq g b)
           (setq count (1+ count)))
      (setq colors (cdr colors)))
    count))

(provide 'term/tty-colors)

;;; tty-colors.el ends here
```
Castello Piccolomini (Italian for Piccolomini castle) is a Middle Ages-Renaissance castle in Balsorano, Province of L'Aquila (Abruzzo). History The castle was built by Antonio Piccolomini, nephew of Pope Pius II, in 1460. Now it is a hotel and it has been used as a location for several Italian films. Architecture The castle has an irregular pentagonal plan with circular towers at each corner. The building is in stone and the main entrance is through the surrounding park. The internal courtyard is L-shaped with a well in the middle. References External links Piccolomini (Balsorano) Balsorano
```xml
/*! */
import type { User } from '@nextcloud/cypress'
import { createShare } from './FilesSharingUtils.ts'
import { getRowForFile } from '../files/FilesUtils.ts'

describe('files_sharing: Files view', { testIsolation: true }, () => {
	let user: User
	let sharee: User

	beforeEach(() => {
		// Provision two fresh accounts: the share owner and the recipient.
		cy.createRandomUser().then(($user) => {
			user = $user
		})
		cy.createRandomUser().then(($user) => {
			sharee = $user
		})
	})

	/**
	 * Regression test of path_to_url
	 */
	it('opens a shared folder when clicking on it', () => {
		/**
		 * From the given shares view, the shared folder must be listed,
		 * its "open in files" action must navigate into the folder,
		 * and the folder content must be shown.
		 */
		const openSharedFolderFrom = (sharesView: string): void => {
			// visit the shares view
			cy.visit(sharesView)
			// see the shared folder
			getRowForFile('folder').should('be.visible')
			// click on the folder should open it in files
			getRowForFile('folder').findByRole('button', { name: /open in files/i }).click()
			// See the URL has changed
			cy.url().should('match', /apps\/files\/files\/.+dir=\/folder/)
			// Content of the shared folder
			getRowForFile('file').should('be.visible')
		}

		cy.mkdir(user, '/folder')
		cy.uploadContent(user, new Blob([]), 'text/plain', '/folder/file')
		cy.login(user)
		cy.visit('/apps/files')

		// share the folder
		createShare('folder', sharee.userId, { read: true, download: true })

		// The owner verifies the folder from the outgoing-shares view
		openSharedFolderFrom('/apps/files/sharingout')

		cy.logout()

		// Now for the sharee: verify from the incoming-shares view
		cy.login(sharee)
		openSharedFolderFrom('/apps/files/sharingin')
	})
})
```
Chimay Brewery (Brasserie de Chimay) is a brewery at Scourmont Abbey, a Trappist monastery in Chimay, Hainaut, Belgium, one of the thirteen breweries worldwide that produce Trappist beer. They make four ales: Chimay Rouge, Chimay Bleue, Chimay Blanche, and Chimay 150; and one patersbier for the monks. The monastery also makes four varieties of cheese. Brewery The brewery was founded inside Scourmont Abbey, in the Belgian municipality of Chimay in 1862. The brewery produces four ales as well as a patersbier for the monks themselves which is occasionally sold as Chimay Gold; they are known as Trappist beers because they are made in a Trappist monastery. It was the first brewery to use the Trappist Ale designation on its labels. As with all other Trappist breweries, the beer is sold only for financial support of the monastery and good causes. The brewery business pays rent for use of the property within the abbey, which is used to support the monastic community. The majority of the profit from the sale of the beer is distributed to charities and for community development around the region. As of 2007, sales figures for Chimay products exceeded $50 million per year. The water for the beers is drawn from a well located inside the monastery walls. The filtered solids from the beer mash are recycled into livestock feed which is given to the same cows that produce the milk for Chimay cheeses. The beer is transported from the monastery to the bottling plant 12 km away, which can fill 40,000 bottles per hour, of which many are returns. The beer is then refermented in the bottle for three weeks before being shipped around the world. 50% of Chimay beer production is sold on the export markets. The brewing plant was updated in 1988, and as of 2005 produced 12 megalitres annually. Beers The ingredients are: water, malted barley, wheat starch, sugar, hop extract and yeast; malt extract is used in Rouge and Bleue for colouring. Chimay Red, 7% ABV. 
In the 75 cl bottle, it is known as Première. It is a dark brown colour dubbel and has a sweet, fruity aroma. Chimay Blue, 9% ABV darker ale. In the 75 cl bottle, it is known as Grande Réserve. This copper-brown beer has a light creamy head and a slightly bitter taste. Considered to be the "classic" Chimay ale, it exhibits a considerable depth of fruity, peppery character. Chimay Triple, 8% ABV golden tripel. In the 75 cl bottle, it is known as Cinq Cents. This crisp beer bears a light orange colour, and is the most hopped and driest of the three. Chimay 150, 10% ABV blonde ale. Originally brewed as a special, 150th anniversary ale, now in regular production. Noted for its spicy, smoky character. Chimay Dorée (Golden), 4.8% ABV ale, brewed from very similar ingredients to the Red, but paler and spiced differently. It is a patersbier, intended only to be drunk at the Abbey. Since 2007 it has been available at the nearby inn Auberge de Poteaupré, which is associated with the abbey. The monks themselves drink this variety rather than the stronger three. Prior to 2015, it was unusual for bottles of the Chimay Dorée to be available outside the Abbey or local inn. However, from 2013, a limited quantity of this beer was sold on draught in the United Kingdom, at 19 Fuller's pubs and in Italy, where only 50 pubs sell this variety of beer, as well as in 330 ml bottles in some export markets. In the Christmas season of 2016, Chimay made available a gift set with all four varieties and two chalices, and it has since been available in limited quantities in the USA. Cheeses Since 1876 the monastery has also made cheese, and as of 2010 offers four cheeses. They are: Chimay with Beer, whose rind is soaked in Chimay beer. Chimay Grand Classic, a semi-hard pressed cheese. Chimay Grand Cru, made from pasteurised milk and matured for six weeks. Old Chimay, a hard cheese matured for at least six months.
See also References External links 1862 establishments in Belgium Belgian brands Trappist breweries in Belgium Breweries of Wallonia Companies based in Hainaut (province) Chimay
Frank Stephen Bunn (born 6 November 1962) is an English former professional footballer who is the U23 coach of League One club Wigan Athletic. He holds the Football League Cup record for the most goals (six) by a player in a single match, achieved in 1989. Career Bunn played as a striker and began his career at Luton Town, and later played for Hull City and Oldham Athletic. His most famous moment as a player came on 25 October 1989, when he scored six goals in Oldham's 7–0 victory over Scarborough in the third round of the League Cup, which is still the League Cup record for most goals by a player in a single match. In 1990, Bunn was forced to retire from professional football because of injury. He then joined Stalybridge Celtic and later Radcliffe Borough. He later became a coach and began his coaching career at Wigan Athletic, before joining Manchester City as reserve team coach in 1998. In February 2007, Bunn was appointed first-team coach at Coventry City, and on 11 February 2008, he was named joint caretaker manager along with John Harbin, following the dismissal of Iain Dowie. He returned to his old position as first-team coach following Chris Coleman's appointment as manager on 19 February 2008. He left the club in May 2010 after his contract expired. In June 2011, Bunn was appointed as Steve Eyre's assistant manager at Rochdale. In July 2012, he joined Huddersfield Town as a professional development coach working with the academy under-18 team. Bunn was appointed manager of newly relegated League Two club Oldham Athletic on 13 June 2018 on a one-year contract, but sacked the following December. In July 2019, he joined Wigan Athletic and as of the 2020–21 season is the coach of their U23 side. Personal life Bunn's son, Harry Bunn, is a professional footballer. 
Managerial statistics References External links Career statistics 1962 births Living people Footballers from Birmingham, West Midlands English men's footballers Men's association football forwards Luton Town F.C. players Hull City A.F.C. players Oldham Athletic A.F.C. players Stalybridge Celtic F.C. players Radcliffe F.C. players English Football League players English football managers Coventry City F.C. managers Oldham Athletic A.F.C. managers English Football League managers Manchester City F.C. non-playing staff Coventry City F.C. non-playing staff Rochdale A.F.C. non-playing staff Huddersfield Town A.F.C. non-playing staff Wigan Athletic F.C. non-playing staff
```python
#! /usr/bin/python
# -*- coding: utf-8 -*-
r"""Example of Synced sequence input and output.

This is a reimplementation of the TensorFlow official PTB example in:
tensorflow/models/rnn/ptb

The batch_size can be seen as how many concurrent computations.\n
As the following example shows, the first batch learns the sequence information by using 0 to 9.\n
The second batch learns the sequence information by using 10 to 19.\n
So it ignores the information from 9 to 10!\n
Only if we set the batch_size = 1, will it consider all the information from 0 to 20.\n

The meaning of batch_size here is not the same as in the MNIST example. In the MNIST example,
batch_size reflects how many examples we consider in each iteration, while in the PTB example,
batch_size is how many concurrent processes (segments) are used to speed up the computation.

Some information will be ignored if batch_size > 1; however, if your dataset
is "long" enough (a text corpus usually has billions of words), the ignored
information would not affect the final result.

In the PTB tutorial, we set batch_size = 20, so we cut the dataset into 20 segments.
At the beginning of each epoch, we initialize (reset) the 20 RNN states for the
20 segments, then go through the 20 segments separately.

The training data will be generated as follows:\n
>>> train_data = [i for i in range(20)]
>>> for batch in tl.iterate.ptb_iterator(train_data, batch_size=2, num_steps=3):
>>>     x, y = batch
>>>     print(x, '\n', y)
... [[ 0  1  2] <---x 1st subset/ iteration
... [10 11 12]]
... [[ 1  2  3] <---y
... [11 12 13]]
...
... [[ 3  4  5] <--- 1st batch input  2nd subset/ iteration
... [13 14 15]] <--- 2nd batch input
... [[ 4  5  6] <--- 1st batch target
... [14 15 16]] <--- 2nd batch target
...
... [[ 6  7  8]  3rd subset/ iteration
... [16 17 18]]
... [[ 7  8  9]
... [17 18 19]]

Hao Dong: This example can also be considered as pre-training of the word embedding matrix.

About RNN
----------
$ Karpathy Blog : path_to_url

More TensorFlow official RNN examples can be found here
---------------------------------------------------------
$ RNN for PTB : path_to_url#recurrent-neural-networks
$ Seq2seq : path_to_url#sequence-to-sequence-models
$ translation : tensorflow/models/rnn/translate

Example / benchmark for building a PTB LSTM model.

Trains the model described in:
(Zaremba, et. al.) Recurrent Neural Network Regularization
path_to_url

There are 3 supported model configurations:
===========================================
| config | epochs | train | valid  | test
===========================================
| small  | 13     | 37.99 | 121.39 | 115.91
| medium | 39     | 48.45 | 86.16  | 82.07
| large  | 55     | 37.87 | 82.62  | 78.29
The exact results may vary depending on the random initialization.

The hyperparameters used in the model:
- init_scale - the initial scale of the weights
- learning_rate - the initial value of the learning rate
- max_grad_norm - the maximum permissible norm of the gradient
- num_layers - the number of LSTM layers
- num_steps - the number of unrolled steps of LSTM
- hidden_size - the number of LSTM units
- max_epoch - the number of epochs trained with the initial learning rate
- max_max_epoch - the total number of epochs for training
- keep_prob - the probability of keeping weights in the dropout layer
- lr_decay - the decay of the learning rate for each epoch after "max_epoch"
- batch_size - the batch size

The data required for this example is in the data/ dir of the
PTB dataset from Tomas Mikolov's webpage:

$ wget path_to_url~imikolov/rnnlm/simple-examples.tgz
$ tar xvf simple-examples.tgz

A) use the zero_state function on the cell object

B) for an rnn, all time steps share weights. We use one matrix to keep all
gate weights. Split by column into 4 parts to get the 4 gate weight matrices.
""" import argparse import sys import time import numpy as np import tensorflow as tf import tensorlayer as tl from tensorlayer.models import Model tl.logging.set_verbosity(tl.logging.DEBUG) def process_args(args): parser = argparse.ArgumentParser() parser.add_argument( '--model', default='small', choices=['small', 'medium', 'large'], help="A type of model. Possible options are: small, medium, large." ) parameters = parser.parse_args(args) return parameters class PTB_Net(Model): def __init__(self, vocab_size, hidden_size, init, keep): super(PTB_Net, self).__init__() self.embedding = tl.layers.Embedding(vocab_size, hidden_size, init) self.dropout1 = tl.layers.Dropout(keep=keep) self.lstm1 = tl.layers.RNN( cell=tf.keras.layers.LSTMCell(hidden_size), return_last_output=False, return_last_state=True, return_seq_2d=False, in_channels=hidden_size ) self.dropout2 = tl.layers.Dropout(keep=keep) self.lstm2 = tl.layers.RNN( cell=tf.keras.layers.LSTMCell(hidden_size), return_last_output=False, return_last_state=True, return_seq_2d=True, in_channels=hidden_size ) self.dropout3 = tl.layers.Dropout(keep=keep) self.out_dense = tl.layers.Dense(vocab_size, in_channels=hidden_size, W_init=init, b_init=init, act=None) def forward(self, inputs, lstm1_initial_state=None, lstm2_initial_state=None): inputs = self.embedding(inputs) inputs = self.dropout1(inputs) lstm1_out, lstm1_state = self.lstm1(inputs, initial_state=lstm1_initial_state) inputs = self.dropout2(lstm1_out) lstm2_out, lstm2_state = self.lstm2(inputs, initial_state=lstm2_initial_state) inputs = self.dropout3(lstm2_out) logits = self.out_dense(inputs) return logits, lstm1_state, lstm2_state def main(): """ The core of the model consists of an LSTM cell that processes one word at a time and computes probabilities of the possible continuations of the sentence. The memory state of the network is initialized with a vector of zeros and gets updated after reading each word. 
Also, for computational reasons, we will process data in mini-batches of size batch_size. """ param = process_args(sys.argv[1:]) if param.model == "small": init_scale = 0.1 learning_rate = 1e-3 max_grad_norm = 5 num_steps = 20 hidden_size = 200 max_epoch = 4 max_max_epoch = 13 keep_prob = 1.0 lr_decay = 0.5 batch_size = 20 vocab_size = 10000 elif param.model == "medium": init_scale = 0.05 learning_rate = 1e-3 max_grad_norm = 5 # num_layers = 2 num_steps = 35 hidden_size = 650 max_epoch = 6 max_max_epoch = 39 keep_prob = 0.5 lr_decay = 0.8 batch_size = 20 vocab_size = 10000 elif param.model == "large": init_scale = 0.04 learning_rate = 1e-3 max_grad_norm = 10 # num_layers = 2 num_steps = 35 hidden_size = 1500 max_epoch = 14 max_max_epoch = 55 keep_prob = 0.35 lr_decay = 1 / 1.15 batch_size = 20 vocab_size = 10000 else: raise ValueError("Invalid model: %s", param.model) # Load PTB dataset train_data, valid_data, test_data, vocab_size = tl.files.load_ptb_dataset() # train_data = train_data[0:int(100000/5)] # for fast testing print('len(train_data) {}'.format(len(train_data))) # 929589 a list of int print('len(valid_data) {}'.format(len(valid_data))) # 73760 a list of int print('len(test_data) {}'.format(len(test_data))) # 82430 a list of int print('vocab_size {}'.format(vocab_size)) # 10000 # One int represents one word, the meaning of batch_size here is not the # same with MNIST example, it is the number of concurrent processes for # computational reasons. 
init = tf.random_uniform_initializer(-init_scale, init_scale) net = PTB_Net(hidden_size=hidden_size, vocab_size=vocab_size, init=init, keep=keep_prob) # Truncated Backpropagation for training lr = tf.Variable(0.0, trainable=False) train_weights = net.weights optimizer = tf.optimizers.Adam(lr=lr) print(net) print("\nStart learning a language model by using PTB dataset") for i in range(max_max_epoch): # decreases the initial learning rate after several # epoachs (defined by ``max_epoch``), by multipling a ``lr_decay``. new_lr_decay = lr_decay**max(i - max_epoch, 0.0) lr.assign(learning_rate * new_lr_decay) # Training net.train() print("Epoch: %d/%d Learning rate: %.3f" % (i + 1, max_max_epoch, lr.value())) epoch_size = ((len(train_data) // batch_size) - 1) // num_steps start_time = time.time() costs = 0.0 iters = 0 # reset all states at the begining of every epoch lstm1_state = None lstm2_state = None for step, (x, y) in enumerate(tl.iterate.ptb_iterator(train_data, batch_size, num_steps)): with tf.GradientTape() as tape: ## compute outputs logits, lstm1_state, lstm2_state = net( x, lstm1_initial_state=lstm1_state, lstm2_initial_state=lstm2_state ) ## compute loss and update model cost = tl.cost.cross_entropy(logits, tf.reshape(y, [-1]), name='train_loss') grad, _ = tf.clip_by_global_norm(tape.gradient(cost, train_weights), max_grad_norm) optimizer.apply_gradients(zip(grad, train_weights)) costs += cost iters += 1 if step % (epoch_size // 10) == 10: print( "%.3f perplexity: %.3f speed: %.0f wps" % ( step * 1.0 / epoch_size, np.exp(costs / iters), iters * batch_size * num_steps / (time.time() - start_time) ) ) train_perplexity = np.exp(costs / iters) print("Epoch: %d/%d Train Perplexity: %.3f" % (i + 1, max_max_epoch, train_perplexity)) # Validing net.eval() start_time = time.time() costs = 0.0 iters = 0 # reset all states at the begining of every epoch lstm1_state = None lstm2_state = None for step, (x, y) in enumerate(tl.iterate.ptb_iterator(valid_data, batch_size, 
num_steps)): ## compute outputs logits, lstm1_state, lstm2_state = net(x, lstm1_initial_state=lstm1_state, lstm2_initial_state=lstm2_state) ## compute loss and update model cost = tl.cost.cross_entropy(logits, tf.reshape(y, [-1]), name='train_loss') costs += cost iters += 1 valid_perplexity = np.exp(costs / iters) print("Epoch: %d/%d Valid Perplexity: %.3f" % (i + 1, max_max_epoch, valid_perplexity)) print("Evaluation") # Testing net.eval() # go through the test set step by step, it will take a while. start_time = time.time() costs = 0.0 iters = 0 # reset all states at the begining lstm1_state = None lstm2_state = None for step, (x, y) in enumerate(tl.iterate.ptb_iterator(test_data, batch_size=1, num_steps=1)): ## compute outputs logits, lstm1_state, lstm2_state = net(x, lstm1_initial_state=lstm1_state, lstm2_initial_state=lstm2_state) ## compute loss and update model cost = tl.cost.cross_entropy(logits, tf.reshape(y, [-1]), name='train_loss') costs += cost iters += 1 test_perplexity = np.exp(costs / iters) print("Test Perplexity: %.3f took %.2fs" % (test_perplexity, time.time() - start_time)) print( "More example: Text generation using Trump's speech data: path_to_url -- def main_lstm_generate_text():" ) if __name__ == "__main__": main() # log of SmallConfig # Start learning a language model by using PTB dataset # Epoch: 1 Learning rate: 1.000 # 0.004 perplexity: 5512.735 speed: 4555 wps # 0.104 perplexity: 841.289 speed: 8823 wps # 0.204 perplexity: 626.273 speed: 9292 wps # 0.304 perplexity: 505.628 speed: 9472 wps # 0.404 perplexity: 435.580 speed: 9551 wps # 0.504 perplexity: 390.108 speed: 9555 wps # 0.604 perplexity: 351.379 speed: 9546 wps # 0.703 perplexity: 324.846 speed: 9579 wps # 0.803 perplexity: 303.824 speed: 9574 wps # 0.903 perplexity: 284.468 speed: 9551 wps # Epoch: 1 Train Perplexity: 269.981 # Epoch: 1 Valid Perplexity: 178.561 # Epoch: 2 Learning rate: 1.000 # 0.004 perplexity: 211.632 speed: 7697 wps # 0.104 perplexity: 151.509 speed: 9488 
wps # 0.204 perplexity: 158.947 speed: 9674 wps # 0.304 perplexity: 153.963 speed: 9806 wps # 0.404 perplexity: 150.938 speed: 9817 wps # 0.504 perplexity: 148.413 speed: 9824 wps # 0.604 perplexity: 143.763 speed: 9765 wps # 0.703 perplexity: 141.616 speed: 9731 wps # 0.803 perplexity: 139.618 speed: 9781 wps # 0.903 perplexity: 135.880 speed: 9735 wps # Epoch: 2 Train Perplexity: 133.771 # Epoch: 2 Valid Perplexity: 142.595 # Epoch: 3 Learning rate: 1.000 # 0.004 perplexity: 146.902 speed: 8345 wps # 0.104 perplexity: 105.647 speed: 9572 wps # 0.204 perplexity: 114.261 speed: 9585 wps # 0.304 perplexity: 111.237 speed: 9586 wps # 0.404 perplexity: 110.181 speed: 9605 wps # 0.504 perplexity: 109.383 speed: 9601 wps # 0.604 perplexity: 106.722 speed: 9635 wps # 0.703 perplexity: 106.075 speed: 9597 wps # 0.803 perplexity: 105.481 speed: 9624 wps # 0.903 perplexity: 103.262 speed: 9618 wps # Epoch: 3 Train Perplexity: 102.272 # Epoch: 3 Valid Perplexity: 131.884 # Epoch: 4 Learning rate: 1.000 # 0.004 perplexity: 118.127 speed: 7867 wps # 0.104 perplexity: 85.530 speed: 9330 wps # 0.204 perplexity: 93.559 speed: 9399 wps # 0.304 perplexity: 91.141 speed: 9386 wps # 0.404 perplexity: 90.668 speed: 9462 wps # 0.504 perplexity: 90.366 speed: 9516 wps # 0.604 perplexity: 88.479 speed: 9477 wps # 0.703 perplexity: 88.275 speed: 9533 wps # 0.803 perplexity: 88.091 speed: 9560 wps # 0.903 perplexity: 86.430 speed: 9516 wps # Epoch: 4 Train Perplexity: 85.839 # Epoch: 4 Valid Perplexity: 128.408 # Epoch: 5 Learning rate: 1.000 # 0.004 perplexity: 100.077 speed: 7682 wps # 0.104 perplexity: 73.856 speed: 9197 wps # 0.204 perplexity: 81.242 speed: 9266 wps # 0.304 perplexity: 79.315 speed: 9375 wps # 0.404 perplexity: 79.009 speed: 9439 wps # 0.504 perplexity: 78.874 speed: 9377 wps # 0.604 perplexity: 77.430 speed: 9436 wps # 0.703 perplexity: 77.415 speed: 9417 wps # 0.803 perplexity: 77.424 speed: 9407 wps # 0.903 perplexity: 76.083 speed: 9407 wps # Epoch: 5 Train 
Perplexity: 75.719 # Epoch: 5 Valid Perplexity: 127.057 # Epoch: 6 Learning rate: 0.500 # 0.004 perplexity: 87.561 speed: 7130 wps # 0.104 perplexity: 64.202 speed: 9753 wps # 0.204 perplexity: 69.518 speed: 9537 wps # 0.304 perplexity: 66.868 speed: 9647 wps # 0.404 perplexity: 65.766 speed: 9538 wps # 0.504 perplexity: 64.967 speed: 9537 wps # 0.604 perplexity: 63.090 speed: 9565 wps # 0.703 perplexity: 62.415 speed: 9544 wps # 0.803 perplexity: 61.751 speed: 9504 wps # 0.903 perplexity: 60.027 speed: 9482 wps # Epoch: 6 Train Perplexity: 59.127 # Epoch: 6 Valid Perplexity: 120.339 # Epoch: 7 Learning rate: 0.250 # 0.004 perplexity: 72.069 speed: 7683 wps # 0.104 perplexity: 53.331 speed: 9526 wps # 0.204 perplexity: 57.897 speed: 9572 wps # 0.304 perplexity: 55.557 speed: 9491 wps # 0.404 perplexity: 54.597 speed: 9483 wps # 0.504 perplexity: 53.817 speed: 9471 wps # 0.604 perplexity: 52.147 speed: 9511 wps # 0.703 perplexity: 51.473 speed: 9497 wps # 0.803 perplexity: 50.788 speed: 9521 wps # 0.903 perplexity: 49.203 speed: 9515 wps # Epoch: 7 Train Perplexity: 48.303 # Epoch: 7 Valid Perplexity: 120.782 # Epoch: 8 Learning rate: 0.125 # 0.004 perplexity: 63.503 speed: 8425 wps # 0.104 perplexity: 47.324 speed: 9433 wps # 0.204 perplexity: 51.525 speed: 9653 wps # 0.304 perplexity: 49.405 speed: 9520 wps # 0.404 perplexity: 48.532 speed: 9487 wps # 0.504 perplexity: 47.800 speed: 9610 wps # 0.604 perplexity: 46.282 speed: 9554 wps # 0.703 perplexity: 45.637 speed: 9536 wps # 0.803 perplexity: 44.972 speed: 9493 wps # 0.903 perplexity: 43.506 speed: 9496 wps # Epoch: 8 Train Perplexity: 42.653 # Epoch: 8 Valid Perplexity: 122.119 # Epoch: 9 Learning rate: 0.062 # 0.004 perplexity: 59.375 speed: 7158 wps # 0.104 perplexity: 44.223 speed: 9275 wps # 0.204 perplexity: 48.269 speed: 9459 wps # 0.304 perplexity: 46.273 speed: 9564 wps # 0.404 perplexity: 45.450 speed: 9604 wps # 0.504 perplexity: 44.749 speed: 9604 wps # 0.604 perplexity: 43.308 speed: 9619 wps # 
0.703 perplexity: 42.685 speed: 9647 wps # 0.803 perplexity: 42.022 speed: 9673 wps # 0.903 perplexity: 40.616 speed: 9678 wps # Epoch: 9 Train Perplexity: 39.792 # Epoch: 9 Valid Perplexity: 123.170 # Epoch: 10 Learning rate: 0.031 # 0.004 perplexity: 57.333 speed: 7183 wps # 0.104 perplexity: 42.631 speed: 9592 wps # 0.204 perplexity: 46.580 speed: 9518 wps # 0.304 perplexity: 44.625 speed: 9569 wps # 0.404 perplexity: 43.832 speed: 9576 wps # 0.504 perplexity: 43.153 speed: 9571 wps # 0.604 perplexity: 41.761 speed: 9557 wps # 0.703 perplexity: 41.159 speed: 9524 wps # 0.803 perplexity: 40.494 speed: 9527 wps # 0.903 perplexity: 39.111 speed: 9558 wps # Epoch: 10 Train Perplexity: 38.298 # Epoch: 10 Valid Perplexity: 123.658 # Epoch: 11 Learning rate: 0.016 # 0.004 perplexity: 56.238 speed: 7190 wps # 0.104 perplexity: 41.771 speed: 9171 wps # 0.204 perplexity: 45.656 speed: 9415 wps # 0.304 perplexity: 43.719 speed: 9472 wps # 0.404 perplexity: 42.941 speed: 9483 wps # 0.504 perplexity: 42.269 speed: 9494 wps # 0.604 perplexity: 40.903 speed: 9530 wps # 0.703 perplexity: 40.314 speed: 9545 wps # 0.803 perplexity: 39.654 speed: 9580 wps # 0.903 perplexity: 38.287 speed: 9597 wps # Epoch: 11 Train Perplexity: 37.477 # Epoch: 11 Valid Perplexity: 123.523 # Epoch: 12 Learning rate: 0.008 # 0.004 perplexity: 55.552 speed: 7317 wps # 0.104 perplexity: 41.267 speed: 9234 wps # 0.204 perplexity: 45.119 speed: 9461 wps # 0.304 perplexity: 43.204 speed: 9519 wps # 0.404 perplexity: 42.441 speed: 9453 wps # 0.504 perplexity: 41.773 speed: 9536 wps # 0.604 perplexity: 40.423 speed: 9555 wps # 0.703 perplexity: 39.836 speed: 9576 wps # 0.803 perplexity: 39.181 speed: 9579 wps # 0.903 perplexity: 37.827 speed: 9554 wps # Epoch: 12 Train Perplexity: 37.020 # Epoch: 12 Valid Perplexity: 123.192 # Epoch: 13 Learning rate: 0.004 # 0.004 perplexity: 55.124 speed: 8234 wps # 0.104 perplexity: 40.970 speed: 9391 wps # 0.204 perplexity: 44.804 speed: 9525 wps # 0.304 perplexity: 
42.912 speed: 9512 wps # 0.404 perplexity: 42.162 speed: 9536 wps # 0.504 perplexity: 41.500 speed: 9630 wps # 0.604 perplexity: 40.159 speed: 9591 wps # 0.703 perplexity: 39.574 speed: 9575 wps # 0.803 perplexity: 38.921 speed: 9613 wps # 0.903 perplexity: 37.575 speed: 9629 wps # Epoch: 13 Train Perplexity: 36.771 # Epoch: 13 Valid Perplexity: 122.917 # Evaluation # Test Perplexity: 116.723 took 124.06s # MediumConfig # Epoch: 1 Learning rate: 1.000 # 0.008 perplexity: 5173.547 speed: 6469 wps # 0.107 perplexity: 1219.527 speed: 6453 wps # 0.206 perplexity: 866.163 speed: 6441 wps # 0.306 perplexity: 695.163 speed: 6428 wps # 0.405 perplexity: 598.464 speed: 6420 wps # 0.505 perplexity: 531.875 speed: 6422 wps # 0.604 perplexity: 477.079 speed: 6425 wps # 0.704 perplexity: 438.297 speed: 6428 wps # 0.803 perplexity: 407.928 speed: 6425 wps # 0.903 perplexity: 381.264 speed: 6429 wps # Epoch: 1 Train Perplexity: 360.795 # Epoch: 1 Valid Perplexity: 208.854 # ... # Epoch: 39 Learning rate: 0.001 # 0.008 perplexity: 56.618 speed: 6357 wps # 0.107 perplexity: 43.375 speed: 6341 wps # 0.206 perplexity: 47.873 speed: 6336 wps # 0.306 perplexity: 46.408 speed: 6337 wps # 0.405 perplexity: 46.327 speed: 6337 wps # 0.505 perplexity: 46.115 speed: 6335 wps # 0.604 perplexity: 45.323 speed: 6336 wps # 0.704 perplexity: 45.286 speed: 6337 wps # 0.803 perplexity: 45.174 speed: 6336 wps # 0.903 perplexity: 44.334 speed: 6336 wps # Epoch: 39 Train Perplexity: 44.021 # Epoch: 39 Valid Perplexity: 87.516 # Evaluation # Test Perplexity: 83.858 took 167.58s ```
Ward No. 38, Kolkata Municipal Corporation is an administrative division of Kolkata Municipal Corporation in Borough No. 5, covering parts of Rajabazar and Amherst Street neighbourhoods in North Kolkata, in the Indian state of West Bengal. History Attempts were made to establish a municipal corporation at Kolkata from the middle of the 19th century. The electoral system was introduced for the first time in 1847, and 4 of the 7 board members were elected by the rate payers. In 1852 the board was replaced by a new one and in 1863 a new body was formed. As per old records, in 1872 there were 25 wards in Kolkata (spellings as in use at that time) – 1. Shyampukur, 2. Kumartuli, 3. Bartala, 4. Sukea Street, 5. Jorabagan, 6. Jorasanko, 7. Barabazar, 8. Kolutola, 9. Muchipara, 10. Boubazar, 11. Padmapukur, 12. Waterloo Street, 13. Fenwick Bazar, 14. Taltala, 15. Kalinga, 16. Park Street, 17. Victoria Terrace, 18. Hastings, 19. Entali, 20. Beniapukur, 21. Baliganj-Tollyganj, 22. Bhabanipur, 23. Alipur, 24.Ekbalpur and 25. Watganj. A new municipal corporation was created in 1876, wherein 48 commissioners were elected and 24 were appointed by the government. With the implementation of the Municipal Consolidation Act of 1888 the area under the jurisdiction of the municipal corporation was enlarged. Certain areas were already there but more parts of them were added (current spellings) - Entally, Manicktala, Beliaghata, Ultadanga, Chitpur, Cossipore, Beniapukur, Ballygunge, Watganj and Ekbalpur, and Garden Reach and Tollygunj. The Calcutta Municipal Act of 1923 brought about important changes. It liberalised the constitution along democratic lines. The state government superseded the Corporation in 1948 and the Calcutta Municipal Act of 1951 came into force. Adult franchise was introduced in municipal elections in 1962. With the addition of certain areas in the southern parts of the city, the number of wards increased from 75 to 144. Geography Ward No. 
38 is bordered on the north by Kailash Bose Street and Mahendra Srimani Street; on the east by Acharya Prafulla Chandra Street; on the south by Keshab Sen Street; and on the west by Bidhan Sarani. The ward is served by Amherst Street police station of Kolkata Police. Amherst Street Women police station covers all police districts under the jurisdiction of the North and North Suburban division of Kolkata Police, i.e. Amherst Street, Jorabagan, Shyampukur, Cossipore, Chitpur, Sinthi, Burtolla and Tala. Demographics As per 2011 Census of India Ward No. 38, Kolkata Municipal Corporation, had a total population of 28,791, of which 16,196 (56%) were males and 12,595 (44%) were females. Population below 6 years was 1,862. The total number of literates in Ward No. 38 was 23,097 (85.77% of the population over 6 years). Kolkata is the second most literate district in West Bengal. The literacy rate of Kolkata district has increased from 53.0% in 1951 to 86.3% in the 2011 census. See also – List of West Bengal districts ranked by literacy rate Census data about mother tongue and religion is not available at the ward level. For district level information see Kolkata district. According to the District Census Handbook Kolkata 2011, 141 wards of Kolkata Municipal Corporation formed Kolkata district. (3 wards were added later). Election highlights The ward forms a city municipal corporation council electoral constituency and is a part of Jorasanko (Vidhan Sabha constituency). References Municipal wards of Kolkata
```c++ #include <iostream> #include <vector> #include <algorithm> using namespace std; struct peo{ int id, ge, gi, fin; vector<int> choice; }; bool cmp(peo& a, peo& b) { if (a.fin != b.fin) return a.fin > b.fin; return a.ge > b.ge; } bool cmp2(peo& a, peo& b) { return a.id < b.id; } int main(){ int n, m, k, quota[110], cnt[110] = {0}; scanf("%d%d%d", &n, &m, &k); vector<peo> stu(n), sch[110]; for(int i = 0; i < m; i++) scanf("%d",&quota[i]); for(int i = 0; i < n; i++) { scanf("%d%d", &stu[i].ge, &stu[i].gi); stu[i].id = i; stu[i].fin = stu[i].ge + stu[i].gi; stu[i].choice.resize(k); for(int j = 0; j < k; j++) scanf("%d", &stu[i].choice[j]); } sort(stu.begin(), stu.end(), cmp); for(int i = 0; i < n; i++) { for(int j = 0; j < k; j++) { int schid = stu[i].choice[j]; int lastindex = cnt[schid] - 1; if(cnt[schid] < quota[schid] || (stu[i].fin == sch[schid][lastindex].fin) && stu[i].ge == sch[schid][lastindex].ge) { sch[schid].push_back(stu[i]); cnt[schid]++; break; } } } for(int i = 0; i < m; i++) { sort(sch[i].begin(), sch[i].end(), cmp2); for(int j = 0; j < cnt[i]; j++) { if(j != 0) printf(" "); printf("%d", sch[i][j].id); } printf("\n"); } return 0; } ```
Shannon & Wilson, also known as S & W, is an American geotechnical engineering and environmental consultancy firm headquartered in Seattle, Washington founded in 1954. It was founded by William L. Shannon and Stanley D. Wilson (de), both of whom are alumni of Harvard University. The firm offers various geotechnical, geological, and environmental services ranging from natural resource management to geophysical surveying for public and private sectors. The company’s markets include dams and levees, design and construction, energy, federal, industrial, property development, transportation, wastewater management, and waterfront facilities. However, over half of their work is devoted to retrofitting and performing safety investigations on transportation projects. Founding The firm is a manifestation of a revolution in civil engineering as a result of the birth of geotechnical engineering, which became a new field by the mid 20th century. Radically new concepts were taking shape in soil and rock mechanics pioneered by Professor Arthur Casagrande, a leading figure in geotechnical engineering, who encouraged Shannon and Wilson to form a partnership. The professor and his colleagues, including the founders of the firm, were in the process of establishing the foundation of what later came to be known as geotechnical engineering. Staff In 1998, the firm peaked at 130 employees. However, with the introduction of the tax-cutting Initiative 695 and the recession after that, both of which slashed transportation funding, the number of employees was reduced to 90 and has never exceeded 100 staff for several years. However, the company has since grown to more than 300 employees in 11 offices nationwide. Events Expansion Although headquartered in Seattle, the firm has offices based in Richland, Portland, St. Louis, Fairbanks, Anchorage and Denver. The Denver office opened around 2000 to work on a nearby revamp of Interstate 25. 
Lawsuits In 2002, Shannon & Wilson became involved in a lawsuit as a result of property damage initiated by landslides in a residential area on Perkins Lane on top of Magnolia Hill. The court ended up ruling in favor of Shannon & Wilson however. Recognition For more than half a century, Shannon & Wilson has played a major role in the design and construction of renowned public and private projects in the Puget Sound region. In recognition of their achievements, each year Shannon & Wilson sponsors the Stanley D. Wilson Memorial Lecture at the University of Washington, the Stanley D. Wilson Fellowship at the University of Illinois, and the William L. Shannon Endowed Fellowship at the University of Washington. Services Geotechnical Engineering Contamination / Remediation Geologic Hazards Natural Resources Water Resources Design-Build Tunneling / Underground Arctic Engineering Construction Management Notable projects SR 520 Floating Bridge Alaskan Way Viaduct replacement tunnel Link light rail Tacoma Narrows Bridge Seahawks Stadium Seattle Center Monorail U.S. Courthouse, Seattle Lower Meramec River Wastewater Treatment Plant Baumgartner Tunnel Boston's Central Artery Third Harbor Tunnel References External links https://trenchlesstechnology.com/shannon-wilson-announces-2017-promotions/ Services Companies based in Seattle Consulting firms established in 1954 1954 establishments in Washington (state) Geotechnical engineering companies Construction and civil engineering companies of the United States American companies established in 1954 Construction and civil engineering companies established in 1954
```python #!/pxrpythonsubst # # # path_to_url from __future__ import print_function import sys, os, unittest from pxr import Usd, Tf class TestErrors(unittest.TestCase): def test_Errors(self): with self.assertRaises(Tf.CppException): Usd._UnsafeGetStageForTesting(Usd.Prim()) if __name__ == "__main__": unittest.main() ```
Frank Woodley ( Wood; born 29 February 1968) is an Australian comedian, author and musician who is best known for his work alongside Colin Lane as part of the comedic duo Lano and Woodley. The two first performed together for almost 20 years in live shows, a television series and an album of comedic songs, before deciding to pursue individual careers in 2006. They announced their reformation in November 2017. Personal life Woodley was born Frank Wood, the youngest of seven children. He grew up in suburban Melbourne, where his family ran a milk bar in Glen Waverley. He adopted the stage name "Frank Woodley"—drawn from a childhood nickname—when he began performing as part of the comedy duo Lano and Woodley in 1993. In 2000 he changed his name by deed poll to avoid confusion. Woodley is married with a son and daughter and lives in Melbourne's inner north. Career Lano and Woodley Woodley first performed with fellow comedian Colin Lane as part of the duo Lano and Woodley for a period of almost 20 years. The two met through theatresports in the mid-1980s and first performed together at an open mike night at the Prince Patrick Hotel in Collingwood, Victoria, in 1987 along with their friend Scott Casley, calling themselves the Found Objects. Over the next six years, the trio performed in venues throughout Australia and at the Edinburgh Festival Fringe. They became semi-regulars on ABC TV's The Big Gig, a show known for boosting the careers of new comedy acts. They also had their own commercial radio show for six months and were part of the short-lived Seven Network sketch show The Comedy Sale. In 1992, when Casley moved to Alice Springs, Woodley and Lane decided to continue as a duo, drawing their name from childhood nicknames. As Lano and Woodley, the two adopt humorous onstage personas, with Woodley playing a "goofy innocent" who is frequently bullied by Lane's pompous, controlling character. Their first show as a comedy duo, Fence, debuted in 1993. 
It toured throughout Australia, winning the Moosehead Award at the Melbourne International Comedy Festival for best act and was eventually taken to the Edinburgh Festival Fringe in 1994 where it won the Perrier Comedy Award. Subsequent live productions have included Curtains, Glitzy, Slick, Bruiser, The Island and their 2006 farewell show, Goodbye. In 2000, they co-hosted the televised Melbourne Comedy Gala. Woodley and Lane have created two television shows together. The Adventures of Lano and Woodley, which premiered on the ABC in 1997, was a comedy series which featured the duo living together in a fictional suburban Melbourne flat and frequently getting into trouble. It aired for two seasons, becoming the first Australian show to be sold to the BBC and airing in 38 other countries. Although they were offered the opportunity to make the series in England, the pair decided to remain in Australia because they did not want to live in London. In 2004 a live show, The Island, was filmed as a TV special and aired on The Comedy Channel. The duo have also released an album, Lano & Woodley Sing Songs, and a novel, Housemeeting. In 2006, after close to 20 years of working together, Woodley and Lane decided to part ways. Woodley stated that the split was due to a desire to pursue new challenges. "We just got to the stage where we felt we had to make a decision", he said. "Either we were going to spend the next 20 years doing this, this'd be our career, our lives forever. And that wouldn't have been a terrible thing. Or we could go, 'Let's have a bit more variety in our lives'." In one final tour, the duo travelled through 37 Australian cities with their farewell show, Goodbye. In 2018, the duo reprised their roles as Lano and Woodley for a show titled Fly which won the 2018 Melbourne Comedy Festival People’s Choice Award. 
Solo work Woodley made his solo debut in 2003 at the Melbourne International Comedy Festival with The Happy Dickwi, a show about "a whole lot of unrelated ideas". He has since performed a number of solo stand-up shows, and in 2008 debuted a one-man play entitled Possessed. Directed by Kate Denborough and featuring music from Paul Mac, Possessed is the story of Louie, a lonely recluse who falls in love with, and becomes possessed by, the ghost of a 19th-century shipwreck victim. Woodley says that the show came out of the desire to do a big solo show and his interest in doing a romantic comedy: "So I was thinking that maybe I could do a solo romantic comedy where I fell in love with myself. Although it was just a stupid joke initially, I started thinking about having a ghost possess me and then I fall in love with the ghost. I have to help free her from the curse she's under." The show has toured both nationally and internationally, and draws inspiration from Buster Keaton, Charlie Chaplin, Don Adams, Peter Sellers, Jerry Lewis and Laurel and Hardy. Woodley played a television vet named "Frank Woodley" in the twelfth episode of the 1998 Australia television series The Games. Woodley has made regular guest appearances on Australian television, including Spicks and Specks, Good News Week, Thank God You're Here, The Sideshow, Big Question, Rove Live, Australia's Brainiest Comedian and Show Me the Movie!. In 2008, he appeared in a series of television advertisements for Metlink promoting public transport in Melbourne, in particular buses. In 2007, Woodley performed in the stage show The Complete Works of William Shakespeare (Abridged), a 97-minute production which encompasses 37 Shakespearian plays. From 2008 to 2009, Woodley co-hosted a show on The Comedy Channel called Aussie Gold every Saturday night. In 2012, Woodley had a small role in the Australian comedy film Kath & Kimderella. In 2015, he co-starred as the dogcatcher in Oddball. 
Aside from being a comedian, Woodley is also a children's author and has written a series of children's books called Kizmet. In 2020, Woodley participated in the first Australian season of the Amazon Prime comedy competition series LOL: Last One Laughing, going on to win the grand prize. In 2022, Woodley was appointed as one of the team captains in Would I Lie to You? Australia alongside Chris Taylor. In 2023, Woodley had a small role as a fertility specialist in the Australian drama comedy Love Me. Woodley A television project entitled Woodley screened in 2012. It is a half-hour visual comedy about a largely innocent-seeming man who is caught up in real-world problems. The series follows his attempts to bond with his daughter and his estranged wife (Justine Clarke), though this proves difficult for the accident-prone Woodley, as she tries to move forward with her life with her new partner Greg. The show was announced with a number of other projects in a $1.2 million funding from the Victorian government. The show premiered on Wednesday 22 February at 8.00pm on ABC1. The show is built around an extensive use of visual (often slapstick) humour, rather than dialogue, although this is also used sparingly. References External links 1968 births Living people Australian male comedians Australian male film actors Australian male television actors Australian male voice actors Comedians from Melbourne Male actors from Melbourne 20th-century Australian male actors 21st-century Australian male actors
The Red Fisher Show is a Canadian television series which appeared on CTV from 1968 to 1989. Its episodes featured host and American expatriate B. H. "Red" Fisher with different guests who would narrate footage of fishing or hunting expeditions in various regions of Canada and the United States. The show's TV set was dubbed "Scuttlebutt Lodge, the Tall Tale Capital of the World". The show was subject to parody, in the form of the also popular The Red Green Show, and SCTV's The Fishin' Musician with John Candy as host Gil Fisher. The program was broadcast on weekends outside prime time, generally appearing Saturday afternoons. Guests Dates indicated are based on broadcasts on CFTO-TV Toronto. Gordon "Red" Berenson, hockey player (3 July 1971) Johnny Bower, hockey player Bill Culluton, flycaster (17 July 1971) Ben Hardesty, World Casting Champion (11 September 1971) Alan Hale, Jr., actor Stan Mikita, hockey player Gordie Howe, hockey player Ferguson Jenkins, baseball Ben Johnson, actor Roger Maris, baseball (10 July 1971) Merlin Olsen, football player Slim Pickens, actor Eddie Shack, hockey player Ted Williams, baseball player (11 March 1972) See also B. H. Fisher References 1968 Canadian television series debuts 1989 Canadian television series endings CTV Television Network original programming 1960s Canadian documentary television series 1960s Canadian sports television series 1970s Canadian sports television series 1980s Canadian sports television series 1970s Canadian documentary television series 1980s Canadian documentary television series
```c /* * * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided * with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #ifndef __clang__ unsigned short __builtin_addcs(unsigned short, unsigned short, unsigned short, unsigned short *); #endif int main() { unsigned short carryout; __builtin_addcs((unsigned short) 0x0, (unsigned short) 0x0, 0, &carryout); if (carryout != 0) { return -1; } __builtin_addcs((unsigned short) 0xFFFF, (unsigned short) 0x0, 0, &carryout); if (carryout != 0) { return -1; } __builtin_addcs((unsigned short) 0x0, (unsigned short) 0xFFFF, 0, &carryout); if (carryout != 0) { return -1; } __builtin_addcs((unsigned short) 0xFFFF, (unsigned short) 0x1, 0, &carryout); if (carryout != 1) { return -1; } __builtin_addcs((unsigned short) 0x1, (unsigned short) 0xFFFF, 0, &carryout); if (carryout != 1) { return -1; } __builtin_addcs((unsigned short) 0xFFFF, (unsigned short) 0xFFFF, 0, &carryout); if (carryout != 1) { return -1; } __builtin_addcs((unsigned short) 0x0, (unsigned short) 0xFFFE, 1, &carryout); if (carryout != 0) { return -1; } __builtin_addcs((unsigned short) 0x0, (unsigned short) 0xFFFF, 1, &carryout); if (carryout != 1) { return -1; } __builtin_addcs((unsigned short) 0xFFFE, (unsigned short) 0x0, 1, &carryout); if (carryout != 0) { return -1; } __builtin_addcs((unsigned short) 0xFFFF, (unsigned short) 0x0, 1, &carryout); if (carryout != 1) { return -1; } __builtin_addcs((unsigned short) 0xFFFF, (unsigned short) 0xFFFF, 1, &carryout); if (carryout != 1) { return -1; } unsigned short res1 = __builtin_addcs((unsigned short) 0x0FFF, (unsigned short) 0x1, 0, &carryout); if (res1 != 0x1000 || carryout != 0) { return -1; } unsigned short res2 = __builtin_addcs((unsigned short) 0x0FFF, (unsigned short) 0x1, 1, &carryout); if (res2 != 0x1001 || carryout != 0) { return -1; } return 0; } ```
Sceloporus becki, also known as the island fence lizard, is a species of lizard endemic to the Channel Islands of California. Taxonomy It was once considered a subspecies of the western fence lizard (Sceloporus occidentalis), but is now considered to be its own species. Etymology The specific epithet, becki, is in honor of Rollo Howard Beck, an American ornithologist who collected the first specimens. References Flaxington, William (2005). Photograph of Island fence lizard on Santa Cruz Island. Calphotos External links National Park Service, Channel Islands - Island Fence Lizard U.S. Department of the Interior Island Fence Lizard - Sceloporus occidentalis becki CaliforniaHerps.com A Guide to the Amphibians and Reptiles of California Sceloporus Endemic fauna of California Fauna of the Channel Islands of California Fauna of the California chaparral and woodlands Lizards of North America Endemic reptiles of the United States Reptiles described in 1905 Taxa named by John Van Denburgh Fauna without expected TNC conservation status
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Test fixture: minimal process exercising a dynamic JavaScript script task.
     Flow: theStart -> script1 -> task1 -> theEnd -->
<definitions id="definitions"
             xmlns="path_to_url"
             xmlns:activiti="path_to_url"
             targetNamespace="Examples">
  <process id="testDynamicScript">
    <startEvent id="theStart" />
    <sequenceFlow id="flow1" sourceRef="theStart" targetRef="script1" />
    <!-- autoStoreVariables="false": script-scope variables are not copied back
         to the execution automatically; the script stores its result itself
         via execution.setVariable.  The inputs "a" and "b" are not declared
         here, so they are presumably provided as process variables at start —
         confirm against the test that deploys this definition. -->
    <scriptTask id="script1" scriptFormat="JavaScript" activiti:autoStoreVariables="false">
      <script>
        <![CDATA[ var sum = a + b; execution.setVariable("test", sum); ]]>
      </script>
    </scriptTask>
    <sequenceFlow id="flow2" sourceRef="script1" targetRef="task1" />
    <!-- User task acts as a wait state after the script has run. -->
    <userTask id="task1" />
    <sequenceFlow id="flow3" sourceRef="task1" targetRef="theEnd" />
    <endEvent id="theEnd" />
  </process>
</definitions>
```
```go
/*
path_to_url

Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/

package remotecommand

import (
	"context"
	"io"
	"net/http"

	"k8s.io/apimachinery/pkg/util/httpstream"
)

// StreamOptions holds information pertaining to the current streaming session:
// input/output streams, if the client is requesting a TTY, and a terminal size queue to
// support terminal resizing.
type StreamOptions struct {
	// Stdin, Stdout and Stderr carry the shell streams; a nil stream is not
	// transported (see Executor.StreamWithContext).
	Stdin  io.Reader
	Stdout io.Writer
	Stderr io.Writer
	// Tty requests a raw TTY; when set, Stderr is unused because stdout and
	// stderr are both carried over the stdout stream.
	Tty bool
	// TerminalSizeQueue supplies terminal resize events for the session.
	TerminalSizeQueue TerminalSizeQueue
}

// Executor is an interface for transporting shell-style streams.
type Executor interface {
	// Deprecated: use StreamWithContext instead to avoid possible resource leaks.
	// See path_to_url for details.
	Stream(options StreamOptions) error

	// StreamWithContext initiates the transport of the standard shell streams. It will
	// transport any non-nil stream to a remote system, and return an error if a problem
	// occurs. If tty is set, the stderr stream is not used (raw TTY manages stdout and
	// stderr over the stdout stream).
	// The context controls the entire lifetime of stream execution.
	StreamWithContext(ctx context.Context, options StreamOptions) error
}

// streamCreator abstracts opening a new stream, described by the supplied
// headers, over an already-established connection.
type streamCreator interface {
	CreateStream(headers http.Header) (httpstream.Stream, error)
}

// streamProtocolHandler runs a streaming session over streams obtained from
// the given streamCreator.
type streamProtocolHandler interface {
	stream(conn streamCreator) error
}
```
Spermospora avenae or red leather leaf is a fungal plant pathogen of Avena sativa. The slender colourless hyphae that colonise oat plants become broader in the leaf epidermis and develop a layer of swollen, irregular shaped cells from which conidiophores arise. These penetrate to the exterior and a single conidium forms, ready for dispersal. The colourless conidia are substantially longer than they are wide, crescent shaped and have two or three septa. The disease symptoms on the plant leaves are initially small blue and reddish discolorations that then become larger and extend along the length of the leaf blade. The affected areas are irregular in shape. These eventually darken to red-brown and become leathery in appearance. Wet weather is conducive to infections. The spores can survive on seeds and stubble. Foliar fungicide application has been found helpful, although is not sufficient by itself. References Pezizomycotina
```xml
// Package entry point: re-exports the pdf.js runtime together with the React
// components and context hooks that form the public API surface.
import * as pdfjs from 'pdfjs-dist';

import Document from './Document.js';
import Outline from './Outline.js';
import Page from './Page.js';
import Thumbnail from './Thumbnail.js';

import useDocumentContext from './shared/hooks/useDocumentContext.js';
import useOutlineContext from './shared/hooks/useOutlineContext.js';
import usePageContext from './shared/hooks/usePageContext.js';

// Re-export prop types so consumers can type their own wrapper components.
export type { DocumentProps } from './Document.js';
export type { OutlineProps } from './Outline.js';
export type { PageProps } from './Page.js';
export type { ThumbnailProps } from './Thumbnail.js';

// Side-effect-only import; presumably wires up the bundled pdf.js worker
// entry — NOTE(review): confirm against the build setup before moving/removing.
import './pdf.worker.entry.js';

export {
  pdfjs,
  Document,
  Outline,
  Page,
  Thumbnail,
  useDocumentContext,
  useOutlineContext,
  usePageContext,
};
```
18 and 19 Brook Green are two Grade II listed Georgian houses at 18–19 Brook Green, Hammersmith, London, W6. The houses were built in the early 19th century. References Grade II listed buildings in the London Borough of Hammersmith and Fulham Grade II listed houses in London Houses in the London Borough of Hammersmith and Fulham Houses completed in the 19th century
Taira flavidorsalis is a spider species in the genus Taira. It is native to Japan. It was first described in 1964 as Amaurobius flavidorsalis. References Amaurobiidae
```xml
import { renderHook } from '@testing-library/react-hooks';
import * as React from 'react';

import { useMenu_unstable } from './useMenu';
import { useMenuContextValues_unstable } from './useMenuContextValues';

describe('useMenuContextValues_unstable', () => {
  it('should return a value for "menu"', () => {
    // Build menu state from default props and derive the context values in
    // the same render pass, mirroring how the Menu component composes them.
    const { result } = renderHook(() => {
      const state = useMenu_unstable({ children: <span /> });
      return useMenuContextValues_unstable(state);
    });

    // The inline snapshot pins the complete default shape of the "menu"
    // context value; update it deliberately when a context field is added.
    expect(result.current.menu).toMatchInlineSnapshot(`
      Object {
        "checkedValues": Object {},
        "hasCheckmarks": false,
        "hasIcons": false,
        "inline": false,
        "isSubmenu": false,
        "menuPopoverRef": Object {
          "current": null,
        },
        "mountNode": null,
        "onCheckedValueChange": [Function],
        "open": false,
        "openOnContext": false,
        "openOnHover": false,
        "persistOnItemClick": false,
        "setOpen": [Function],
        "triggerId": "menu1",
        "triggerRef": Object {
          "current": null,
        },
      }
    `);
  });
});
```
```go package responsetransformer import ( "testing" "github.com/hellofresh/janus/pkg/plugin" "github.com/hellofresh/janus/pkg/proxy" "github.com/stretchr/testify/assert" ) func TestResponseTransformerConfig(t *testing.T) { var config Config rawConfig := map[string]interface{}{ "add": map[string]interface{}{ "headers": map[string]string{ "NAME": "TEST", }, "querystring": map[string]string{ "name": "test", }, }, } err := plugin.Decode(rawConfig, &config) assert.NoError(t, err) assert.IsType(t, map[string]string{}, config.Add.Headers) assert.Contains(t, config.Add.Headers, "NAME") } func TestResponseTransformerPlugin(t *testing.T) { rawConfig := map[string]interface{}{ "add": map[string]interface{}{ "headers": map[string]string{ "NAME": "TEST", }, }, } def := proxy.NewRouterDefinition(proxy.NewDefinition()) err := setupResponseTransformer(def, rawConfig) assert.NoError(t, err) assert.Len(t, def.Middleware(), 1) } ```
Keshabpur High School (H.S.) is a co-educational institution with instruction given in Bengali. It is in Keshabpur village, block of Domjur, District of Haora, state of West Bengal, India. It was established in 1968 and is overseen by the Department of Education Management. History Keshabpur High School is the largest school in Keshabpur village, and is under the WBCHSE. Courses taught include: Science, Arts, and Commerce. The school is centrally located in the village. Students come from the surrounding villages including: Keshabpur, Rudrapur, Wadipur, Rongpara, Khantora, Kolora and others. There were 737 students attending as per government records. The staff numbers 30. At this location is the ‘West Primary School’. The current head teacher is Haradhan Kungar. Students with financial difficulties attend on scholarship. They are currently developing a science program including new teaching staff and a laboratory. Government support was used to construct a fourth-floor seminar hall. Facilities and services include: a parking lot and government-supported mid-day meals. See also Keshabpur References External links Keshabphur High School High schools and secondary schools in West Bengal Schools in Howrah district Educational institutions established in 1968 1968 establishments in West Bengal
Hijab is a veil worn by Muslim women. Hijab or Hidjab may also refer to: Religion Hijab, a concept in Sufism. Types of hijab, styles of dress. Hijab by country, an Islamic style of dress by country. World Hijab Day, an event to encourage women to wear the hijab. People Hijab Imtiaz Ali, an Indian writer, editor and diarist. Riyad Farid Hijab, a Syrian politician.
Automeris larra is a moth of the family Saturniidae. It is found in South America, including Brazil, French Guiana, Venezuela, Colombia, Peru, Bolivia and Ecuador. Subspecies Automeris larra larra Automeris larra eitschbergeri (Peru, Ecuador) External links Silk moths Hemileucinae Moths described in 1855 Moths of South America
Leizhou () is a county-level city in Guangdong Province, China. It is under the jurisdiction of the prefecture-level city of Zhanjiang. The city was formerly known as Haikang County (postal: Hoihong); it was upgraded into a city in 1994. Geography Leizhou is located at the extreme southwestern end of Guangdong and lies on the Leizhou Peninsula. Transportation China National Highway 207 Climate Notable People Mạc Cửu (1655–1731): Founder of the Principality of Hà Tiên. See also Leizhou dialect References County-level cities in Guangdong Zhanjiang
Ulick Lupede (born 1 June 1984 in Pointe-à-Pitre) is a French footballer currently under amateur contract for French side C.O. Saint-Saturnin Arche. Lupede previously played for Le Mans Union Club 72 in Ligue 1 and Ligue 2. Lupede appeared in four 2010 Caribbean Cup matches to help Guadeloupe to a runner-up finish. References 1984 births Living people French men's footballers Guadeloupean men's footballers Men's association football defenders Le Mans FC players Entente SSG players 2011 CONCACAF Gold Cup players Guadeloupe men's international footballers Tours FC players Rodez AF players Associação Naval 1º de Maio players S.C. Covilhã players
```objective-c
#pragma once

namespace search::tensor {

/**
 * Interface for converting between the internal distance representation
 * used by a distance function and the external distances / rawscores
 * exposed to callers.
 */
class DistanceConverter {
public:
    virtual ~DistanceConverter() = default;

    /**
     * Convert threshold (external distance units) to internal units.
     */
    virtual double convert_threshold(double threshold) const noexcept = 0;

    /**
     * Convert internal distance to rawscore (also used as closeness).
     */
    virtual double to_rawscore(double distance) const noexcept = 0;

    /**
     * Convert rawscore to external distance.
     * Override this when the rawscore is NOT defined as (1.0 / (1.0 + external_distance)).
     */
    virtual double to_distance(double rawscore) const noexcept {
        // Inverse of the default rawscore mapping 1.0 / (1.0 + distance).
        return (1.0 / rawscore) - 1.0;
    }

    /**
     * The minimum rawscore (also used as closeness) that this distance function can return.
     */
    virtual double min_rawscore() const noexcept {
        return 0.0;
    }
};

}
```
```xml import { defineMessages } from 'react-intl'; export const messages = defineMessages({ androidAppButtonUrl: { id: 'voting.info.androidAppButtonUrl', defaultMessage: '!!!path_to_url description: '"androidAppButtonUrl" for the Catalyst voting app', }, appleAppButtonUrl: { id: 'voting.info.appleAppButtonUrl', defaultMessage: '!!!path_to_url description: '"appleAppButtonUrl" for the Catalyst voting app', }, }); ```
```c++
// Boost.Geometry (aka GGL, Generic Geometry Library)
// Unit Test

// Parts of Boost.Geometry are redesigned from Geodan's Geographic Library
// (geolib/GGL), copyright (c) 1995-2010 Geodan, Amsterdam, the Netherlands.

// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// path_to_url

#include <boost/geometry/core/cs.hpp>
#include <boost/geometry/geometries/concepts/check.hpp>

// A point type exposing only read access to its coordinates.
struct ro_point { float x, y; };

// A point type exposing both read and write access.
struct rw_point { float x, y; };

namespace boost { namespace geometry { namespace traits {

// Adapt ro_point to the (const) Point concept: tag, coordinate type,
// coordinate system, dimension, and read-only access (get() only).
template <> struct tag<ro_point> { typedef point_tag type; };
template <> struct coordinate_type<ro_point> { typedef float type; };
template <> struct coordinate_system<ro_point> { typedef cs::cartesian type; };
template <> struct dimension<ro_point> { enum { value = 2 }; };

template <> struct access<ro_point, 0>
{
    static float get(ro_point const& p) { return p.x; }
};

template <> struct access<ro_point, 1>
{
    static float get(ro_point const& p) { return p.y; }
};

// Adapt rw_point to the mutable Point concept: same traits, plus set()
// members so coordinates can be written.
template <> struct tag<rw_point> { typedef point_tag type; };
template <> struct coordinate_type<rw_point> { typedef float type; };
template <> struct coordinate_system<rw_point> { typedef cs::cartesian type; };
template <> struct dimension<rw_point> { enum { value = 2 }; };

template <> struct access<rw_point, 0>
{
    static float get(rw_point const& p) { return p.x; }
    static void set(rw_point& p, float value) { p.x = value; }
};

template <> struct access<rw_point, 1>
{
    static float get(rw_point const& p) { return p.y; }
    static void set(rw_point& p, float value) { p.y = value; }
};

}}} // namespace bg::traits

int main()
{
    // A const-qualified type is checked against the read-only concept;
    // the non-const type must satisfy the mutable concept as well.
    boost::geometry::concepts::check<const ro_point>();
    boost::geometry::concepts::check<rw_point>();
}
```
Manganese germanide (MnGe) is an intermetallic compound, a germanide of manganese. Its crystals have a cubic symmetry with no inversion center, they are therefore helical, with right-hand and left-handed chiralities. Magnetism At low temperatures, MnGe and its relative MnSi exhibit unusual spatial arrangements of electron spin, which were named magnetic skyrmion, tetrahedral and cubic hedgehog lattices. Their structure can be controlled not only by the Si/Ge ratio, but also by temperature and magnetic field. This property has potential application in ultrahigh-density magnetic storage devices. Synthesis MnGe crystals can be produced by processing a mixture of Mn and Ge powders at a pressure of 4–5 GPa and a temperature of 600–1000 °C for 1–3 hours. They are metastable and decompose into Mn11Ge8 and Ge upon subsequent heating to 600 °C at ambient pressure. Structure Manganese germanide is a non-stoichiometric compound where the Ge:Mn ratio often deviates from 1. The Mn3Ge5 compound is a Nowotny phase exhibiting a chimney ladder structure. It is either a semimetal or a narrow-gap semiconductor. References Manganese compounds Germanides Iron monosilicide structure type
```c++
//////////////////////////////////////////////////////////////////////////////
//
// LICENSE_1_0.txt or copy at path_to_url
//
// See path_to_url for documentation.
//
//////////////////////////////////////////////////////////////////////////////
#include <boost/container/flat_map.hpp>

// Minimal key/value type: all instances compare equal, and operator< is
// deliberately degenerate (always returns true) — sufficient for an
// instantiation test that never relies on ordering.
struct empty
{
   friend bool operator == (const empty &, const empty &){ return true; }
   friend bool operator < (const empty &, const empty &){ return true; }
};

// Explicit instantiation: forces the compiler to build every member of
// flat_map/flat_multimap so latent template errors surface at compile time.
template class ::boost::container::flat_map<empty, empty>;
template class ::boost::container::flat_multimap<empty, empty>;

int main()
{
   // Default-construct each container to exercise the constructors at runtime.
   ::boost::container::flat_map<empty, empty> dummy;
   ::boost::container::flat_multimap<empty, empty> dummy2;
   (void)dummy; (void)dummy2;
   return 0;
}
```
Chiara Ingrao (born 25 April 1949) is an Italian politician. Chiara Ingrao was the daughter of Pietro Ingrao and his wife. She was born in Rome on 25 April 1949. Ingrao was a translator, and has written about her experience in a metalworking union. She was elected to the Chamber of Deputies as a member of the Democratic Party of the Left, and sat on the Legislature XI, which met between 1992 and 1994. Ingrao later served as an adviser to the Italian Minister for Equal Opportunities. References 1949 births Living people 20th-century Italian women writers Deputies of Legislature XI of Italy 20th-century Italian women politicians Democratic Party of the Left politicians Italian translators Italian trade unionists Italian women trade unionists Writers from Rome Politicians from Rome Women members of the Chamber of Deputies (Italy)
```yaml
static_resources:
  listeners:
  # Plain-HTTP listener on 0.0.0.0:10000; all traffic goes through the
  # HTTP connection manager below.
  - name: listener_0
    address:
      socket_address:
        protocol: TCP
        address: 0.0.0.0
        port_value: 10000
    filter_chains:
    - filters:
      - name: envoy.filters.network.http_connection_manager
        typed_config:
          "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
          stat_prefix: ingress_http
          route_config:
            name: local_route
            virtual_hosts:
            - name: local_service
              domains: ["*"]
              routes:
              - match:
                  prefix: "/"
                route:
                  host_rewrite_literal: upstream.com
                  cluster: upstream_com
          http_filters:
          # Checksum filter: responses for the listed paths must match the
          # given sha256 digests; unmatched paths are rejected.
          - name: envoy.filters.http.checksum
            typed_config:
              "@type": type.googleapis.com/envoy.extensions.filters.http.checksum.v3alpha.ChecksumConfig
              reject_unmatched: true
              checksums:
              - path_matcher:
                  exact: /path/to/hashed.asset
                sha256: your_sha256_hash
              - path_matcher:
                  exact: /path/to/other-hashed.asset
                sha256: your_sha256_hash
          - name: envoy.filters.http.router
            typed_config:
              "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router

  clusters:
  # Upstream cluster reached over TLS with SNI for upstream.com.
  - name: upstream_com
    type: LOGICAL_DNS
    # Comment out the following line to test on v6 networks
    dns_lookup_family: V4_ONLY
    lb_policy: ROUND_ROBIN
    load_assignment:
      cluster_name: service_upstream_com
      endpoints:
      - lb_endpoints:
        - endpoint:
            address:
              socket_address:
                address: upstream.com
                port_value: 443
    transport_socket:
      name: envoy.transport_sockets.tls
      typed_config:
        "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext
        sni: upstream.com
```
```graphql
# A type with a single required (non-null) string field.
type Model1 {
  data: String!
}
```
Holroyd River is a locality in the Shire of Cook, Queensland, Australia. At the time of the census, Holroyd River had a population of 0 people. Geography The Archer River forms a small part of the northern boundary. References Shire of Cook Localities in Queensland
```yaml
name: Zello
description: 'Dispatch Hub is a communication solution between dispatchers and drivers.'
# NOTE(review): fixed unterminated single-quoted scalar on `website`, which
# made the document unparseable. Also confirm the name/description pairing —
# the name says "Zello" while the description mentions "Dispatch Hub".
website: 'path_to_url'
category: Business
keywords:
  - ptt
  - 'push to talk'
  - dispatch
  - communication
  - dispatcher
  - driver
  - courier
  - fleet
  - call
```
Georg Wahl (Kosel, Upper Silesia, 21 February 1920 – 4 November 2013) was Chief Rider at the Spanish Riding School in Vienna, dressage instructor, rider and trainer. He was also known as the coach and trainer of Swiss Olympic medalist Christine Stückelberger. He grew up at the Kosel Stud, where his father was a trainer of coach horses. Wahl learned to ride at an early age, mainly bareback. In 1939, Wahl joined the cavalry of the Wehrmacht, where he was under the command of Hans-Joachim Köhler from Verden. Köhler, who later became known worldwide as an expert in the field of horse training, organised a dressage competition for his troops to improve their morale. Held only 20 km behind the front lines, Wahl showed himself to be the best of the riders present. Alois Podhajsky happened to attend this competition, and in 1940, Podhajsky recruited Wahl to the Spanish Riding School. Because Wahl possessed extraordinary talent and feeling for horses he advanced quickly at the school and within a very short time participated in the School's public performances. From 1938 until 1944 he served as a Rider (Bereiter) at the School. After a brief period of time away from the School in 1944 to serve at the Russian front in Hungary, and a brief wartime imprisonment, he returned to the Spanish Riding School. On one of the Tours of the School in Switzerland, Wahl met Fredy Knie, of the famed Circus Knie and left the School in 1951 to work for Knie. For two years, Knie and Wahl performed a widely known Pas de Deux exhibition. In 1955 Wahl took over the Community Riding School (Stadtreitschule) in Bern, Switzerland where he first met Christine Stückelberger. In 1967 Hans Handler, who succeeded Podhajsky as director of the Spanish Riding School, rehired Wahl, promoting him to Chief Rider (Oberbereiter), where he remained until 1971. Upon his return to Switzerland in 1971, Wahl devoted himself to the training and promotion of Stückelberger and her Holsteiner gelding Granat. 
Wahl and Stückelberger eventually became life partners. The horse and rider team of Stückelberger and Granat made dressage history as the most successful performers in international Dressage competition for the nation of Switzerland. They won team silver at the Dressage World Cup in Copenhagen 1974, individual gold at the 1975 European Dressage Championships, and an Equestrian individual gold at the 1976 Montreal Olympics. Following their Olympic win, they took gold again at the 1977 European Dressage Championships and at the 1978 Dressage World Championships in Goodwood. Georg Wahl was married twice. He and his first wife, Maria, had four children. He also had two daughters from a second marriage. He lived with Stückelberger in Kirchberg near St. Gallen. He remained committed to the classical theory of dressage, and valued the preservation and dissemination of classical horsemanship. Footnotes References Dressage trainers Austrian dressage riders Spanish Riding School German emigrants to Austria People from Kędzierzyn-Koźle 1920 births 2013 deaths
National Institute of Technology Warangal (NIT Warangal or NITW) is a public technical and research university located in Warangal, India. It is recognised as an Institute of National Importance by the Government of India. The foundation stone for this institute was laid by then Prime Minister Jawaharlal Nehru in 1959, the first in the chain of 31 NITs (formerly known as RECs) in the country. The institute was renamed as the National Institute of Technology, Warangal in 2002. History The Regional Engineering College, Warangal was the first to be established (in 1959) among the chain of 15 Regional Engineering Colleges in the country. The approval of the Government of India to establish one of the RECs meant for the southern region of Warangal was conveyed to the State Government through its letter no. F 11-5/58-T.5 dated 30 January 1959. The Government of Andhra Pradesh through G. O. Ms. No. 2440 (Education Department) dated 15 July 1959 constituted the first board of Governors of the Regional Engineering College, Warangal, appointing Dr. D.S. Reddy (then Vice-Chancellor of Osmania University) as the chairman. The foundation stone for the college was laid by Pandit Jawaharlal Nehru on 10 October 1959 in Kazipet. The classes for the first batch of students in Civil, Electrical and Mechanical Engineering branches commenced on 12 September 1959 in temporary sheds erected in the Balasamudram area of Hanamkonda. Later temporary sheds were also erected in the Industrial Colony of Warangal for conducting lecture classes. Laboratory classes and workshop classes were conducted in the Government Polytechnic, Warangal in the initial stages. The conduction of classes on the premises of the permanent site in Kazipet commenced in January 1963. The bachelor's programme in Chemical Engineering was initiated in 1964. The following year saw the start of the Metallurgical Engineering programme.
In 1971, the college started the undergraduate program in Electronics and Communication Engineering. BTech in Computer Science & Engineering was started in 1983 and MTech in Computer Science & Engineering in 1987. In 2006, the institute started a bachelor's program in Biotechnology. In 1976, its affiliation was changed from Jawaharlal Nehru Technological University to Kakatiya University, with which it remained affiliated until the institute was granted deemed university status in 2002. In 1994, the institute was selected by the Overseas Development Administration of the UK for exchange programs between the UK and India in the field of information technology. This program ran till 1999. In 2000, the institute started its MBA program. In 2001, a centre of Software Technology Parks of India was opened in its campus. In September 2002, the college was renamed as National Institute of Technology and was given deemed university status. NITW was given the status of Institute of National Importance (INI) on 15 August 2007. Campus Dr VA Sastry Centre for Innovation and Incubation NIT Warangal has a centre established with the objective of providing laboratory space for start-up industries. Future Now Innosoft (p) ltd and Sky e it solutions have started their activities in this centre. The Lakshya Foundation, an alumni-led organization, also has its office set up here. M/s Infosys (p) ltd. funded a research project which is carried out in this centre by the faculty and students of NIT Warangal. CUSMAT, A Virtual Reality-based startup also has emerged at the Centre for Innovation and Incubation, NIT Warangal. It also houses the Web and Software Development Cell (WSDC), a team of students which develop the institute website, semester registrations, online feedback, online attendance, online mess and hostel allotment (OMAHA) among many other things. Center For Innovation And Incubation The Innovation Garage is a 24x7 student-run multidisciplinary workspace for innovation. 
This innovators space provides students access to the latest gadgets, tools and technology devices. It is a joint initiative of the institute and Lakshya Foundation. Other facilities State Bank of India is situated beside the campus main gate and has two ATMs – one located in the bank premises and the other near the Sports Stadium. The institute has one shopping centre which caters to the needs of the students and residents. The campus has a post office located near Viswesvraya Hall. The NITW campus comes under a separate postal zone and it is a postal delivery office. The dispensary has an X-ray machine, an ECG and a pathology laboratory with equipment like an electronic BP apparatus with pulse reader, a mini lab, an electronic binocular microscope, and a sterilization oven. Academics Admission Bachelor of Technology admissions for Indian students are based on the Joint Entrance Examination (JEE – Main). Foreign students are accepted through Direct Admission of Students Abroad (DASA) and ICCR schemes. MTech students are admitted through the Graduate Aptitude Test in Engineering (GATE). MCA students are admitted through the NIT MCA Common Entrance Test (NIMCET). Admissions to the MBA program is based on Common Admission Test (CAT) or Management Aptitude Test (MAT) scores, and short listed candidates undergo group discussion or a personal interview for the final selection. MSc and MSc (Tech.) students are admitted through the National Institute of Technology Warangal Entrance Test (NITWET). Admissions in various MSc courses is also done on the basis of marks scored in JAM(Joint Admission test for MSc) through CCMN. NIT Warangal invites applications for PhD degree admissions in almost all departments twice every academic year, in July and December. Rankings NIT Warangal was ranked 21st among engineering colleges in India by the National Institutional Ranking Framework (NIRF) in 2023 and 53rd overall. 
Student life Festivities NIT Warangal holds technical and cultural events throughout the year. Major annual events include Technozion (technical fest), SpringSpree (cultural fest) and Cura (management fest). The event called Zero Gravity is held every year, which is an interbranch competition in cultural events. Technozion Technozion is a three-day annual technical symposium organized by the student fraternity of the National Institute of Technology, Warangal, and is aimed at providing a platform for students across India to assemble, interact, and share knowledge in various fields of science and technology. Started in 2006, it has a footfall of over 6000 students. It is a collection of events, initiatives, workshops, guest lectures, and exhibitions. Its name comes from "techno" for technology and "Zion" meaning the promised land. Springspree Springspree is an annual cultural festival of the National Institute of Technology, Warangal. It is organized by the student fraternity of NIT Warangal. This cultural fest has a footfall of around 10,000 and participation of around 600 colleges. It started in 1978. CURA Cura is a momentous management event organised by the students of School of Management, NIT Warangal. Cura, signifying "Thoughtfulness", is a platform that started to unleash the potential of management aspirants all over India. The aim of the event is to elucidate the major business activities through different events and thus to elicit diverse responses from the rapt and admiring students of management. The event is a beacon of light for all those who can balance and blend their skills with palatable and innovative ideas accompanied by verve. Notable alumni Padma Kuppa, State Representative of Michigan's 41st House of Representatives district in the United States Pushmeet Kohli, Head of Research at Google DeepMind, highly cited researcher in machine learning and computer vision Sujatha Gidla, Author Siva S.
Banda, Director of Control Science Center of Excellence and Chief Scientist for Air Vehicles Directorate at United States Air Force Research Laboratory; elected to National Academy of Engineering; President's Award for Distinguished Federal Civilian Service in 2010 Lalit Goel, Professor of Electrical Engineering, Nanyang Technological University V. V. Lakshminarayana, former Joint Director for India's Central Bureau of Investigation Kavuri Sambasiva Rao, Member of Parliament, India (5th term, 8th, 9th, 12th, 14th, 15th Lok Sabha Madhura Sreedhar Reddy, director, producer, Telugu cinema S.P.Y. Reddy, Member of Parliament, India (2nd term, 14th, 15th Lok Sabha) Rao Remala, first Indian employee of Microsoft Ramajogayya Sastry, film lyricist, winner of Filmfare Award for Best Lyricist – Telugu Biswanath Rath, film director, screenwriter, editor and producer Nambala Keshava Rao, General Secretary of Communist Party of India (Maoist) Sudhir Kumar Mishra, Director General of the Defence Research & Development Organisation (DRDO) Arvind Kumar, CEO of Dukes India Kartikeya Gummakonda, Actor Sadanala Ramakrishna, Maoist Shreya Dhanwanthary, Actress K Gowri Shankar, Group Director of G&G Group of Companies Dr. Chirantan Muliya, Founder of Baba Chatuanand foundation See also List of educational institutions in Telangana List of institutions of higher education in Telangana List of National Institutes of Technology in India References External links National Institutes of Technology Universities and colleges established in 1959 Universities and colleges in Telangana Engineering colleges in Telangana Education in Warangal University departments in India Business schools in Telangana All India Council for Technical Education 1959 establishments in Andhra Pradesh
```swift
//
//  SplitViewController.swift
//  CleanseGithubBrowser
//
//  Created by Mike Lewis on 6/12/16.
//

import UIKit

/// Common base class for UISplitViewController that has better default constructors
class SplitViewController : UISplitViewController {
    /// Creates a split view controller whose master view controller also acts
    /// as the split view's delegate. The master is wrapped in a navigation
    /// controller before being installed as the only child.
    init<RootVC: UIViewController>(masterViewController: RootVC) where RootVC: UISplitViewControllerDelegate {
        super.init(nibName: nil, bundle: nil)

        // The master drives collapse/expand behavior via the delegate protocol.
        self.delegate = masterViewController
        self.viewControllers = [UINavigationController(rootViewController: masterViewController)]
    }

    // Storyboard/XIB instantiation is intentionally unsupported.
    @available(*, unavailable)
    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
```
USS Mansfield (DD-728), was an of the United States Navy Namesake Duncan Mansfield was born in February 1778 at Albany, New York. He enlisted in the United States Marine Corps at Philadelphia, Pennsylvania on 11 August 1798 and served until 1805. While serving on the schooner during the First Barbary War, Sergeant Mansfield volunteered for the cutting‑out expedition led by Lt. Stephen Decatur, Jr., 16 February 1804. Lieutenant Decatur and his 84‑man crew sailed ketch , disguised as an Arab ship, into Tripoli Harbor to destroy the recently captured U.S. frigate and prevent her use against the United States. The name Mansfield was canceled for DD-594 and reassigned to DD-728 on 26 July 1943. DD-594 was renamed on 21 March 1944 before launching. Initial operations Mansfield was laid down 28 August 1943 by the Bath Iron Works Corp., Bath, Maine; launched 29 January 1944; sponsored by Mrs. Edmond F. Jewell; and commissioned 14 April 1944. After shakedown off Bermuda and further training at Norfolk and Casco Bay, Maine, Mansfield steamed via the Panama Canal for the West Coast, arriving San Diego 10 September 1944. A week later, in company with DesDiv 122, she headed for Pearl Harbor, conducting training exercises en route. After antiaircraft and shore bombardment exercises at Pearl Harbor, Mansfield and four other destroyers escorted a convoy to Ulithi. World War II There Mansfield joined TG 38.1 to screen and serve as picket during carrier strikes against central Luzon, including the Manila area. On 10 December 1944, Mansfield, with DesRon 61 in TG 38.2, again screened raids on Luzon. After several successful strikes, a sudden typhoon canceled further strikes and capsized destroyers , , and . Mansfield's task group picked up survivors and returned to Ulithi. On 30 December, Mansfield joined TG 30.1 for airstrikes against Formosa and central Luzon. Afterwards, Admiral William Halsey took the 3rd Fleet, with TG 30.1, through the Bashi Strait into the South China Sea. 
However, no Japanese units challenged Halsey's fleet during its 3,800 mile foray along the China coast from Hong Kong to Saigon. From 10 to 20 January 1945, 3rd Fleet aircraft battered enemy facilities and merchant ships and destroyed 112 Japanese planes. In early February 1945, Mansfield screened in TG 58.1 as carriers flew strikes against targets in the Tokyo industrial area. On 15 February Mansfield helped splash an enemy fighter closing the formation. From 17 to 23 February, TG 58.1 lent fighter support for the Iwo Jima assault, then steamed at full speed back to the Tokyo area for bombing runs on Nagoya and Kobe. As heavy weather set in, the task group retired southward, pounding enemy shore installations on Okinawa while en route to Ulithi for replenishment. From 14 March to 27 April 1945, Mansfield screened carriers during strikes against southern Kyushu, followed by sweeps against Okinawa Gunto. On 9 May 1945, her flattops again pounded Kyushu, Okinawa, and the island groups between. From 28 May, when the 5th Fleet again became the 3rd Fleet and TG 58.1 became TG 38.1, to the Japanese surrender 15 August, the destroyer operated off the Japanese homeland. Three weeks before VJ Day, Mansfield, with eight destroyers of DesRon 61, conducted a daring high‑speed torpedo run into Nojima Saki, sinking or damaging four enemy ships. After witnessing the formal Japanese surrender ceremony (alongside the USS Missouri) in September in Tokyo Bay, Mansfield returned to the West Coast. During the postwar years, the combat veterans trained reservists from the West Coast and made annual cruises to WestPac as part of the Destroyer Force, Pacific Fleet. Korean War On 27 June 1950, two days after the North Korean invasion of South Korea, Mansfield steamed from Sasebo, Japan, to South Korea to provide gunfire support and escort services. Three months later, as flagship for DesDiv 91, she led the division into Inchon Channel, openly inviting shore batteries to unmask themselves. 
After the shore opened up upon her, Mansfield smothered them with a 5‑inch bombardment; she suffered no damage or casualties in the action. Two weeks after Inchon, Mansfield, while searching for a downed Air Force B‑26, struck a mine which severed the bow below the main deck and seriously injured 27 crewmembers. Receiving a stub bow at Subic Bay, she steamed to Naval Shipyard, Bremerton, Washington for repairs; rejoining the U.N. Fleet off South Korea late in 1951 for gunfire support, escort, and shore bombardment duty. After Korea, Mansfield alternated between duty in WestPac and training West Coast reservists. Overhauled in the fall of 1955 at the Naval Shipyard, Long Beach, California, she returned there in 1960 for FRAM. The Mark II overhaul and conversion replaced her 3-inch 50 cal. battery with Mark 25 and Mark 32 antisubmarine torpedo batteries, and configured the aft superstructure for DASH. From October 1960 to October 1961, the "new" destroyer conducted training exercises with the 1st Fleet off the West Coast. For the following 3 years, home ported at Yokosuka, she provided escort service for the 7th Fleet's Fast Carrier Attack Force. Vietnam To be home ported at Long Beach, the destroyer returned to the United States in June 1964. On 20 August 1965 Mansfield again sailed westward for duty with the 7th Fleet. For the next 6 months she carried out screening and plane guard duties with fast carriers, and provided gunfire support for South Vietnamese, Australian, and American forces fighting in South Vietnam. In June 1966, Mansfield was once again assigned Yokosuka Naval Base as her homeport, after which her deployment schedule repeatedly took her back to the South China Sea for operations off the coast of Vietnam. 
Excluding 2 weeks in September with TF 130 as an alternate recovery ship for Gemini XI and 2 weeks in late November as station ship at Hong Kong, she spent the remainder of 1966 off the Vietnamese coast in roles which ranged from blockade patrol in the I Corps area and the interdiction of junk and sampan traffic from the north into South Vietnam, to gunfire support south of Saigon. Adding air‑sea rescue to her services in 1967, she continued to carry out similar missions in support of Allied operations in Vietnam from 1967 into 1970. On 25 September 1967, at 0930, the Mansfield was hit by a North Vietnamese shore battery off Tiger Island, north of the DMZ. MM2 Richard Archer was KIA and 19 were wounded. End of career The Mansfield was decommissioned 4 February 1971. On 1 February 1974, she was officially stricken from the Navy Vessel Register. Along with USS Collett, she was sold to Argentina, 4 June 1974, and cannibalized for spare parts. Mansfield received five battle stars for World War II service, three for Korean service, and at least three for Vietnam service. See also , intended name Mansfield References External links navsource.org: USS Mansfield hazegray.org: USS Mansfield Invasions of Inchon and Wonsan remembered Allen M. Sumner-class destroyers of the United States Navy Ships built in Bath, Maine 1944 ships World War II destroyers of the United States Cold War destroyers of the United States Korean War destroyers of the United States Vietnam War destroyers of the United States Seguí-class destroyers
Sindhanur is a city and taluk headquarter of Sindhanur taluk of Raichur District in Karnataka. The river Tungabhadra covers the irrigation area by left bank canal. Most of the land in the field is composed of cultivable black soil. Paddy is cultivated using the Tungabhadra River water. Sindhanur is also known as the Paddy Granary of Raichur. With the availability of Tungabhadra river water, paddy rice is grown twice a year. Sindhanur is the place where the majority of tractor sales take place in Asia, as agricultural activities take place year-round. Sona Masuri and Basmati rice are grown in Sindhanur. Amba Matha, also spelled as Amba Mutt (Kannada: ಅಂಬಾ ಮಠ), is a village near Somalapura in the Sindhanur taluk. Amba Matha is a holy place; Sri Amba Devi Temple is located in the village. Geography Sindhanur is located at . Sindhanur is a City and City Municipal Council located in Raichur District in the state of Karnataka. Residents prefer the many amenities found in Sindhanur over other district zones in Karnataka. Sindhanur city has 37,040 households and is divided into 31 wards. It has food services like Swiggy. Sindhanur city elections are held every 5 years to elect a representative of each ward. It has an average elevation of , and its area is . The town is also associated with Sufi saints, including descendants of Hazrath Khaja Banda Nawaz of Gulbarga, such as Hazrath Syed Shah Peer Sahab Hussaini. Economy Sindhanur is a commercial center and a major focal point for the paddy industry, with its rural areas being important for paddy cultivation – it is considered the Paddy Granary of Raichur. The Sindhanur Taluk is the taluk with the most cooperative associations. Many tractor company showrooms are in Sindhanur, also automobile spare parts sales. A railway line and railway station are being constructed (in 2023). Demographics As of a 2017 Indian Census, Sindhanur had a population of 116,837 (59,029 male, 57,808 female), representing a 54.06% increase since 2011.
The sex-ratio of Sindhanur city is around 994, higher than the state average of 973. Sindhanur City has an average literacy rate of 83.98%, higher than the national average of 59.5%; male literacy is 87.72%, and female literacy is 80.01%. In Sindhanur, 19.44% of the population is under 6 years of age. Majority of population speak Kannada and minor population speak different languages like Telugu, Bengali, Rajasthani, Urdu, etc. The Ambamma jathre Fair at Sindhanur is famous which is held in January of every year at Amba matha An evening fair is a special event here. Interesting thing to notice about Sindhanur Taluk is its diversity. 20% of the population is Bengali speaking(Bengali camp sindhanur). Rajasthani, Telugu, Urdu are other minor languages. The dominant religion of town is Hinduism (64.54%) with significant Muslim (32.71%) and Christian (0.46%) populations. Most of the Christians are Roman Catholic. Most of the remaining people follow Sikhism, Jainism and other religions. Religion References http://sindhanurcity.mrc.gov.in/ Cities and towns in Raichur district Taluks of Karnataka
```scss
// Import-screen content area: bottom spacing for the panel itself and for
// the plugins notice shown inside it.
.e-app-import-content {
	// Fixed stray space before the colon so it matches the
	// `margin-block-end:` declaration style used below.
	padding-block-end: spacing(20);

	&__plugins-notice {
		margin-block-end: spacing(20);
	}
}
```
Hugh Campbell (20 January 1911 — after 1939) was a Scottish professional footballer who played as a winger. Career Born in Glasgow, Campbell began his career with Rangers but did not make an appearance for the first team. After a spell with Clapton Orient, he joined Cardiff City where he made one appearance in a 2–1 defeat to Bristol City. He later spent time with Ballymena before finishing his professional career with Halifax Town. References 1911 births Date of death missing Footballers from Glasgow Scottish men's footballers Rangers F.C. players Leyton Orient F.C. players Cardiff City F.C. players Ballymena F.C. players Halifax Town A.F.C. players English Football League players Men's association football wingers
```html <html lang="en"> <head> <title>AArch64 Directives - Using as</title> <meta http-equiv="Content-Type" content="text/html"> <meta name="description" content="Using as"> <meta name="generator" content="makeinfo 4.11"> <link title="Top" rel="start" href="index.html#Top"> <link rel="up" href="AArch64_002dDependent.html#AArch64_002dDependent" title="AArch64-Dependent"> <link rel="prev" href="AArch64-Floating-Point.html#AArch64-Floating-Point" title="AArch64 Floating Point"> <link rel="next" href="AArch64-Opcodes.html#AArch64-Opcodes" title="AArch64 Opcodes"> <link href="path_to_url" rel="generator-home" title="Texinfo Homepage"> <!-- This file documents the GNU Assembler "as". Permission is granted to copy, distribute and/or modify this document or any later version published by the Free Software Foundation; with no Invariant Sections, with no Front-Cover Texts, and with no Back-Cover Texts. A copy of the license is included in the --> <meta http-equiv="Content-Style-Type" content="text/css"> <style type="text/css"><!-- pre.display { font-family:inherit } pre.format { font-family:inherit } pre.smalldisplay { font-family:inherit; font-size:smaller } pre.smallformat { font-family:inherit; font-size:smaller } pre.smallexample { font-size:smaller } pre.smalllisp { font-size:smaller } span.sc { font-variant:small-caps } span.roman { font-family:serif; font-weight:normal; } span.sansserif { font-family:sans-serif; font-weight:normal; } --></style> </head> <body> <div class="node"> <p> <a name="AArch64-Directives"></a> Next:&nbsp;<a rel="next" accesskey="n" href="AArch64-Opcodes.html#AArch64-Opcodes">AArch64 Opcodes</a>, Previous:&nbsp;<a rel="previous" accesskey="p" href="AArch64-Floating-Point.html#AArch64-Floating-Point">AArch64 Floating Point</a>, Up:&nbsp;<a rel="up" accesskey="u" href="AArch64_002dDependent.html#AArch64_002dDependent">AArch64-Dependent</a> <hr> </div> <h4 class="subsection">9.1.5 AArch64 Machine Directives</h4> <p><a 
name="index-machine-directives_002c-AArch64-547"></a><a name="index-AArch64-machine-directives-548"></a> <dl> <!-- AAAAAAAAAAAAAAAAAAAAAAAAA --> <p><a name=your_sha256_hash></a><dt><code>.arch </code><var>name</var><dd>Select the target architecture. Valid values for <var>name</var> are the same as for the <samp><span class="option">-march</span></samp> commandline option. <p>Specifying <code>.arch</code> clears any previously selected architecture extensions. <p><a name=your_sha256_hash2c-AArch64-550"></a><br><dt><code>.arch_extension </code><var>name</var><dd>Add or remove an architecture extension to the target architecture. Valid values for <var>name</var> are the same as those accepted as architectural extensions by the <samp><span class="option">-mcpu</span></samp> commandline option. <p><code>.arch_extension</code> may be used multiple times to add or remove extensions incrementally to the architecture being compiled for. <!-- BBBBBBBBBBBBBBBBBBBBBBBBBB --> <p><a name="index-g_t_0040code_007b_002ebss_007d-directive_002c-AArch64-551"></a><br><dt><code>.bss</code><dd>This directive switches to the <code>.bss</code> section. <!-- CCCCCCCCCCCCCCCCCCCCCCCCCC --> <!-- DDDDDDDDDDDDDDDDDDDDDDDDDD --> <!-- EEEEEEEEEEEEEEEEEEEEEEEEEE --> <!-- FFFFFFFFFFFFFFFFFFFFFFFFFF --> <!-- GGGGGGGGGGGGGGGGGGGGGGGGGG --> <!-- HHHHHHHHHHHHHHHHHHHHHHHHHH --> <!-- IIIIIIIIIIIIIIIIIIIIIIIIII --> <!-- JJJJJJJJJJJJJJJJJJJJJJJJJJ --> <!-- KKKKKKKKKKKKKKKKKKKKKKKKKK --> <!-- LLLLLLLLLLLLLLLLLLLLLLLLLL --> <p><a name=your_sha256_hash2"></a><br><dt><code>.ltorg</code><dd>This directive causes the current contents of the literal pool to be dumped into the current section (which is assumed to be the .text section) at the current location (aligned to a word boundary). GAS maintains a separate literal pool for each section and each sub-section. The <code>.ltorg</code> directive will only affect the literal pool of the current section and sub-section. 
At the end of assembly all remaining, un-empty literal pools will automatically be dumped. <p>Note - older versions of GAS would dump the current literal pool any time a section change occurred. This is no longer done, since it prevents accurate control of the placement of literal pools. <!-- MMMMMMMMMMMMMMMMMMMMMMMMMM --> <!-- NNNNNNNNNNNNNNNNNNNNNNNNNN --> <!-- OOOOOOOOOOOOOOOOOOOOOOOOOO --> <!-- PPPPPPPPPPPPPPPPPPPPPPPPPP --> <p><a name=your_sha256_hash></a><br><dt><code>.pool</code><dd>This is a synonym for .ltorg. <!-- QQQQQQQQQQQQQQQQQQQQQQQQQQ --> <!-- RRRRRRRRRRRRRRRRRRRRRRRRRR --> <p><a name="index-g_t_0040code_007b_002ereq_007d-directive_002c-AArch64-554"></a><br><dt><var>name</var><code> .req </code><var>register name</var><dd>This creates an alias for <var>register name</var> called <var>name</var>. For example: <pre class="smallexample"> foo .req w0 </pre> <!-- SSSSSSSSSSSSSSSSSSSSSSSSSS --> <!-- TTTTTTTTTTTTTTTTTTTTTTTTTT --> <!-- UUUUUUUUUUUUUUUUUUUUUUUUUU --> <p><a name=your_sha256_hash5"></a><br><dt><code>.unreq </code><var>alias-name</var><dd>This undefines a register alias which was previously defined using the <code>req</code> directive. For example: <pre class="smallexample"> foo .req w0 .unreq foo </pre> <p>An error occurs if the name is undefined. Note - this pseudo op can be used to delete builtin in register name aliases (eg 'w0'). This should only be done if it is really necessary. <!-- VVVVVVVVVVVVVVVVVVVVVVVVVV --> <!-- WWWWWWWWWWWWWWWWWWWWWWWWWW --> <!-- XXXXXXXXXXXXXXXXXXXXXXXXXX --> <!-- YYYYYYYYYYYYYYYYYYYYYYYYYY --> <!-- ZZZZZZZZZZZZZZZZZZZZZZZZZZ --> <p><a name=your_sha256_hash6"></a><br><dt><code>.xword</code><dd>The <code>.xword</code> directive produces 64 bit values. </dl> </body></html> ```
```coffeescript
# Slim chat-message component: renders one message with author, body,
# attachments (file / quote / rtf / snippet / speech / forwarded message),
# an optional inline editor, and a toolbar/timestamp side bar.
# NOTE(review): original file arrived with collapsed whitespace; indentation
# below is a faithful reconstruction — confirm grouping against history.
cx = require 'classnames'
xss = require 'xss'
React = require 'react'
recorder = require 'actions-recorder'
Immutable = require 'immutable'
PureRenderMixin = require 'react-addons-pure-render-mixin'
assign = require 'object-assign'
query = require '../query'
mixinMessageHandler = require '../mixin/message-handler'
mixinMessageContent = require '../mixin/message-content'
lang = require '../locales/lang'
detect = require '../util/detect'
format = require '../util/format'
notifyActions = require '../actions/notify'
routerHandlers = require '../handlers/router'
QuoteSlim = React.createFactory require './quote-slim'
FileGlance = React.createFactory require './file-glance'
MessageToolbar = React.createFactory require './message-toolbar'
MessageAttachmentSlim = React.createFactory require './message-attachment-slim'
MessageInlineEditor = React.createFactory require './message-inline-editor'
RelativeTime = React.createFactory require '../module/relative-time'
MessageRichSpeech = React.createFactory require '../module/message-rich-speech'

# DOM factories
a = React.createFactory 'a'
div = React.createFactory 'div'
span = React.createFactory 'span'
strong = React.createFactory 'strong'

L = lang.getText
T = React.PropTypes

module.exports = React.createClass
  displayName: 'message-slim'
  mixins: [mixinMessageHandler, mixinMessageContent, PureRenderMixin]
  propTypes:
    isDuplicated: T.bool          # true when rendered right after a message by the same author
    isUnread: T.bool
    selected: T.bool
    showActions: T.bool
    onClick: T.func
    onFileClick: T.func.isRequired
    message: T.instanceOf(Immutable.Map)
    isEditMode: T.bool

  getDefaultProps: ->
    isDuplicated: false
    isUnread: false
    selected: false
    showActions: false
    isEditMode: false

  # Forwards the clicked message id to the optional onClick prop.
  onClick: ->
    @props.onClick? @props.message.get('_id')

  # Renders a preview line for an RTF ("post") attachment.
  # NOTE(review): the guard uses `and`; when there are NO attachments the
  # right operand still calls `.get(0)` and would throw — `or` looks
  # intended. Left untouched; confirm before changing.
  # NOTE(review): `attachment.data.text` accesses plain properties on what
  # elsewhere is an Immutable.Map — verify this path is actually exercised.
  renderAttachmentRTF: ->
    return if not @props.message.get('attachments')?.size and @props.message.get('attachments').get(0).get('category') isnt 'rtf'
    attachment = @props.message.get('attachments').get(0)
    maybeImage = detect.imageUrlInHtml attachment.data.text
    html = format.htmlAsText attachment.data.text
    textLength = html.trim().length
    content = format.textAsAbbr html
    div onClick: @onPostViewerShow,
      span className: 'slim-post-text',
        if textLength is 0 and maybeImage then L('images-only') else content

  # Currently renders nothing beyond its guard — appears to be a stub.
  # NOTE(review): same suspicious `and` guard as renderAttachmentRTF.
  renderAttachmentQuote: ->
    return if not @props.message.get('attachments')?.size and @props.message.get('attachments').get(0).get('category') isnt 'quote'

  # Author line (avatar area + clickable author name).
  renderMessageAuthor: ->
    div className: 'avator',
      strong ref: 'author', className: 'name', onClick: @onAuthorClick,
        @getAuthorName()

  # Message body: text content (if any) followed by attachments.
  renderMessageBody: ->
    div className: 'container',
      if @props.message.get('body')?.length > 0
        @renderContent()
      @renderMessageAttachment()

  # Dispatches each attachment to the component matching its category.
  renderMessageAttachment: ->
    return if not @props.message.get('attachments')?.size
    div className: 'attachment',
      @props.message.get('attachments').map (attachment, index) =>
        data = attachment.get('data')
        switch attachment.get('category')
          when 'file'
            FileGlance key: index, progress: attachment.get('progress'), file: data, onClick: => @props.onFileClick(attachment)
          when 'quote'
            QuoteSlim key: index, quote: data, onClick: @onQuoteRedirect
          when 'rtf'
            QuoteSlim key: index, quote: data, onClick: @onPostViewerShow
          when 'snippet'
            QuoteSlim key: index, quote: data, onClick: @onSnippetViewerShow
          when 'speech'
            MessageRichSpeech
              key: index
              source: data.get('previewUrl')
              duration: data.get('duration')
              isUnread: @props.isUnread
          when 'message'
            # Forwarded message: jump to its room if the room still exists,
            # otherwise show a "topic not exists" notification.
            _roomId = attachment.getIn(['data', 'room', '_id'])
            _teamId = attachment.getIn(['data', '_teamId'])
            _messageId = attachment.getIn(['data', '_id'])
            onClick = ->
              topics = query.topicsBy(recorder.getState(), _teamId)
              if topics.map((room) -> room.get('_id')).includes(_roomId)
                routerHandlers.room _teamId, _roomId, {search: _messageId}
              else
                notifyActions.info(lang.getText('topic-not-exists'))
            MessageAttachmentSlim
              key: index
              message: data
              onClick: onClick

  # Side column: toolbar plus relative timestamp (updatedAt wins over createdAt).
  renderMessageSide: ->
    div className: 'side static-line',
      MessageToolbar message: @props.message
      RelativeTime data: (@props.message.get('updatedAt') or @props.message.get('createdAt'))

  # Compact body used in slim listings.
  # NOTE(review): `quoteText` reads ['data', 'title'] — identical to
  # `quoteTitle`; ['data', 'text'] was possibly intended. Confirm.
  renderSlimBody: ->
    # in case of empty link title
    firstAttachment = @props.message.getIn(['attachments', 0]) or Immutable.Map()
    quoteTitle = firstAttachment.getIn(['data', 'title'])
    quoteText = firstAttachment.getIn(['data', 'title'])
    quoteContent = if quoteText then format.textAsAbbr format.htmlAsText quoteText else undefined
    div className: 'body',
      if @props.message.get('attachments')?.get(0).get('data')?.get('text').size
        @renderSlimPost()
      else if @props.message.get('attachments')?.get(0).get('data')?.get('category') is 'file'
        undefined
      else
        @renderContent()
      div className: 'actions static-line',
        MessageToolbar message: @props.message
        RelativeTime data: (@props.message.get('updatedAt') or @props.message.get('createdAt'))
      if firstAttachment.get('category') is 'file'
        FileGlance file: firstAttachment.get('data'), onClick: => @props.onFileClick(firstAttachment)
      if @props.message.get('attachments')?.get(0).get('category') is 'speech'
        MessageRichSpeech
          source: firstAttachment.getIn(['data', 'previewUrl'])
          isUnread: @props.isUnread
          duration: firstAttachment.getIn(['data', 'duration'])
      if firstAttachment.get('category') is 'quote'
        # Quotes with a redirect URL open in a new window; others are inert.
        if firstAttachment.getIn(['data', 'redirectUrl'])?
          onClick = -> window.open firstAttachment.getIn(['data', 'redirectUrl'])
        else
          onClick = -> return false
        div className: 'quote line', onClick: onClick,
          if firstAttachment.getIn(['data', 'authorName'])?
            span className: 'author', firstAttachment.getIn(['data', 'authorName'])
          if firstAttachment.getIn(['data', 'title'])?
            span className: 'short text muted', (format.htmlAsText quoteTitle)
          if quoteContent
            div className: 'content', quoteContent

  renderInlineMessageEditor: ->
    MessageInlineEditor message: @props.message

  # Top-level assembly: author (unless a duplicated run), body or inline
  # editor, side bar, and the overlay viewers supplied by the mixins.
  renderMessage: ->
    return if not @props.message?
    _userId = query.userId(recorder.getState())
    messageReceiptData = @getMessageReceiptData()
    isDuplicated = @props.isDuplicated and not @props.isEditMode
    classMessage = cx 'message-slim', messageReceiptData.class,
      'is-duplicated': isDuplicated
      'be-mine': @props.message.getIn(['creator', '_id']) is _userId
      'is-robot': @props.message.getIn(['creator', 'isRobot'])
      'is-selected': @props.selected
      'is-local': @props.message.get('isLocal')
    props = assign
      className: classMessage
      onClick: @onClick
    , messageReceiptData.props
    div props,
      unless isDuplicated
        @renderMessageAuthor()
      if @props.isEditMode
        @renderInlineMessageEditor()
      else
        @renderMessageBody()
      @renderMessageSide()
      @renderMemberCard()
      @renderPostViewer()
      @renderSnippetViewer()

  render: ->
    @renderMessage()
```
```java
/*
    This library is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public
    version 2 of the license, or (at your option) any later version.
*/
package org.gjt.jclasslib.mdi;

import org.gjt.jclasslib.util.GUIHelper;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.beans.PropertyVetoException;
import java.lang.reflect.Constructor;
import java.util.*;
import java.util.List;
import java.util.prefs.Preferences;

/**
    Parent frame for MDI application. Handles window actions, state saving and
    loading and supplies various utility methods.

    @author <a href="mailto:jclasslib@ej-technologies.com">Ingo Kegel</a>
    @version $Revision: 1.1 $ $Date: 2005/11/01 13:18:24 $
*/
public class BasicMDIFrame extends JFrame {

    private static final int DEFAULT_WINDOW_WIDTH = 800;
    private static final int DEFAULT_WINDOW_HEIGHT = 600;

    // Preference keys for persisting the frame geometry between sessions.
    private static final String SETTINGS_WINDOW_WIDTH = "windowWidth";
    private static final String SETTINGS_WINDOW_HEIGHT = "windowHeight";
    private static final String SETTINGS_WINDOW_X = "windowX";
    private static final String SETTINGS_WINDOW_Y = "windowY";
    private static final String SETTINGS_WINDOW_MAXIMIZED = "windowMaximized";

    // Actions

    /** Action for selecting the next child window. */
    protected Action actionNextWindow;
    /** Action for selecting the previous child window. */
    protected Action actionPreviousWindow;
    /** Action for tiling all child windows. */
    protected Action actionTileWindows;
    /** Action for stacking all child windows. */
    protected Action actionStackWindows;

    // Visual components

    /** Scroll pane wrapping the <tt>JDesktopPane</tt> which contains all child windows. */
    protected JScrollPane scpDesktop;
    /** The desktop pane. */
    protected JDesktopPane desktopPane;
    /** <tt>DesktopManager</tt> for this MDI parent frame. */
    protected BasicDesktopManager desktopManager;
    /** <tt>JMenu</tt> for window actions. */
    protected JMenu menuWindow;

    // Bounds the frame last had while NOT maximized; saved instead of the
    // maximized bounds so the frame restores to a sensible size.
    private Rectangle lastNormalFrameBounds;

    /**
        Constructor.
     */
    public BasicMDIFrame() {

        setupActions();
        setupMenu();
        setupFrame();
        setupEventHandlers();
        loadWindowSettings();
    }

    /**
        Create a <tt>BasicDesktopManager</tt> for this MDI parent window.
        @return the <tt>BasicDesktopManager</tt>
     */
    protected BasicDesktopManager createDesktopManager() {
        return new BasicDesktopManager(this);
    }

    /**
        Exit the application. Persists the window geometry first.
     */
    protected void doQuit() {
        saveWindowSettings();
        dispose();
        //System.exit(0);
    }

    /**
        Close all internal frames.
     */
    protected void closeAllFrames() {
        // doDefaultCloseAction() removes the frame from the open-frames list,
        // so keep taking element 0 until the list is empty.
        List frames = desktopManager.getOpenFrames();
        while (frames.size() > 0) {
            BasicInternalFrame frame = (BasicInternalFrame)frames.get(0);
            frame.doDefaultCloseAction();
        }
    }

    /**
        Create an <tt>MDIConfig</tt> object that describes the current configuration of
        all internal frames. This object can be serialized and reactivated with
        <tt>readMDIConfig</tt>.
        @return the <tt>MDIConfig</tt> object
     */
    protected MDIConfig createMDIConfig() {
        MDIConfig config = new MDIConfig();
        List openFrames = desktopManager.getOpenFrames();
        List<MDIConfig.InternalFrameDesc> internalFrameDescs = new ArrayList<MDIConfig.InternalFrameDesc>(openFrames.size());

        for (int i = 0; i < openFrames.size(); i++) {
            BasicInternalFrame internalFrame = (BasicInternalFrame)openFrames.get(i);
            // Use the normal (unmaximized/uniconified) bounds for persistence.
            Rectangle bounds = internalFrame.getNormalBounds();
            MDIConfig.InternalFrameDesc internalFrameDesc = new MDIConfig.InternalFrameDesc();
            internalFrameDesc.setClassName(internalFrame.getClass().getName());
            internalFrameDesc.setInitParam(internalFrame.getInitParam());
            internalFrameDesc.setX(bounds.x);
            internalFrameDesc.setY(bounds.y);
            internalFrameDesc.setWidth(bounds.width);
            internalFrameDesc.setHeight(bounds.height);
            internalFrameDesc.setMaximized(internalFrame.isMaximum());
            internalFrameDesc.setIconified(internalFrame.isIcon());

            if (internalFrame == desktopPane.getSelectedFrame()) {
                config.setActiveFrameDesc(internalFrameDesc);
            }
            internalFrameDescs.add(internalFrameDesc);
        }
        config.setInternalFrameDescs(internalFrameDescs);

        return config;
    }

    /**
        Takes an <tt>MDIConfig</tt> object that describes a configuration of internal
        frames and populates the MDI frame with this configuration. Frames are
        instantiated reflectively via the constructor returned by
        <tt>getFrameConstructorArguments</tt>; unknown classes are skipped.
        @param config the <tt>MDIConfig</tt> object to be read
     */
    protected void readMDIConfig(MDIConfig config) {

        boolean anyFrameMaximized = false;

        Iterator it = config.getInternalFrameDescs().iterator();
        while (it.hasNext()) {
            MDIConfig.InternalFrameDesc internalFrameDesc = (MDIConfig.InternalFrameDesc)it.next();

            Constructor frameConstructor;
            try {
                Class frameClass = Class.forName(internalFrameDesc.getClassName());
                frameConstructor = frameClass.getConstructor(getFrameConstructorArguments(frameClass));
            } catch (ClassNotFoundException ex) {
                System.out.println("class not found:" + ex.getMessage());
                continue;
            } catch (NoSuchMethodException ex) {
                System.out.println("constructor not found:" + ex.getMessage());
                continue;
            }
            BasicInternalFrame frame;
            try {
                frame = (BasicInternalFrame)frameConstructor.newInstance(new Object[] {desktopManager, internalFrameDesc.getInitParam()});
            } catch (Exception ex) {
                // NOTE(review): this prints the same exception twice and never
                // prints 'cause' — 'cause.printStackTrace()' was likely intended.
                ex.printStackTrace();
                Throwable cause = ex.getCause();
                if (cause != null) {
                    ex.printStackTrace();
                }
                continue;
            }
            desktopManager.resizeFrame(
                    frame,
                    internalFrameDesc.getX(),
                    internalFrameDesc.getY(),
                    internalFrameDesc.getWidth(),
                    internalFrameDesc.getHeight()
            );

            boolean frameMaximized = internalFrameDesc.isMaximized();
            anyFrameMaximized = anyFrameMaximized || frameMaximized;

            try {
                // NOTE(review): once one frame was maximized, every later frame
                // is maximized too (anyFrameMaximized stays true) — confirm this
                // matches the intended restore behavior.
                if (frameMaximized || anyFrameMaximized) {
                    frame.setMaximum(true);
                } else if (internalFrameDesc.isIconified()) {
                    frame.setIcon(true);
                }
            } catch (PropertyVetoException ex) {
            }

            if (internalFrameDesc == config.getActiveFrameDesc()) {
                desktopManager.setActiveFrame(frame);
            }

        }
        desktopManager.showAll();
    }

    /**
        Get the constructor arguments classes for the constructor of the supplied
        frame class.
        @param frameClass the frame class.
        @return the constructor argument classes.
     */
    protected Class[] getFrameConstructorArguments(Class frameClass) {
        return BasicInternalFrame.CONSTRUCTOR_ARGUMENTS;
    }

    private void setupActions() {

        actionNextWindow = new WindowAction("Next window");
        actionNextWindow.putValue(Action.SHORT_DESCRIPTION, "Cycle to the next opened window");
        actionNextWindow.setEnabled(false);

        actionPreviousWindow = new WindowAction("Previous window");
        actionPreviousWindow.putValue(Action.SHORT_DESCRIPTION, "Cycle to the previous opened window");
        actionPreviousWindow.setEnabled(false);

        actionTileWindows = new WindowAction("Tile windows");
        actionTileWindows.putValue(Action.SHORT_DESCRIPTION, "Tile all windows in the main frame");
        actionTileWindows.setEnabled(false);

        actionStackWindows = new WindowAction("Stack windows");
        actionStackWindows.putValue(Action.SHORT_DESCRIPTION, "Stack all windows in the main frame");
        actionStackWindows.setEnabled(false);

    }

    private void setupMenu() {

        // NOTE(review): Event.CTRL_MASK is deprecated on modern JDKs
        // (InputEvent.CTRL_DOWN_MASK); left unchanged for source compatibility.
        menuWindow = new JMenu("Window");
        menuWindow.add(actionPreviousWindow).setAccelerator(
                KeyStroke.getKeyStroke(KeyEvent.VK_F2, Event.CTRL_MASK));
        menuWindow.add(actionNextWindow).setAccelerator(
                KeyStroke.getKeyStroke(KeyEvent.VK_F3, Event.CTRL_MASK));
        menuWindow.add(actionTileWindows);
        menuWindow.add(actionStackWindows);
    }

    private void setupFrame() {

        setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);

        Container contentPane = getContentPane();
        contentPane.setLayout(new BorderLayout(5,5));
        contentPane.add(buildDesktop(), BorderLayout.CENTER);
    }

    private void setupEventHandlers() {

        addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent event) {
                doQuit();
            }
        });

        addComponentListener(new ComponentAdapter() {
            public void componentResized(ComponentEvent event) {
                desktopManager.checkResizeInMaximizedState();
                recordLastNormalFrameBounds();
            }

            public void componentMoved(ComponentEvent event) {
                recordLastNormalFrameBounds();
            }
        });

    }

    private void saveWindowSettings() {
        Preferences preferences = Preferences.userNodeForPackage(getClass());
        boolean maximized = (getExtendedState() & MAXIMIZED_BOTH) != 0;
        preferences.putBoolean(SETTINGS_WINDOW_MAXIMIZED, maximized);
        // When maximized, persist the last normal bounds so a later restore
        // does not come back maximized-sized.
        Rectangle frameBounds = maximized ? lastNormalFrameBounds : getBounds();

        if (frameBounds != null) {
            preferences.putInt(SETTINGS_WINDOW_WIDTH, frameBounds.width);
            preferences.putInt(SETTINGS_WINDOW_HEIGHT, frameBounds.height);
            preferences.putInt(SETTINGS_WINDOW_X, frameBounds.x);
            preferences.putInt(SETTINGS_WINDOW_Y, frameBounds.y);
        }
    }

    private void loadWindowSettings() {

        Preferences preferences = Preferences.userNodeForPackage(getClass());

        // Defaults center the frame on the primary screen.
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        Rectangle screenBounds = new Rectangle(screenSize);

        int windowX = preferences.getInt(SETTINGS_WINDOW_X, (int)(screenSize.getWidth() - DEFAULT_WINDOW_WIDTH)/2);
        int windowY = preferences.getInt(SETTINGS_WINDOW_Y, (int)(screenSize.getHeight() - DEFAULT_WINDOW_HEIGHT)/2);
        int windowWidth = preferences.getInt(SETTINGS_WINDOW_WIDTH, DEFAULT_WINDOW_WIDTH);
        int windowHeight = preferences.getInt(SETTINGS_WINDOW_HEIGHT, DEFAULT_WINDOW_HEIGHT);

        Rectangle frameBounds = new Rectangle(windowX, windowY, windowWidth, windowHeight);
        // sanitize frame bounds: shift on-screen if the saved position is
        // negative or hangs off the right/bottom edge, then clip to the screen.
        frameBounds.translate(-Math.min(0, frameBounds.x), -Math.min(0, frameBounds.y));
        frameBounds.translate(-Math.max(0, frameBounds.x + frameBounds.width - screenSize.width), -Math.max(0, frameBounds.y + frameBounds.height- screenSize.height));
        frameBounds = screenBounds.intersection(frameBounds);

        setBounds(frameBounds);
        if (preferences.getBoolean(SETTINGS_WINDOW_MAXIMIZED, false)) {
            setExtendedState(MAXIMIZED_BOTH);
        }
    }

    private void recordLastNormalFrameBounds() {
        if ((getExtendedState() & MAXIMIZED_BOTH) == 0) {
            Rectangle frameBounds = getBounds();
            if (frameBounds.getX() >= 0 && frameBounds.getY() >= 0) {
                lastNormalFrameBounds = frameBounds;
            }
        }

    }

    private JComponent buildDesktop() {

        desktopPane = new JDesktopPane();
        desktopManager = createDesktopManager();
        desktopPane.setDesktopManager(desktopManager);
        scpDesktop = new JScrollPane(desktopPane);
        GUIHelper.setDefaultScrollbarUnits(scpDesktop);

        return scpDesktop;
    }

    /** Shared action implementation; dispatches on identity in actionPerformed. */
    private class WindowAction extends AbstractAction {

        private WindowAction(String name) {
            super(name);
        }

        public void actionPerformed(ActionEvent ev) {

            if (this == actionPreviousWindow) {
                desktopManager.cycleToPreviousWindow();
            } else if (this == actionNextWindow) {
                desktopManager.cycleToNextWindow();
            } else if (this == actionTileWindows) {
                desktopManager.tileWindows();
            } else if (this == actionStackWindows) {
                desktopManager.stackWindows();
            }

        }
    }
}
```
Jadid Khan Pathan (born 6 January 1989) is a Pakistani professional footballer, who plays for Sui Southern Gas as a midfielder. He has also played for Pakistan. He earned his first international cap during the 2008 SAFF Cup against the Maldives. Early life Khan was born in Chaman, Balochistan. He spent the majority of his career playing for his hometown club Afghan Chaman, where he was the team captain from 2008 to 2011. Club career Afghan Chaman 2007–08 Khan made his debut for Afghan Chaman in the 2007–08 season. Khan spent most of his first season as a substitute; he scored his first goal for the club on 3 December 2007, scoring the opening goal in the 7th minute in a 2–2 draw against Pakistan Navy. Khan's second goal was also against Pakistan Navy in a reverse fixture on 11 January 2008, with the match ending with the similar scoreline of 2–2, with Khan scoring the equaliser in the 26th minute after Muhammad Shahzad gave the lead to Pakistan Navy from the spot in the 20th minute. Khan scored three goals in his debut season in 15 appearances, with his last goal coming against Karachi Electric Supply Corporation in a 2–0 victory. 2008–09 Khan was made the captain of the team after one year of joining. Khan scored his first goal of the 2008–09 season 2 months after the start of the season, scoring the opening goal in the 51st minute against Karachi Port Trust in a 2–0 victory. Khan's second goal of the season was against Pakistan Navy in a 3–1 loss, where he scored the first goal of the game in the 38th minute. On 22 November 2008, Khan scored his first senior hat-trick against Pakistan Television, completing his hat-trick in 15 minutes, scoring his first goal in the 61st minute, second in the 67th minute and third in the 76th minute. Khan ended his 2008–09 season with five goals in 26 appearances. 2009–10 Khan scored only one goal in his 2009–10 season, against Karachi Electric Supply Corporation in a 3–0 victory on 2 October 2009.
2010–11 After a bad season, Khan started the 2010–11 with an assist and a goal in the game against Karachi Electric Supply Corporation, assisting Moaz Khan for the opening goal in 37th and then scoring in 46th minute from Abdul Nasir's assist. Khan assisted Fazal Rehman to score the equaliser against Pak Elektron. On 14 October 2010, Khan scored his second career hat-trick in a 3–0 away win over Young Blood, scoring the goals in 70th, 81st and 87th minutes. Khan scored his seventh goal of the season against Karachi Port Trust in a 2–2 draw. Khan's eight goal of the season was opener against Habib Bank in a 1–1 draw on 4 November 2010. He provided the assist for Abdul Hadi's consolation goal against Sui Southern Gas in a 2–1 loss. On 10 November 2010, Khan scored his ninth goal of the campaign in a 4–1 win over National Bank, finding the net in 76th minute. Khan ended the campaign with 10 goals in 30 appearances. 2011–12 Khan scored in the opening match of 2011–12 season against PMC Athletico in 13th minute in a 3–0 win. Khan scored his second goal of the season against Pakistan Navy when dribbled past four defenders to score a solo goal in the 38th minute, causing home crowd to shower the player with cash prizes. Khan went on a one and half month goal drought before scoring a brace against Karachi Electric Supply Corporation on 18 September 2011, finding the net 76th minutes to reduce the deficit to 2–1, before scoring his in the 90th minute to draw the game 2–2. Four days later Khan scored the winner against National Bank in 58th minute. Khan scored his brace of the season in a reverse fixture against Karachi Electric Supply Corporation, against whom he scored a brace earlier in season. Khan found the net 39th and 56th minute in a 3–2 win. On 13 December 2011, Khan scored his third career hat-trick away from home against Baloch Nushki in a 1–3 win. Khan scored the equaliser in 30th minute before twice finding the net again on 48th and 76th minute. 
Three days later Khan scored four goals against Pakistan Police in a 6–0 victory. Khan scored his first goal in 13th minute, second in 50th, third in 56th and fourth in 65th minute. Khan won the golden boot after finishing the season as the top-scorer with 22 goals in 30 appearances, giving his team their highest position ever when they finished second in the league. 2012–13 Khan started the 2012–13 season with a goal in the opening match against local rivals Muslim in a 1–1 draw. Khan scored his third goal against title contenders WAPDA in a 1–1 draw, scoring the goal in 9th minute of the game. Khan then scored against Pakistan Airlines in a narrow 1–0 victory. Khan failed to continue to his good form from last season and ended the league season with 6 goals in 25 appearances. WAPDA 2013 During the 2012–13 season Khan joined WAPDA after the end of the league, in 2013 National Football Challenge Cup, Khan scored in the opening game against Sui Southern Gas in 5th minute, Khan completed his brace at 58th minute. Khan then scored against Karachi Electric Supply Corportion in a 3–1 victory in the cup competition. Khan reached the semi-finals where they were knocked-out by eventual winners National Bank. Afghan Chaman Second Spell: 2013–2016 Khan returned to his hometown club Afghan Chaman for the 2013–14 season. Khan opened his campaign by scoring in a 2–2 draw against newly promoted Pak Afghan Clearing. A month later, Khan scored his second goal in a 4–3 defeat to Karachi Port Trust, scoring in 29th minute. On 27 November 2013, Khan scored the brace against Karachi Port Trust in a 3–2 victory, avenging the last week's 4–3 to the Portmen. Khan scored the goals in 19th and 76th minute. Khan ended the season with 10 goals in 30 appearances. Khan scored a brace against Baloch Quetta in 5–1 win in the 2014–15 campaign, finding the net in the 20th and 66th minute. 
WAPDA Second spell: 2016 After no footballing activity in Pakistan since the conclusion of 2014–15 Pakistan Premier League, Khan once again joined WAPDA for 2016 PFF Cup as Afghan Chaman were not participating the national cup competition. Khan scored a hat-trick against Pakistan Navy in the group-stage as WAPDA defeated Pakistan Navy 5–1. Khan found the scored in 12th, 74th and 86th minute. Khan and WAPDA were knocked out in quarter-finals after Khan Research Laboratories defeated them 3–1. Sui Southern Gas 2018– present Khan joined newly promoted Sui Southern Gas after getting frustrated with life at Afghan Chaman. He was signed by former national team coach Tariq Lutfi. International career On 6 April 2009, Khan scored his first goal for Pakistan in 2010 AFC Challenge Cup qualification against Brunei in a 6–0 win at Sugathadasa Stadium, Colombo. Khan scored the second goal of the match in 31st minute. Khan also played for his country in a 1-1 draw against Nepal in the 2011 SAFF where he was substituted in the beginning of the first half, Pakistan finished third in Group B. Furthermore, he also played in 2012 AFC Challenge Cup qualifications, he was substituted off against Chinese Taipei on 38th minute due to injury. Khan was part of the national squad in 2014 AFC Challenge Cup qualifications, as an unused substitution throughout the tournament. Career statistics Club International International goals ''As of match played 21 March 2013. Pakistan score listed first, score column indicates score after each Khan goal. References External links Pakistani men's footballers Pakistan men's international footballers Pashtun footballers Sportspeople from Balochistan, Pakistan Afghan FC Chaman players Living people 1989 births Men's association football midfielders SSGC F.C. players People from Killa Abdullah District
```xml // empty file (fakes no controllers) ```
Dhapuk Simal Bhanjyang is a village development committee in Syangja District in the Gandaki Zone of central Nepal. At the time of the 1991 Nepal census it had a population of 3666 people living in 835 individual households. References External links UN map of the municipalities of Syangja District Populated places in Syangja District
```m4sugar dnl GAS_CHECK_DECL_NEEDED(name, typedefname, typedef, headers) AC_DEFUN([GAS_CHECK_DECL_NEEDED],[ AC_MSG_CHECKING(whether declaration is required for $1) AC_CACHE_VAL(gas_cv_decl_needed_$1, AC_TRY_LINK([$4], [ typedef $3; $2 x; x = ($2) $1; ], gas_cv_decl_needed_$1=no, gas_cv_decl_needed_$1=yes))dnl AC_MSG_RESULT($gas_cv_decl_needed_$1) if test $gas_cv_decl_needed_$1 = yes; then AC_DEFINE([NEED_DECLARATION_]translit($1, [a-z], [A-Z]), 1, [Define if $1 is not declared in system header files.]) fi ])dnl dnl dnl Some non-ANSI preprocessors botch requoting inside strings. That's bad dnl enough, but on some of those systems, the assert macro relies on requoting dnl working properly! dnl GAS_WORKING_ASSERT AC_DEFUN([GAS_WORKING_ASSERT], [AC_MSG_CHECKING([for working assert macro]) AC_CACHE_VAL(gas_cv_assert_ok, AC_TRY_LINK([#include <assert.h> #include <stdio.h>], [ /* check for requoting problems */ static int a, b, c, d; static char *s; assert (!strcmp(s, "foo bar baz quux")); /* check for newline handling */ assert (a == b || c == d); ], gas_cv_assert_ok=yes, gas_cv_assert_ok=no))dnl AC_MSG_RESULT($gas_cv_assert_ok) test $gas_cv_assert_ok = yes || AC_DEFINE(BROKEN_ASSERT, 1, [assert broken?]) ])dnl dnl dnl Since many Bourne shell implementations lack subroutines, use this dnl hack to simplify the code in configure.in. dnl GAS_UNIQ(listvar) AC_DEFUN([GAS_UNIQ], [_gas_uniq_list="[$]$1" _gas_uniq_newlist="" dnl Protect against empty input list. for _gas_uniq_i in _gas_uniq_dummy [$]_gas_uniq_list ; do case [$]_gas_uniq_i in _gas_uniq_dummy) ;; *) case " [$]_gas_uniq_newlist " in *" [$]_gas_uniq_i "*) ;; *) _gas_uniq_newlist="[$]_gas_uniq_newlist [$]_gas_uniq_i" ;; esac ;; esac done $1=[$]_gas_uniq_newlist ])dnl sinclude(../libtool.m4) dnl The lines below arrange for aclocal not to bring libtool.m4 dnl AM_PROG_LIBTOOL into aclocal.m4, while still arranging for automake dnl to add a definition of LIBTOOL to Makefile.in. 
ifelse(yes,no,[ AC_DEFUN([AM_PROG_LIBTOOL],) AC_DEFUN([AC_CHECK_LIBM],) AC_SUBST(LIBTOOL) ]) sinclude(../gettext.m4) ifelse(yes,no,[ AC_DEFUN([CY_WITH_NLS],) AC_SUBST(INTLLIBS) ]) ```
```swift import SwiftSyntax import SwiftParser enum ComplexType { indirect case attributed(attributes: [String], baseType: ComplexType) indirect case optional(wrappedType: ComplexType, isImplicit: Bool) indirect case array(elementType: ComplexType) indirect case dictionary(keyType: ComplexType, valueType: ComplexType) case closure(Closure) case type(String) init(syntax: TypeSyntax) { if let implicitOptionalType = syntax.as(ImplicitlyUnwrappedOptionalTypeSyntax.self) { self = .optional(wrappedType: ComplexType(syntax: implicitOptionalType.wrappedType), isImplicit: true) } else if let optionalType = syntax.as(OptionalTypeSyntax.self) { self = .optional(wrappedType: ComplexType(syntax: optionalType.wrappedType), isImplicit: false) } else if let attributedType = syntax.as(AttributedTypeSyntax.self) { self = .attributed( attributes: [ attributedType.attributes.map { $0.trimmedDescription }, attributedType.specifier.map { [$0.trimmedDescription] } ?? [], ].flatMap { $0 }, baseType: ComplexType(syntax: attributedType.baseType) ) } else if let functionType = syntax.as(FunctionTypeSyntax.self) { self = .closure( Closure( parameters: functionType.parameters.map { Closure.Parameter( label: $0.secondName?.trimmedDescription, type: ComplexType(syntax: $0.type) ) }, effects: Closure.Effects(effectSpecifiers: functionType.effectSpecifiers), returnType: ComplexType(syntax: functionType.returnClause.type) ) ) } else if let identifierType = syntax.as(IdentifierTypeSyntax.self) { switch identifierType.filteredDescription { case "Dictionary": let arguments = identifierType.genericArgumentClause?.arguments guard let keyType = arguments?.first?.argument, let valueType = arguments?.last?.argument else { fatalError("Cuckoo error: Failed to get Dictionary type, please open an issue.") } self = .dictionary( keyType: ComplexType(syntax: keyType), valueType: ComplexType(syntax: valueType) ) case "Array": let arguments = identifierType.genericArgumentClause?.arguments guard let elementType = 
arguments?.first?.argument else { fatalError("Cuckoo error: Failed to get Array type, please open an issue.") } self = .array(elementType: ComplexType(syntax: elementType)) default: self = .type(syntax.trimmedDescription) } } else { self = .type(syntax.trimmedDescription) } } // Can't use value type here. final class Closure { let parameters: [Parameter] let effects: Effects let returnType: ComplexType var description: String { let parametersString = parameters.map { parameter in [ parameter.label.map { "_ \($0)" }, parameter.type.description, ] .compactMap { $0 } .joined(separator: ": ") } .joined(separator: ", ") return [ "(\(parametersString))", effects.description, "-> \(returnType.description)", ] .compactMap { $0.trimmed.nilIfEmpty } .joined(separator: " ") } init(parameters: [Parameter], effects: Effects, returnType: ComplexType) { self.parameters = parameters self.effects = effects self.returnType = returnType } struct Parameter: Equatable { let label: String? let type: ComplexType } struct Effects: OptionSet, Equatable { let rawValue: Int static let `async` = Effects(rawValue: 1 << 0) static let `throws` = Effects(rawValue: 1 << 1) static let none: Effects = [] var description: String { [ contains(.async) ? "async" : nil, contains(.throws) ? 
"throws" : nil, ] .compactMap { $0 } .joined(separator: " ") } } } enum ComplexTypeError: Error { case parsingFailed } } extension ComplexType: Equatable { static func == (lhs: ComplexType, rhs: ComplexType) -> Bool { switch (lhs, rhs) { case (.attributed(let lhsAttributes, let lhsBaseType), .attributed(let rhsAttributes, let rhsBaseType)): lhsAttributes == rhsAttributes && lhsBaseType == rhsBaseType case (.optional(let lhsWrappedType, _), .optional(let rhsWrappedType, _)): lhsWrappedType == rhsWrappedType case (.array(let lhsElementType), .array(let rhsElementType)): lhsElementType == rhsElementType case (.dictionary(let lhsKeyType, let lhsValueType), .dictionary(let rhsKeyType, let rhsValueType)): lhsKeyType == rhsKeyType && lhsValueType == rhsValueType case (.closure(let lhsClosure), .closure(let rhsClosure)): lhsClosure == rhsClosure case (.type(let lhsIdentifier), .type(let rhsIdentifier)): lhsIdentifier.filter { !$0.isWhitespace } == rhsIdentifier.filter { !$0.isWhitespace } default: false } } } extension ComplexType.Closure: Equatable { static func == (lhs: ComplexType.Closure, rhs: ComplexType.Closure) -> Bool { lhs.parameters == rhs.parameters && lhs.effects == rhs.effects && lhs.returnType == rhs.returnType } } extension ComplexType { var isVoid: Bool { if case .type("Void") = self { true } else { false } } var isOptional: Bool { switch self { case .optional: true case .attributed(_, let baseType): baseType.isOptional case .array, .dictionary, .closure, .type: false } } var isClosure: Bool { findClosure() != nil } var unoptionaled: ComplexType { if case .optional(let wrappedType, _) = self { wrappedType } else { self } } func withoutAttributes(except whitelist: [String] = []) -> ComplexType { if case .attributed(let attributes, let baseType) = self { return .attributed(attributes: attributes.filter { whitelist.contains($0) }, baseType: baseType) } else { return self } } func containsAttribute(named name: String) -> Bool { if case .attributed(let 
attributes, _) = self { attributes.contains(name) } else { false } } func findClosure() -> Closure? { switch self { case .attributed(_, let baseType): baseType.findClosure() case .optional(let wrappedType, _): wrappedType.findClosure() case .closure(let closure): closure case .type, .array, .dictionary: nil } } } extension ComplexType.Closure.Effects { init(effectSpecifiers: TypeEffectSpecifiersSyntax?) { guard let effectSpecifiers else { self = .none return } var effects: Self = [] if effectSpecifiers.asyncSpecifier?.isPresent == true { effects.insert(.async) } if effectSpecifiers.throwsSpecifier?.isPresent == true { effects.insert(.throws) } self = effects } } extension ComplexType: CustomStringConvertible { var description: String { switch self { case .attributed(let attributes, let baseType): return "\(attributes.joined(separator: " ")) \(baseType.description)" case .optional(let wrappedType, let isImplicit): let suffix = isImplicit ? "!" : "?" if wrappedType.isClosure { return "(\(wrappedType.description))\(suffix)" } else { return "\(wrappedType.description)\(suffix)" } case .array(let elementType): return "[\(elementType)]" case .dictionary(let keyType, let valueType): return "[\(keyType): \(valueType)]" case .closure(let closure): return closure.description case .type(let type): return type } } } extension ComplexType: CustomDebugStringConvertible { var debugDescription: String { switch self { case .attributed(let attributes, let baseType): ".attributed(\(attributes.map(\.quoted)) \(baseType.debugDescription)" case .optional(let wrappedType, let isImplicit): ".optional(\(wrappedType.debugDescription), isImplicit: \(isImplicit)" case .array(let elementType): ".array(\(elementType.debugDescription))" case .dictionary(let keyType, let valueType): ".dictionary(\(keyType.debugDescription), \(valueType.debugDescription))" case .closure(let closure): ".closure(\(closure.description.quoted))" case .type(let type): type.quoted } } } ```
The 2017–18 United States national rugby sevens team season included both the 2017–18 World Rugby Sevens Series and the 2018 Rugby World Cup Sevens. The season began badly. In the first tournament of the 2017–18 World Rugby Sevens Series in Dubai, 2017 World Rugby Player of the Year Perry Baker suffered a concussion, and the United States team limped to a last place finish. The U.S. improved from that point on, reaching the semifinals of the Australia Sevens. The team then won the 2018 USA Sevens, the first time the U.S. won their home tournament, boosted in large part by Perry Baker, who led all scorers with 8 tries. Baker, along with forwards Ben Pinkelman and Danny Barrett all made the tournament Dream Team. Overall the team displayed inconsistent performances, reaching the Cup semifinals three times, but also failing to qualify for the cup quarterfinals three times. Despite the inconsistent play, the U.S. finished sixth overall. 2017–18 World Sevens Series Player statistics The following table shows the leading players for the U.S. after the 2017–18 Sevens Series season. Among all World Series, players, Isles ranked first in tries scored with 49, and Baker ranked fifth with 37. Ben Pinkelman ranked fourth in tackles with 124 and fourth in matches played with 58. Source: World Rugby website. 2018 USA Sevens The United States won the tournament by beating Argentina 28–0 in the final. This was the first time that the United States won its home tournament. USA's Perry Baker led the tournament with 8 tries and 11 breaks. Baker, Pinkelman, and Barrett were all named to the seven-man tournament Dream Team. Substitutes: Carlin Isles, Malon Aljiboori Unavailable due to injury: Madison Hughes, Stephen Tomasin, Maka Unufe With the U.S. finishing out the game with a lineup that included Isles (wing), Baker (center), Williams (fly-half) and Aljiboori (forward), it is believed that this is the first time the U.S. 
has fielded a team where the majority of players are African-American. 2018 Rugby World Cup Sevens Round of 16 Quarterfinals 5th place semi-finals 5th place final Head coach: Mike Friday See also 2018 Rugby World Cup Sevens – Women's tournament References 2017–18 United States 2018 in American rugby union 2017 in American rugby union
```c++ /* * * This program is free software: you can redistribute it and/or modify * the Free Software Foundation, either version 2 or (at your option) * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * along with this program. If not, see <path_to_url */ #include "NewDatabaseWizardPage.h" #include "ui_NewDatabaseWizardPage.h" #include "core/Database.h" #include "gui/dbsettings/DatabaseSettingsWidget.h" NewDatabaseWizardPage::NewDatabaseWizardPage(QWidget* parent) : QWizardPage(parent) , m_ui(new Ui::NewDatabaseWizardPage()) { m_ui->setupUi(this); } NewDatabaseWizardPage::~NewDatabaseWizardPage() = default; /** * Set the database settings page widget for this wizard page. * The wizard page will take ownership of the settings page widget. * * @param page database settings page widget */ void NewDatabaseWizardPage::setPageWidget(DatabaseSettingsWidget* page) { m_pageWidget = page; m_ui->pageContent->setWidget(m_pageWidget); } /** * @return database settings widget of this page widget. */ DatabaseSettingsWidget* NewDatabaseWizardPage::pageWidget() { return m_pageWidget; } /** * Set the database to be configured by the wizard page. * The wizard will NOT take ownership of the database object. * * @param db database object to be configured */ void NewDatabaseWizardPage::setDatabase(QSharedPointer<Database> db) { m_db = std::move(db); } void NewDatabaseWizardPage::initializePage() { Q_ASSERT(m_pageWidget && m_db); if (!m_pageWidget || !m_db) { return; } m_pageWidget->loadSettings(m_db); } bool NewDatabaseWizardPage::validatePage() { Q_ASSERT(m_pageWidget && m_db); if (!m_pageWidget || !m_db) { return false; } bool valid = m_pageWidget->saveSettings(); m_pageWidget->uninitialize(); return valid; } ```
```c /* _Feraise function */ #include <yvals.h> #include <errno.h> #include <ymath.h> _C_STD_BEGIN #ifdef _Feraise #undef _Feraise #endif void __CLRCALL_PURE_OR_CDECL _Feraise(int except) { /* report floating-point exception */ if ((except & (_FE_DIVBYZERO | _FE_INVALID)) != 0) errno = EDOM; else if ((except & (_FE_UNDERFLOW | _FE_OVERFLOW)) != 0) errno = ERANGE; } _C_STD_END /* * Consult your license regarding permissions and restrictions. V6.50:0009 */ ```
```c++ /*============================================================================= file LICENSE_1_0.txt or copy at path_to_url =============================================================================*/ // this file deliberately contains non-ascii characters // boostinspect:noascii #include <boost/detail/lightweight_test.hpp> #include <boost/spirit/home/x3.hpp> #include <boost/fusion/include/vector.hpp> #include <boost/fusion/include/at.hpp> #include <string> #include <cstring> #include <iostream> #include "test.hpp" namespace x3 = boost::spirit::x3; int got_it = 0; struct my_rule_class { template <typename Iterator, typename Exception, typename Context> x3::error_handler_result on_error(Iterator&, Iterator const& last, Exception const& x, Context const& context) { std::cout << "Error! Expecting: " << x.which() << ", got: \"" << std::string(x.where(), last) << "\"" << std::endl ; return x3::error_handler_result::fail; } template <typename Iterator, typename Attribute, typename Context> inline void on_success(Iterator const&, Iterator const&, Attribute&, Context const&) { ++got_it; } }; int main() { using spirit_test::test_attr; using spirit_test::test; using namespace boost::spirit::x3::ascii; using boost::spirit::x3::rule; using boost::spirit::x3::int_; using boost::spirit::x3::lit; { // show that ra = rb and ra %= rb works as expected rule<class a, int> ra; rule<class b, int> rb; int attr; auto ra_def = (ra %= int_); BOOST_TEST(test_attr("123", ra_def, attr)); BOOST_TEST(attr == 123); auto rb_def = (rb %= ra_def); BOOST_TEST(test_attr("123", rb_def, attr)); BOOST_TEST(attr == 123); auto rb_def2 = (rb = ra_def); BOOST_TEST(test_attr("123", rb_def2, attr)); BOOST_TEST(attr == 123); } { // show that ra %= rb works as expected with semantic actions rule<class a, int> ra; rule<class b, int> rb; int attr; auto f = [](auto c){}; auto ra_def = (ra %= int_[f]); BOOST_TEST(test_attr("123", ra_def, attr)); BOOST_TEST(attr == 123); auto ra_def2 = (rb = (ra %= 
int_[f])); BOOST_TEST(test_attr("123", ra_def2, attr)); BOOST_TEST(attr == 123); } { // std::string as container attribute with auto rules std::string attr; // test deduced auto rule behavior auto text = rule<class text, std::string>() = +(!char_(')') >> !char_('>') >> char_); attr.clear(); BOOST_TEST(test_attr("x", text, attr)); BOOST_TEST(attr == "x"); } { // error handling auto r = rule<my_rule_class, char const*>() = '(' > int_ > ',' > int_ > ')'; BOOST_TEST(test("(123,456)", r)); BOOST_TEST(!test("(abc,def)", r)); BOOST_TEST(!test("(123,456]", r)); BOOST_TEST(!test("(123;456)", r)); BOOST_TEST(!test("[123,456]", r)); BOOST_TEST(got_it == 1); } { typedef boost::variant<double, int> v_type; auto r1 = rule<class r1, v_type>() = int_; v_type v; BOOST_TEST(test_attr("1", r1, v) && v.which() == 1 && boost::get<int>(v) == 1); typedef boost::optional<int> ov_type; auto r2 = rule<class r2, ov_type>() = int_; ov_type ov; BOOST_TEST(test_attr("1", r2, ov) && ov && boost::get<int>(ov) == 1); } // test handling of single element fusion sequences { using boost::fusion::vector; using boost::fusion::at_c; auto r = rule<class r, vector<int>>() = int_; vector<int> v(0); BOOST_TEST(test_attr("1", r, v) && at_c<0>(v) == 1); } { // attribute compatibility test using boost::spirit::x3::rule; using boost::spirit::x3::int_; auto const expr = int_; short i; BOOST_TEST(test_attr("1", expr, i) && i == 1); // ok const rule< class int_rule, int > int_rule( "int_rule" ); auto const int_rule_def = int_; auto const start = int_rule = int_rule_def; short j; BOOST_TEST(test_attr("1", start, j) && j == 1); // error } return boost::report_errors(); } ```
The Gymnasium bei St. Anna is a school in Augsburg, Bavaria, Germany, founded in 1531 and still active. History The school was founded in 1531 by the then predominantly Evangelical Council of the City of Augsburg as a counterweight to the Catholic cathedral and monastery schools. It was to persuade the noble families of the city about an alternative to education of their children. The prevailing practice was to hire private tutors, many of them incompetent and outmoded. St Anne Gymnasium was the school to be trusted to educate new leaders for the Protestant town. The students were from the age 7 to 16 and taught in nine classes: the literature, introduction to the dialectic ( logic), and rhetoric based on the ancient Greek and Latin, a little Hebrew. There were also teachers competent in Mathematics, Music and Calligraphy. On the other hand, the school lacked any instruction in German and the modern foreign languages. The first Rector of the Gymnasium was a German playwright, Sixtus Birck (1536-1554.) Under his leadership the school acquired an excellent reputation as a reformed and progressive institution. This tradition was further developed under Hieronymus Wolf’s leadership who recruited an outstanding faculty to tech at the Gymnasium including Georg Henisch References Köberlin, Karl, Geschichte des Hum. Gymnasiums bei St. Anna in Augsburg von 1531 bis 1931. Zur Vierhundertjahrfeier der Anstalt, Augsburg 1931. Freudenberger, Rudolf, Das Evangelische Gymnasium und das Evangelische Kolleg bei St. Anna in der Zeit des Dreißigjährigen Krieges, in: Das Gymnasium bei St. Anna in Augsburg. 475 Jahre von 1531 bis 2006, hg. v. Karl-August Keil, Augsburg 2006, S. 53–72. External links 1531 establishments in the Holy Roman Empire Buildings and structures in Augsburg Gymnasiums in Germany Schools in Bavaria
```c /* * Program write.c - dump memory structures to file for iso9660 filesystem. Written by Eric Youngdale (1993). This program is free software; you can redistribute it and/or modify the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ /* APPLE_HYB James Pearson j.pearson@ge.ucl.ac.uk 16/3/1999 */ #include <string.h> #include <stdlib.h> #include <err.h> #include "config.h" #include "mkisofs.h" #include "iso9660.h" #include "volume.h" #include "write.h" #include "apple_proto.h" #include "mac_label_proto.h" #include <time.h> #include <errno.h> #include <sys/types.h> #include <sys/stat.h> #include <fcntl.h> #ifdef HAVE_UNISTD_H #include <unistd.h> #endif #ifdef __SVR4 extern char * strdup(const char *); #endif #ifdef VMS extern char * strdup(const char *); #endif /* Max number of sectors we will write at one time */ #define NSECT 16 /* Counters for statistics */ static int table_size = 0; static int total_dir_size = 0; static int rockridge_size = 0; static struct directory ** pathlist; static int next_path_index = 1; static int sort_goof; struct output_fragment * out_tail; struct output_fragment * out_list; struct iso_primary_descriptor vol_desc; #ifdef APPLE_HYB static int hfs_pad; #endif /* APPLE_HYB */ static int root_gen __PR((void)); static int generate_path_tables __PR((void)); static int file_gen __PR((void)); static int dirtree_dump __PR((void)); /* Routines to actually write the disc. 
We write sequentially so that we could write a tape, or write the disc directly */ #define FILL_SPACE(X) memset(vol_desc.X, ' ', sizeof(vol_desc.X)) void FDECL2(set_721, char *, pnt, unsigned int, i) { pnt[0] = i & 0xff; pnt[1] = (i >> 8) & 0xff; } void FDECL2(set_722, char *, pnt, unsigned int, i) { pnt[0] = (i >> 8) & 0xff; pnt[1] = i & 0xff; } void FDECL2(set_723, char *, pnt, unsigned int, i) { pnt[3] = pnt[0] = i & 0xff; pnt[2] = pnt[1] = (i >> 8) & 0xff; } void FDECL2(set_731, char *, pnt, unsigned int, i) { pnt[0] = i & 0xff; pnt[1] = (i >> 8) & 0xff; pnt[2] = (i >> 16) & 0xff; pnt[3] = (i >> 24) & 0xff; } void FDECL2(set_732, char *, pnt, unsigned int, i) { pnt[3] = i & 0xff; pnt[2] = (i >> 8) & 0xff; pnt[1] = (i >> 16) & 0xff; pnt[0] = (i >> 24) & 0xff; } int FDECL1(get_733, char *, p) { return ((p[0] & 0xff) | ((p[1] & 0xff) << 8) | ((p[2] & 0xff) << 16) | ((p[3] & 0xff) << 24)); } void FDECL2(set_733, char *, pnt, unsigned int, i) { pnt[7] = pnt[0] = i & 0xff; pnt[6] = pnt[1] = (i >> 8) & 0xff; pnt[5] = pnt[2] = (i >> 16) & 0xff; pnt[4] = pnt[3] = (i >> 24) & 0xff; } void FDECL4(xfwrite, void *, buffer, int, count, int, size, FILE *, file) { /* * This is a hack that could be made better. XXXIs this the only place? * It is definitely needed on Operating Systems that do not * allow to write files that are > 2GB. * If the system is fast enough to be able to feed 1400 KB/s * writing speed of a DVD-R drive, use stdout. * If the system cannot do this reliable, you need to use this * hacky option. 
*/ static int idx = 0; if (split_output != 0 && (idx == 0 || ftell(file) >= (1024 * 1024 * 1024) )) { char nbuf[512]; extern char *outfile; if (idx == 0) unlink(outfile); snprintf(nbuf, sizeof nbuf, "%s_%02d", outfile, idx++); file = freopen(nbuf, "wb", file); if (file == NULL) { fprintf(stderr, "Cannot open '%s'.\n", nbuf); exit(1); } } while(count) { int got = fwrite(buffer,size,count,file); if(got<=0) { fprintf(stderr,"cannot fwrite %d*%d\n",size,count); exit(1); } count-=got,*(char**)&buffer+=size*got; } } #ifdef APPLE_HYB /* use the deferred_write struct to store info about the hfs_boot_file */ static struct deferred_write mac_boot; #endif /* APPLE_HYB */ static struct deferred_write * dw_head = NULL, * dw_tail = NULL; unsigned int last_extent_written =0; static int path_table_index; static time_t begun; /* We recursively walk through all of the directories and assign extent numbers to them. We have already assigned extent numbers to everything that goes in front of them */ static int FDECL1(assign_directory_addresses, struct directory *, node) { int dir_size; struct directory * dpnt; dpnt = node; while (dpnt) { /* skip if it's hidden */ if(dpnt->dir_flags & INHIBIT_ISO9660_ENTRY) { dpnt = dpnt->next; continue; } /* * If we already have an extent for this (i.e. it came from * a multisession disc), then don't reassign a new extent. */ dpnt->path_index = next_path_index++; if( dpnt->extent == 0 ) { dpnt->extent = last_extent; dir_size = (dpnt->size + (SECTOR_SIZE - 1)) >> 11; last_extent += dir_size; /* * Leave room for the CE entries for this directory. Keep them * close to the reference directory so that access will be * quick. 
*/ if(dpnt->ce_bytes) { last_extent += ROUND_UP(dpnt->ce_bytes) >> 11; } } if(dpnt->subdir) { assign_directory_addresses(dpnt->subdir); } dpnt = dpnt->next; } return 0; } #ifdef APPLE_HYB static void FDECL4(write_one_file, char *, filename, unsigned int, size, FILE *, outfile, unsigned int, off) #else static void FDECL3(write_one_file, char *, filename, unsigned int, size, FILE *, outfile) #endif /* APPLE_HYB */ { char buffer[SECTOR_SIZE * NSECT]; FILE * infile; int remain; int use; if ((infile = fopen(filename, "rb")) == NULL) { #if defined(sun) || defined(_AUX_SOURCE) fprintf(stderr, "cannot open %s: (%d)\n", filename, errno); #else fprintf(stderr, "cannot open %s: %s\n", filename, strerror(errno)); #endif exit(1); } #ifdef APPLE_HYB fseek(infile, off, SEEK_SET); #endif /* APPLE_HYB */ remain = size; while(remain > 0) { use = (remain > SECTOR_SIZE * NSECT - 1 ? NSECT*SECTOR_SIZE : remain); use = ROUND_UP(use); /* Round up to nearest sector boundary */ memset(buffer, 0, use); if (fread(buffer, 1, use, infile) == 0) { fprintf(stderr,"cannot read from %s\n",filename); exit(1); } xfwrite(buffer, 1, use, outfile); last_extent_written += use/SECTOR_SIZE; #if 0 if((last_extent_written % 1000) < use/SECTOR_SIZE) { fprintf(stderr,"%d..", last_extent_written); } #else if((last_extent_written % 5000) < use/SECTOR_SIZE && verbose > 3) { time_t now; time_t the_end; double frac; time(&now); frac = last_extent_written / (double)last_extent; the_end = begun + (now - begun) / frac; fprintf(stderr, "%6.2f%% done, estimate finish %s", frac * 100., ctime(&the_end)); } #endif remain -= use; } fclose(infile); } /* write_one_file(... 
*/ static void FDECL1(write_files, FILE *, outfile) { struct deferred_write * dwpnt, *dwnext; dwpnt = dw_head; while(dwpnt) { if(dwpnt->table) { xfwrite(dwpnt->table, 1, ROUND_UP(dwpnt->size), outfile); last_extent_written += ROUND_UP(dwpnt->size) / SECTOR_SIZE; table_size += dwpnt->size; /* fprintf(stderr,"Size %d ", dwpnt->size); */ free(dwpnt->table); } else { #ifdef VMS vms_write_one_file(dwpnt->name, dwpnt->size, outfile); #else #ifdef APPLE_HYB write_one_file(dwpnt->name, dwpnt->size, outfile, dwpnt->off); #else write_one_file(dwpnt->name, dwpnt->size, outfile); #endif /* APPLE_HYB */ #endif free(dwpnt->name); } #ifdef APPLE_HYB if (apple_hyb) { /* we may have to pad out ISO files to work with HFS clump sizes */ char blk[SECTOR_SIZE]; int i; for(i=0;i<dwpnt->pad;i++) xfwrite(blk, 1, SECTOR_SIZE, outfile); last_extent_written += dwpnt->pad; } #endif /* APPLE_HYB */ dwnext = dwpnt; dwpnt = dwpnt->next; free(dwnext); } } /* write_files(... */ #if 0 static void dump_filelist() { struct deferred_write * dwpnt; dwpnt = dw_head; while(dwpnt) { fprintf(stderr, "File %s\n",dwpnt->name); dwpnt = dwpnt->next; } fprintf(stderr,"\n"); } #endif static int FDECL2(compare_dirs, const void *, rr, const void *, ll) { char * rpnt, *lpnt; struct directory_entry ** r, **l; r = (struct directory_entry **) rr; l = (struct directory_entry **) ll; rpnt = (*r)->isorec.name; lpnt = (*l)->isorec.name; #ifdef APPLE_HYB /* resource fork MUST (not sure if this is true for HFS volumes) be before the data fork - so force it here */ if ((*r)->assoc && (*r)->assoc == (*l)) return 1; if ((*l)->assoc && (*l)->assoc == (*r)) return -1; #endif /* APPLE_HYB */ /* * If the entries are the same, this is an error. */ if( strcmp(rpnt, lpnt) == 0 ) { sort_goof++; } /* * Put the '.' and '..' entries on the head of the sorted list. * For normal ASCII, this always happens to be the case, but out of * band characters cause this not to be the case sometimes. 
* * FIXME(eric) - these tests seem redundant, in taht the name is * never assigned these values. It will instead be \000 or \001, * and thus should always be sorted correctly. I need to figure * out why I thought I needed this in the first place. */ #if 0 if( strcmp(rpnt, ".") == 0 ) return -1; if( strcmp(lpnt, ".") == 0 ) return 1; if( strcmp(rpnt, "..") == 0 ) return -1; if( strcmp(lpnt, "..") == 0 ) return 1; #else /* * The code above is wrong (as explained in Eric's comment), leading to incorrect * sort order iff the -L option ("allow leading dots") is in effect and a directory * contains entries that start with a dot. * * (TF, Tue Dec 29 13:49:24 CET 1998) */ if((*r)->isorec.name_len[0] == 1 && *rpnt == 0) return -1; /* '.' */ if((*l)->isorec.name_len[0] == 1 && *lpnt == 0) return 1; if((*r)->isorec.name_len[0] == 1 && *rpnt == 1) return -1; /* '..' */ if((*l)->isorec.name_len[0] == 1 && *lpnt == 1) return 1; #endif while(*rpnt && *lpnt) { if(*rpnt == ';' && *lpnt != ';') return -1; if(*rpnt != ';' && *lpnt == ';') return 1; if(*rpnt == ';' && *lpnt == ';') return 0; if(*rpnt == '.' && *lpnt != '.') return -1; if(*rpnt != '.' && *lpnt == '.') return 1; if((unsigned char)*rpnt < (unsigned char)*lpnt) return -1; if((unsigned char)*rpnt > (unsigned char)*lpnt) return 1; rpnt++; lpnt++; } if(*rpnt) return 1; if(*lpnt) return -1; return 0; } /* * Function: sort_directory * * Purpose: Sort the directory in the appropriate ISO9660 * order. * * Notes: Returns 0 if OK, returns > 0 if an error occurred. */ int FDECL1(sort_directory, struct directory_entry **, sort_dir) { int dcount = 0; int xcount = 0; int j; int i, len; struct directory_entry * s_entry; struct directory_entry ** sortlist; /* need to keep a count of how many entries are hidden */ s_entry = *sort_dir; while(s_entry) { if (s_entry->de_flags & INHIBIT_ISO9660_ENTRY) xcount++; dcount++; s_entry = s_entry->next; } if( dcount == 0 ) { return 0; } /* * OK, now we know how many there are. 
Build a vector for sorting. */ sortlist = (struct directory_entry **) e_malloc(sizeof(struct directory_entry *) * dcount); j = dcount - 1; dcount = 0; s_entry = *sort_dir; while(s_entry) { if(s_entry->de_flags & INHIBIT_ISO9660_ENTRY) { /* put any hidden entries at the end of the vector */ sortlist[j--] = s_entry; } else { sortlist[dcount] = s_entry; dcount++; } len = s_entry->isorec.name_len[0]; s_entry->isorec.name[len] = 0; s_entry = s_entry->next; } /* * Each directory is required to contain at least . and .. */ if( dcount < 2 ) { sort_goof = 1; } else { /* only sort the non-hidden entries */ sort_goof = 0; #ifdef __STDC__ qsort(sortlist, dcount, sizeof(struct directory_entry *), (int (*)(const void *, const void *))compare_dirs); #else qsort(sortlist, dcount, sizeof(struct directory_entry *), compare_dirs); #endif /* * Now reassemble the linked list in the proper sorted order * We still need the hidden entries, as they may be used in the * Joliet tree. */ for(i=0; i<dcount+xcount-1; i++) { sortlist[i]->next = sortlist[i+1]; } sortlist[dcount+xcount-1]->next = NULL; *sort_dir = sortlist[0]; } free(sortlist); return sort_goof; } static int root_gen() { init_fstatbuf(); root_record.length[0] = 1 + sizeof(struct iso_directory_record) - sizeof(root_record.name); root_record.ext_attr_length[0] = 0; set_733((char *) root_record.extent, root->extent); set_733((char *) root_record.size, ROUND_UP(root->size)); iso9660_date(root_record.date, root_statbuf.st_mtime); root_record.flags[0] = 2; root_record.file_unit_size[0] = 0; root_record.interleave[0] = 0; set_723(root_record.volume_sequence_number, volume_sequence_number); root_record.name_len[0] = 1; return 0; } static void FDECL1(assign_file_addresses, struct directory *, dpnt) { struct directory * finddir; struct directory_entry * s_entry; struct file_hash *s_hash; struct deferred_write * dwpnt; char whole_path[1024]; while (dpnt) { s_entry = dpnt->contents; for(s_entry = dpnt->contents; s_entry; s_entry = 
	    s_entry->next)	/* (continuation of assign_file_addresses) */
	{
	  /*
	   * If we already have an extent for this entry,
	   * then don't assign a new one.  It must have come
	   * from a previous session on the disc.  Note that
	   * we don't end up scheduling the thing for writing
	   * either.
	   */
	  if( isonum_733((unsigned char *) s_entry->isorec.extent) != 0 )
	    {
	      continue;
	    }

	  /*
	   * This saves some space if there are symlinks present
	   */
	  s_hash = find_hash(s_entry->dev, s_entry->inode);
	  if(s_hash)
	    {
	      if(verbose > 2)
		{
		  fprintf(stderr, "Cache hit for %s%s%s\n",s_entry->filedir->de_name,
			  SPATH_SEPARATOR, s_entry->name);
		}
	      /* Hard link to an already-placed file: reuse its extent/size. */
	      set_733((char *) s_entry->isorec.extent, s_hash->starting_block);
	      set_733((char *) s_entry->isorec.size, s_hash->size);
	      continue;
	    }

	  /*
	   * If this is for a directory that is not a . or a .. entry,
	   * then look up the information for the entry.  We have already
	   * assigned extents for directories, so we just need to
	   * fill in the blanks here.
	   */
	  if (strcmp(s_entry->name,".") && strcmp(s_entry->name,"..") &&
	      s_entry->isorec.flags[0] == 2)
	    {
	      /* Locate the subdirectory node whose self-entry is this one. */
	      finddir = dpnt->subdir;
	      while(1==1)
		{
		  if(finddir->self == s_entry) break;
		  finddir = finddir->next;
		  if(!finddir)
		    {
		      fprintf(stderr,"Fatal goof\n");
		      exit(1);
		    }
		}
	      set_733((char *) s_entry->isorec.extent, finddir->extent);
	      s_entry->starting_block = finddir->extent;
	      s_entry->size = ROUND_UP(finddir->size);
	      total_dir_size += s_entry->size;
	      add_hash(s_entry);
	      set_733((char *) s_entry->isorec.size, ROUND_UP(finddir->size));
	      continue;
	    }

	  /*
	   * If this is . or .., then look up the relevant info from the
	   * tables.
	   */
	  if(strcmp(s_entry->name,".") == 0)
	    {
	      set_733((char *) s_entry->isorec.extent, dpnt->extent);

	      /*
	       * Set these so that the hash table has the
	       * correct information
	       */
	      s_entry->starting_block = dpnt->extent;
	      s_entry->size = ROUND_UP(dpnt->size);

	      add_hash(s_entry);
	      /* NOTE(review): starting_block is assigned again after add_hash;
		 the second store looks redundant — confirm before removing. */
	      s_entry->starting_block = dpnt->extent;
	      set_733((char *) s_entry->isorec.size, ROUND_UP(dpnt->size));
	      continue;
	    }

	  if(strcmp(s_entry->name,"..") == 0)
	    {
	      if(dpnt == root)
		{
		  /* Root's ".." points at root itself, so count its size once more. */
		  total_dir_size += root->size;
		}
	      set_733((char *) s_entry->isorec.extent, dpnt->parent->extent);

	      /*
	       * Set these so that the hash table has the
	       * correct information
	       */
	      s_entry->starting_block = dpnt->parent->extent;
	      s_entry->size = ROUND_UP(dpnt->parent->size);

	      add_hash(s_entry);
	      /* NOTE(review): duplicate store after add_hash, same as the "." case. */
	      s_entry->starting_block = dpnt->parent->extent;
	      set_733((char *) s_entry->isorec.size, ROUND_UP(dpnt->parent->size));
	      continue;
	    }

	  /*
	   * Some ordinary non-directory file.  Just schedule the
	   * file to be written.  This is all quite
	   * straightforward, just make a list and assign extents
	   * as we go.  Once we get through writing all of the
	   * directories, we should be ready write out these
	   * files
	   */
	  if(s_entry->size)
	    {
	      dwpnt = (struct deferred_write *)
		e_malloc(sizeof(struct deferred_write));
#ifdef APPLE_HYB
	      /* save this directory entry for later use */
	      dwpnt->s_entry = s_entry;
	      /* set the initial padding to zero */
	      dwpnt->pad = 0;
	      /* maybe an offset to start of the real file/fork */
	      dwpnt->off = s_entry->hfs_off;
#endif /* APPLE_HYB */
	      /* Append to the global deferred-write list. */
	      if(dw_tail)
		{
		  dw_tail->next = dwpnt;
		  dw_tail = dwpnt;
		}
	      else
		{
		  dw_head = dwpnt;
		  dw_tail = dwpnt;
		}
	      if(s_entry->inode == TABLE_INODE)
		{
		  /* Translation-table pseudo-file: written from memory, not a path. */
		  dwpnt->table = s_entry->table;
		  dwpnt->name = NULL;
#ifdef APPLE_HYB
		  snprintf(whole_path, sizeof whole_path, "%s%s%s",
			   s_entry->filedir->whole_name, SPATH_SEPARATOR, trans_tbl);
#else
		  snprintf(whole_path, sizeof whole_path,
			   "%s%sTRANS.TBL",
			   s_entry->filedir->whole_name, SPATH_SEPARATOR);
#endif /* APPLE_HYB */
		}
	      else
		{
		  dwpnt->table = NULL;
		  strcpy(whole_path, s_entry->whole_name);
		  dwpnt->name = strdup(whole_path);
		}
	      dwpnt->next = NULL;
	      dwpnt->size = s_entry->size;
	      dwpnt->extent = last_extent;
	      set_733((char *) s_entry->isorec.extent, last_extent);
	      s_entry->starting_block = last_extent;
	      add_hash(s_entry);
	      /* Advance by the file's size in 2048-byte sectors (>> 11). */
	      last_extent += ROUND_UP(s_entry->size) >> 11;
	      if(verbose > 2)
		{
		  fprintf(stderr,"%d %d %s\n", s_entry->starting_block,
			  last_extent-1, whole_path);
		}
#ifdef DBG_ISO
	      if((ROUND_UP(s_entry->size) >> 11) > 500)
		{
		  fprintf(stderr,"Warning: large file %s\n", whole_path);
		  fprintf(stderr,"Starting block is %d\n", s_entry->starting_block);
		  fprintf(stderr,"Reported file size is %d extents\n", s_entry->size);
		}
#endif
#ifdef NOT_NEEDED /* Never use this code if you like to create a DVD */
	      if(last_extent > (800000000 >> 11))
		{
		  /*
		   * More than 800Mb? Punt
		   */
		  fprintf(stderr,"Extent overflow processing file %s\n", whole_path);
		  fprintf(stderr,"Starting block is %d\n", s_entry->starting_block);
		  fprintf(stderr,"Reported file size is %d extents\n", s_entry->size);
		  exit(1);
		}
#endif
	      continue;
	    }

	  /*
	   * This is for zero-length files.  If we leave the extent 0,
	   * then we get screwed, because many readers simply drop files
	   * that have an extent of zero.  Thus we leave the size 0,
	   * and just assign the extent number.
	   */
	  set_733((char *) s_entry->isorec.extent, last_extent);
	}
      if(dpnt->subdir)
	{
	  assign_file_addresses(dpnt->subdir);
	}
      dpnt = dpnt->next;
    }
} /* assign_file_addresses(... */

/*
 * Release the directory-entry list of a single directory node.  The node
 * itself and its subdirectory links stay intact.
 */
static void FDECL1(free_one_directory, struct directory *, dpnt)
{
  struct directory_entry		* s_entry;
  struct directory_entry		* s_entry_d;

  s_entry = dpnt->contents;
  while(s_entry)
    {
      /* Save the current node before advancing, then free it. */
      s_entry_d = s_entry;
      s_entry = s_entry->next;

      if( s_entry_d->name != NULL )
	{
	  free (s_entry_d->name);
	}
      if( s_entry_d->whole_name != NULL )
	{
	  free (s_entry_d->whole_name);
	}
#ifdef APPLE_HYB
      if (apple_both && s_entry_d->hfs_ent && !s_entry_d->assoc)
	free(s_entry_d->hfs_ent);
#endif /* APPLE_HYB */
      free (s_entry_d);
    }
  dpnt->contents = NULL;
} /* free_one_directory(...
 */

/* Recursively free the contents of every directory in the tree. */
static void FDECL1(free_directories, struct directory *, dpnt)
{
  while (dpnt)
    {
      free_one_directory(dpnt);
      if(dpnt->subdir) free_directories(dpnt->subdir);
      dpnt = dpnt->next;
    }
}

/*
 * Serialize one directory's ISO9660 records (plus any Rock Ridge CE
 * continuation blocks) into a sector-aligned buffer and write it out.
 */
void FDECL2(generate_one_directory, struct directory *, dpnt, FILE *, outfile)
{
  unsigned int			  ce_address = 0;
  char				* ce_buffer;
  unsigned int			  ce_index = 0;
  unsigned int			  ce_size;
  unsigned int			  dir_index;
  char				* directory_buffer;
  int				  new_reclen;
  struct directory_entry	* s_entry;
  struct directory_entry	* s_entry_d;
  unsigned int			  total_size;

  /* Round the directory size up to a whole number of sectors. */
  total_size = (dpnt->size + (SECTOR_SIZE - 1)) & ~(SECTOR_SIZE - 1);
  directory_buffer = (char *) e_malloc(total_size);
  memset(directory_buffer, 0, total_size);
  dir_index = 0;

  ce_size = (dpnt->ce_bytes + (SECTOR_SIZE - 1)) & ~(SECTOR_SIZE - 1);
  ce_buffer = NULL;

  if(ce_size)
    {
      ce_buffer = (char *) e_malloc(ce_size);
      memset(ce_buffer, 0, ce_size);

      ce_index = 0;

      /*
       * Absolute byte address of CE entries for this directory
       */
      ce_address = last_extent_written + (total_size >> 11);
      ce_address = ce_address << 11;
    }

  s_entry = dpnt->contents;
  while(s_entry)
    {
      /* skip if it's hidden */
      if(s_entry->de_flags & INHIBIT_ISO9660_ENTRY) {
	s_entry = s_entry->next;
	continue;
      }

      /*
       * We do not allow directory entries to cross sector boundaries.
       * Simply pad, and then start the next entry at the next sector
       */
      new_reclen = s_entry->isorec.length[0];
      if( (dir_index & (SECTOR_SIZE - 1)) + new_reclen >= SECTOR_SIZE )
	{
	  dir_index = (dir_index + (SECTOR_SIZE - 1)) &
	    ~(SECTOR_SIZE - 1);
	}

      /* Copy the fixed part of the record plus the variable-length name. */
      memcpy(directory_buffer + dir_index, &s_entry->isorec,
	     sizeof(struct iso_directory_record) -
	     sizeof(s_entry->isorec.name) + s_entry->isorec.name_len[0]);

      dir_index += sizeof(struct iso_directory_record) -
	sizeof (s_entry->isorec.name)+ s_entry->isorec.name_len[0];

      /*
       * Add the Rock Ridge attributes, if present
       */
      if(s_entry->rr_attr_size)
	{
	  /* RR attributes must start on an even offset. */
	  if(dir_index & 1)
	    {
	      directory_buffer[dir_index++] = 0;
	    }

	  /*
	   * If the RR attributes were too long, then write the
	   * CE records, as required.
	   */
	  if(s_entry->rr_attr_size != s_entry->total_rr_attr_size)
	    {
	      unsigned char * pnt;
	      int len, nbytes;

	      /*
	       * Go through the entire record and fix up the CE entries
	       * so that the extent and offset are correct
	       */
	      pnt = s_entry->rr_attributes;
	      len = s_entry->total_rr_attr_size;
	      while(len > 3)
		{
#ifdef DEBUG
		  if (!ce_size)
		    {
		      fprintf(stderr,"Warning: ce_index(%d) && ce_address(%d) not initialized\n",
			      ce_index, ce_address);
		    }
#endif
		  if(pnt[0] == 'C' && pnt[1] == 'E')
		    {
		      /* Byte 20 of a CE field holds the continuation length. */
		      nbytes = get_733( (char *) pnt+20);

		      /* A continuation block may not straddle a sector boundary. */
		      if((ce_index & (SECTOR_SIZE - 1)) + nbytes >=
			 SECTOR_SIZE)
			{
			  ce_index = ROUND_UP(ce_index);
			}

		      /* Patch the CE field's block number and byte offset. */
		      set_733( (char *) pnt+4,
			       (ce_address + ce_index) >> 11);
		      set_733( (char *) pnt+12,
			       (ce_address + ce_index) & (SECTOR_SIZE - 1));

		      /*
		       * Now store the block in the ce buffer
		       */
		      memcpy(ce_buffer + ce_index,
			     pnt + pnt[2], nbytes);
		      ce_index += nbytes;
		      if(ce_index & 1)
			{
			  ce_index++;
			}
		    }
		  /* pnt[2] is the length of the current SUSP field. */
		  len -= pnt[2];
		  pnt += pnt[2];
		}
	    }

	  rockridge_size += s_entry->total_rr_attr_size;
	  memcpy(directory_buffer + dir_index, s_entry->rr_attributes,
		 s_entry->rr_attr_size);
	  dir_index += s_entry->rr_attr_size;
	}
      /* Records are padded to even length. */
      if(dir_index & 1)
	{
	  directory_buffer[dir_index++] = 0;
	}

      s_entry_d = s_entry;
      s_entry = s_entry->next;

      /*
       * Joliet doesn't use the Rock Ridge attributes, so we free it here.
       */
      if (s_entry_d->rr_attributes)
	{
	  free(s_entry_d->rr_attributes);
	  s_entry_d->rr_attributes = NULL;
	}
    }

  if(dpnt->size != dir_index)
    {
      fprintf(stderr,"Unexpected directory length %d %d %s\n",dpnt->size,
	      dir_index, dpnt->de_name);
    }

  xfwrite(directory_buffer, 1, total_size, outfile);
  last_extent_written += total_size >> 11;
  free(directory_buffer);

  if(ce_size)
    {
      if(ce_index != dpnt->ce_bytes)
	{
	  fprintf(stderr,"Continuation entry record length mismatch (%d %d).\n",
		  ce_index, dpnt->ce_bytes);
	}
      xfwrite(ce_buffer, 1, ce_size, outfile);
      last_extent_written += ce_size >> 11;
      free(ce_buffer);
    }
} /* generate_one_directory(...
 */

/* Fill the global pathlist vector from the tree, indexed by path_index. */
static void FDECL1(build_pathlist, struct directory *, node)
{
  struct directory * dpnt;

  dpnt = node;

  while (dpnt)
    {
      /* skip if it's hidden */
      if( (dpnt->dir_flags & INHIBIT_ISO9660_ENTRY) == 0 )
	pathlist[dpnt->path_index] = dpnt;

      if(dpnt->subdir) build_pathlist(dpnt->subdir);
      dpnt = dpnt->next;
    }
} /* build_pathlist(... */

/* qsort comparator: order by parent's path index, then by ISO name. */
static int FDECL2(compare_paths, void const *, r, void const *, l)
{
  struct directory const *ll = *(struct directory * const *)l;
  struct directory const *rr = *(struct directory * const *)r;

  if (rr->parent->path_index < ll->parent->path_index)
    {
      return -1;
    }

  if (rr->parent->path_index > ll->parent->path_index)
    {
      return 1;
    }

  return strcmp(rr->self->isorec.name, ll->self->isorec.name);
} /* compare_paths(... */

/*
 * Build the little-endian (L) and big-endian (M) path tables from the
 * sorted directory list.  Returns 0; exits on fatal inconsistencies.
 */
static int generate_path_tables()
{
  struct directory_entry * de;
  struct directory	 * dpnt;
  int			   fix;
  int			   i;
  int			   j;
  int			   namelen;
  char			 * npnt;
  char			 * npnt1;
  int			   tablesize;

  /*
   * First allocate memory for the tables and initialize the memory
   */
  tablesize = path_blocks << 11;
  path_table_m = (char *) e_malloc(tablesize);
  path_table_l = (char *) e_malloc(tablesize);
  memset(path_table_l, 0, tablesize);
  memset(path_table_m, 0, tablesize);

  /*
   * Now start filling in the path tables.  Start with root directory
   */
  /* Path-table parent indices are 16-bit, so more than 0xffff dirs cannot work. */
  if( next_path_index > 0xffff )
    {
      fprintf(stderr, "Unable to generate sane path tables - too many directories (%d)\n",
	      next_path_index);
      exit(1);
    }

  path_table_index = 0;
  pathlist = (struct directory **) e_malloc(sizeof(struct directory *)
					    * next_path_index);
  memset(pathlist, 0, sizeof(struct directory *) * next_path_index);
  build_pathlist(root);

  /* Iterate sorting until the path indices reach a fixed point. */
  do
    {
      fix = 0;
#ifdef __STDC__
      qsort(&pathlist[1], next_path_index-1, sizeof(struct directory *),
	    (int (*)(const void *, const void *))compare_paths);
#else
      qsort(&pathlist[1], next_path_index-1, sizeof(struct directory *),
	    compare_paths);
#endif

      for(j=1; j<next_path_index; j++)
	{
	  if(pathlist[j]->path_index != j)
	    {
	      pathlist[j]->path_index = j;
	      fix++;
	    }
	}
    } while(fix);

  for(j=1; j<next_path_index; j++)
    {
      dpnt = pathlist[j];
      if(!dpnt)
	{
	  fprintf(stderr,"Entry %d not in path tables\n", j);
	  exit(1);
	}
      npnt = dpnt->de_name;

      /*
       * So the root comes out OK
       */
      if( (*npnt == 0) || (dpnt == root) )
	{
	  npnt = ".";
	}
      /* Only the final path component goes into the table. */
      npnt1 = strrchr(npnt, PATH_SEPARATOR);
      if(npnt1)
	{
	  npnt = npnt1 + 1;
	}

      de = dpnt->self;
      if(!de)
	{
	  fprintf(stderr,"Fatal goof\n");
	  exit(1);
	}

      namelen = de->isorec.name_len[0];

      /* Record layout: name length (+1 pad byte), extent, parent index, name. */
      path_table_l[path_table_index] = namelen;
      path_table_m[path_table_index] = namelen;
      path_table_index += 2;

      set_731(path_table_l + path_table_index, dpnt->extent);
      set_732(path_table_m + path_table_index, dpnt->extent);
      path_table_index += 4;

      set_721(path_table_l + path_table_index,
	      dpnt->parent->path_index);
      set_722(path_table_m + path_table_index,
	      dpnt->parent->path_index);
      path_table_index += 2;

      for(i =0; i<namelen; i++)
	{
	  path_table_l[path_table_index] = de->isorec.name[i];
	  path_table_m[path_table_index] = de->isorec.name[i];
	  path_table_index++;
	}
      if(path_table_index & 1)
	{
	  path_table_index++;  /* For odd lengths we pad */
	}
    }

  free(pathlist);
  if(path_table_index != path_table_size)
    {
      fprintf(stderr,"Path table lengths do not match %d %d\n",
	      path_table_index,
	      path_table_size);
    }
  return 0;
} /* generate_path_tables(...
 */

/* Copy at most `max` bytes of the NUL-terminated string `from` into `to`
   (no terminating NUL is written - targets are space-padded fields). */
void FDECL3(memcpy_max, char *, to, char *, from, int, max)
{
  int n = strlen(from);
  if (n > max)
    {
      n = max;
    }
  memcpy(to, from, n);
} /* memcpy_max(... */

/* Append an output fragment to the global singly-linked output list. */
void FDECL1(outputlist_insert, struct output_fragment *, frag)
{
  if( out_tail == NULL )
    {
      out_list = out_tail = frag;
    }
  else
    {
      out_tail->of_next = frag;
      out_tail = frag;
    }
}

/*
 * Write all scheduled file data to the image and report size statistics.
 */
static int FDECL1(file_write, FILE *, outfile)
{
  int				should_write;
#ifdef APPLE_HYB
  char				buffer[2048];

  memset(buffer, 0, sizeof(buffer));

  if (apple_hyb)
    {
      int	i;

      /* write out padding to round up to HFS allocation block */
      for(i=0;i<hfs_pad;i++)
	xfwrite(buffer, 1, sizeof(buffer), outfile);

      last_extent_written += hfs_pad;
    }
#endif /* APPLE_HYB */

  /*
   * OK, all done with that crap.  Now write out the directories.
   * This is where the fur starts to fly, because we need to keep track of
   * each file as we find it and keep track of where we put it.
   */
  should_write = last_extent - session_start;

  if( print_size > 0 )
    {
#ifdef APPLE_HYB
      if (apple_hyb)
	fprintf(stderr,"Total extents scheduled to be written (inc HFS) = %d\n",
		last_extent - session_start);
      else
#endif
      fprintf(stderr,"Total extents scheduled to be written = %d\n",
	      last_extent - session_start);
      exit(0);
    }
  if( verbose > 2 )
    {
#ifdef DBG_ISO
      fprintf(stderr,"Total directory extents being written = %d\n", last_extent);
#endif
#ifdef APPLE_HYB
      if (apple_hyb)
	fprintf(stderr,"Total extents scheduled to be written (inc HFS) = %d\n",
		last_extent - session_start);
      else
#endif
      fprintf(stderr,"Total extents scheduled to be written = %d\n",
	      last_extent - session_start);
    }

  /*
   * Now write all of the files that we need.
   */
  write_files(outfile);

#ifdef APPLE_HYB
  /* write out extents/catalog/dt file */
  if (apple_hyb)
    {
      xfwrite(hce->hfs_ce, hce->hfs_tot_size, HFS_BLOCKSZ, outfile);
      /* round up to a whole CD block */
      if (H_ROUND_UP(hce->hfs_tot_size) - hce->hfs_tot_size*HFS_BLOCKSZ)
	xfwrite(buffer, 1, H_ROUND_UP(hce->hfs_tot_size) -
		hce->hfs_tot_size*HFS_BLOCKSZ, outfile);
      last_extent_written += ROUND_UP(hce->hfs_tot_size*HFS_BLOCKSZ)/SECTOR_SIZE;

      /* write out HFS boot block */
      if (mac_boot.name)
	write_one_file(mac_boot.name, mac_boot.size, outfile, mac_boot.off);
    }
#endif /* APPLE_HYB */

  /*
   * The rest is just fluff.
   */
  if( verbose == 0 )
    {
      return 0;
    }

  /* NOTE(review): when APPLE_HYB is defined, the `else` below has no body in
     this translation unit (the non-HFS fprintf sits in the #else branch), so
     it dangles onto the following `if` statement.  That looks like a
     preprocessor-structure bug - confirm against the non-APPLE_HYB build. */
#ifdef APPLE_HYB
  if (apple_hyb)
    {
      fprintf(stderr, "Total extents actually written (inc HFS) = %d\n",
	      last_extent_written - session_start);
      fprintf(stderr, "(Size of ISO volume = %d, HFS extra = %d)\n",
	      last_extent_written - session_start - hfs_extra, hfs_extra);
    }
  else
#else
  fprintf(stderr,"Total extents actually written = %d\n",
	  last_extent_written - session_start);
#endif /* APPLE_HYB */

  /*
   * Hard links throw us off here
   */
  if(should_write != last_extent - session_start)
    {
      fprintf(stderr,"Number of extents written not what was predicted. Please fix.\n");
      /* NOTE(review): prints raw last_extent while the comparison above uses
	 last_extent - session_start - possibly misleading for multisession. */
      fprintf(stderr,"Predicted = %d, written = %d\n", should_write, last_extent);
    }

  fprintf(stderr,"Total translation table size: %d\n", table_size);
  fprintf(stderr,"Total rockridge attributes bytes: %d\n", rockridge_size);
  fprintf(stderr,"Total directory bytes: %d\n", total_dir_size);
  fprintf(stderr,"Path table size(bytes): %d\n", path_table_size);

#ifdef DEBUG
  fprintf(stderr, "next extent, last_extent, last_extent_written %d %d %d\n",
	  next_extent, last_extent, last_extent_written);
#endif

  return 0;
} /* file_write(... */

/*
 * Function to write the PVD for the disc.
 */
static int FDECL1(pvd_write, FILE *, outfile)
{
  char				iso_time[17];
  int				should_write;
  struct tm			local;
  struct tm			gmt;

  time(&begun);

  local = *localtime(&begun);
  gmt   = *gmtime(&begun);

  /*
   * This will break in the year 2000, I suppose, but there is no good way
   * to get the top two digits of the year.
   * (NOTE(review): %4.4d with 1900 + tm_year actually yields a correct
   * 4-digit year past 2000 - this comment looks outdated.)
   */
  snprintf(iso_time, sizeof iso_time, "%4.4d%2.2d%2.2d%2.2d%2.2d%2.2d00",
	   1900 + local.tm_year,
	   local.tm_mon+1, local.tm_mday,
	   local.tm_hour, local.tm_min, local.tm_sec);

  /* Final byte is the GMT offset in 15-minute units. */
  local.tm_min -= gmt.tm_min;
  local.tm_hour -= gmt.tm_hour;
  local.tm_yday -= gmt.tm_yday;
  iso_time[16] = (local.tm_min + 60*(local.tm_hour + 24*local.tm_yday)) / 15;

  /*
   * Next we write out the primary descriptor for the disc
   */
  memset(&vol_desc, 0, sizeof(vol_desc));
  vol_desc.type[0] = ISO_VD_PRIMARY;
  memcpy(vol_desc.id, ISO_STANDARD_ID, sizeof(ISO_STANDARD_ID) - 1);
  vol_desc.version[0] = 1;

  memset(vol_desc.system_id, ' ', sizeof(vol_desc.system_id));
  memcpy_max(vol_desc.system_id, system_id, strlen(system_id));

  memset(vol_desc.volume_id, ' ', sizeof(vol_desc.volume_id));
  memcpy_max(vol_desc.volume_id, volume_id, strlen(volume_id));

  should_write = last_extent - session_start;
  set_733((char *) vol_desc.volume_space_size, should_write);
  set_723(vol_desc.volume_set_size, volume_set_size);
  set_723(vol_desc.volume_sequence_number, volume_sequence_number);
  set_723(vol_desc.logical_block_size, 2048);

  /*
   * The path tables are used by DOS based machines to cache directory
   * locations
   */
  set_733((char *) vol_desc.path_table_size, path_table_size);
  set_731(vol_desc.type_l_path_table, path_table[0]);
  set_731(vol_desc.opt_type_l_path_table, path_table[1]);
  set_732(vol_desc.type_m_path_table, path_table[2]);
  set_732(vol_desc.opt_type_m_path_table, path_table[3]);

  /*
   * Now we copy the actual root directory record
   */
  memcpy(vol_desc.root_directory_record, &root_record,
	 sizeof(vol_desc.root_directory_record));

  /*
   * The rest is just fluff.  It looks nice to fill in many of these fields,
   * though.
   */
  FILL_SPACE(volume_set_id);
  if(volset_id)  memcpy_max(vol_desc.volume_set_id,  volset_id,
			    strlen(volset_id));

  FILL_SPACE(publisher_id);
  if(publisher)  memcpy_max(vol_desc.publisher_id, publisher,
			    strlen(publisher));

  FILL_SPACE(preparer_id);
  if(preparer)  memcpy_max(vol_desc.preparer_id, preparer, strlen(preparer));

  FILL_SPACE(application_id);
  if(appid) memcpy_max(vol_desc.application_id, appid, strlen(appid));

  FILL_SPACE(copyright_file_id);
  if(copyright) memcpy_max(vol_desc.copyright_file_id, copyright,
			   strlen(copyright));

  FILL_SPACE(abstract_file_id);
  if(abstract) memcpy_max(vol_desc.abstract_file_id, abstract,
			  strlen(abstract));

  FILL_SPACE(bibliographic_file_id);
  if(biblio) memcpy_max(vol_desc.bibliographic_file_id, biblio,
			strlen(biblio));

  FILL_SPACE(creation_date);
  FILL_SPACE(modification_date);
  FILL_SPACE(expiration_date);
  FILL_SPACE(effective_date);
  vol_desc.file_structure_version[0] = 1;
  FILL_SPACE(application_data);

  memcpy(vol_desc.creation_date,  iso_time, 17);
  memcpy(vol_desc.modification_date,  iso_time, 17);
  memcpy(vol_desc.expiration_date, "0000000000000000", 17);
  memcpy(vol_desc.effective_date,  iso_time, 17);

  /*
   * if not a bootable cd do it the old way
   */
  xfwrite(&vol_desc, 1, 2048, outfile);
  last_extent_written++;
  return 0;
}

/*
 * Function to write the EVD for the disc.
 */
static int FDECL1(evd_write, FILE *, outfile)
{
  struct iso_primary_descriptor evol_desc;

  /*
   * Now write the end volume descriptor.  Much simpler than the other one
   */
  memset(&evol_desc, 0, sizeof(evol_desc));
  evol_desc.type[0] = ISO_VD_END;
  memcpy(evol_desc.id, ISO_STANDARD_ID, sizeof(ISO_STANDARD_ID) - 1);
  evol_desc.version[0] = 1;
  xfwrite(&evol_desc, 1, 2048, outfile);
  last_extent_written += 1;
  return 0;
}

/*
 * Function to write the path tables for the disc.  (Header previously said
 * "EVD" - copy/paste slip.)
 */
static int FDECL1(pathtab_write, FILE *, outfile)
{
  /*
   * Next we write the path tables
   */
  xfwrite(path_table_l, 1, path_blocks << 11, outfile);
  xfwrite(path_table_m, 1, path_blocks << 11, outfile);
  last_extent_written += 2*path_blocks;
  free(path_table_l);
  free(path_table_m);
  path_table_l = NULL;
  path_table_m = NULL;
  return 0;
}

/* Write the single-sector Rock Ridge extension (ER) record. */
static int FDECL1(exten_write, FILE *, outfile)
{
  xfwrite(extension_record, 1, SECTOR_SIZE, outfile);
  last_extent_written++;
  return 0;
}

/*
 * Functions to describe padding block at the start of the disc.
 */
int FDECL1(oneblock_size, int, starting_extent)
{
  last_extent++;
  return 0;
}

/*
 * Functions to describe padding block at the start of the disc.
 */
static int FDECL1(pathtab_size, int, starting_extent)
{
  /* L table first, M table right after; the "optional" slots stay 0. */
  path_table[0] = starting_extent;

  path_table[1] = 0;
  path_table[2] = path_table[0] + path_blocks;
  path_table[3] = 0;
  last_extent += 2*path_blocks;
  return 0;
}

/* Reserve the 16-sector system area at the start of the disc. */
static int FDECL1(padblock_size, int, starting_extent)
{
  last_extent += 16;
  return 0;
}

/*
 * Output-fragment generation hook: assign extents to all files and, for
 * Apple hybrid images, build the HFS side of the volume.
 */
static int file_gen()
{
#ifdef APPLE_HYB
  int	start_extent = last_extent;	/* orig ISO files start */
#endif /* APPLE_HYB */

  assign_file_addresses(root);

#ifdef APPLE_HYB
  /* put this here for the time being - maybe when I've worked out how to use
     Eric's new system for creating/writing parts of the image it may move
     to its own routine */
  if (apple_hyb)
    {
      int	Csize;			/* clump size for HFS vol */
      int	loop = CTC_LOOP;
      int	last_extent_save = last_extent;

      /* allocate memory for the libhfs/mkisofs extra info */
      hce = (hce_mem *)e_malloc(sizeof(hce_mem));

      hce->error = (char *)e_malloc(ERROR_SIZE);

      /* mark as unallocated for use later */
      hce->hfs_ce = hce->hfs_hdr = hce->hfs_map = 0;

      /* reserve space for the label partition - if it is needed */
      if (gen_pt)
	hce->hfs_map_size = HFS_MAP_SIZE;
      else
	hce->hfs_map_size = 0;

      /* set the initial factor to increase Catalog file size */
      hce->ctc_size = CTC;

      /* "create" the HFS volume (just the header, catalog/extents files)
	 if there's a problem with the Catalog file being too small,
	 we keep on increasing the size (up to CTC_LOOP) times and try
	 again. Unfortunately I don't know enough about the inner workings
	 of HFS, so I can't workout the size of the Catalog file in
	 advance (and I don't want to "grow" as it is normally allowed to),
	 therefore, this approach is a bit over the top as it involves
	 throwing away the "volume" we have created and trying again ...*/
      do
	{
	  hce->error[0] = '\0';

	  /* attempt to create the Mac volume */
	  Csize = make_mac_volume(root, start_extent);

	  /* if we have a problem ... */
	  if (Csize < 0)
	    {
	      /* we've made too many attempts, or got some other error */
	      if (loop == 0 || errno != HCE_ERROR)
		{
		  /* HCE_ERROR is not a valid errno value */
		  if (errno == HCE_ERROR)
		    errno = 0;

		  /* exit with the error */
		  if (*hce->error)
		    fprintf(stderr, "%s\n", hce->error);
		  err(1, "%s", hfs_error);
		}
	      else
		{
		  /* increase Catalog file size factor */
		  hce->ctc_size *= CTC;

		  /* reset the initial "last_extent" and try again */
		  last_extent = last_extent_save;
		}
	    }
	  else
	    /* everything OK - just carry on ... */
	    loop = 0;
	}
      while (loop--);

      hfs_extra = H_ROUND_UP(hce->hfs_tot_size)/SECTOR_SIZE;

      last_extent += hfs_extra;

      /* generate the Mac label and HFS partition maps */
      mac_boot.name = hfs_boot_file;

      /* only generate the partition tables etc. if we are making a
	 bootable CD - or if the -part option is given */
      if (gen_pt)
	{
	  if (gen_mac_label(&mac_boot))
	    {
	      if (*hce->error)
		fprintf(stderr, "%s\n", hce->error);
	      err(1, "%s", hfs_error);
	    }
	}

      /* set Autostart filename if required */
      if (autoname)
	{
	  if(autostart())
	    errx(1, "Autostart filename must less than 12 characters");
	}

      /* finished with any HFS type errors */
      free(hce->error);
      hce->error = 0;

      /* the ISO files need to start on a multiple of the HFS allocation
	 blocks, so find out how much padding we need */

      /* take into account alignment of files wrt HFS volume start */
      hfs_pad = V_ROUND_UP(start_extent*SECTOR_SIZE + (hce->hfs_hdr_size +
		hce->hfs_map_size)*HFS_BLOCKSZ, Csize)/SECTOR_SIZE;

      hfs_pad -= (start_extent + (hce->hfs_hdr_size +
		hce->hfs_map_size)/BLK_CONV);
    }
#endif /* APPLE_HYB */

  return 0;
}

/* Debug hook: dump the directory tree at high verbosity. */
static int dirtree_dump()
{
  if (verbose > 2)
  {
      dump_tree(root);
  }
  return 0;
}

/* Finish Rock Ridge CL/PL relocation entries and nlink counts. */
static int FDECL1(dirtree_fixup, int, starting_extent)
{
  if (use_RockRidge && reloc_dir)
	  finish_cl_pl_entries();

  if (use_RockRidge )
	  update_nlink_field(root);
  return 0;
}

/* Size hook: lay out extents for every directory in the tree. */
static int FDECL1(dirtree_size, int, starting_extent)
{
  assign_directory_addresses(root);
  return 0;
}

/* Size hook for the RR extension record: patch the root "." entry's CE
   field to point at the extension record's extent. */
static int FDECL1(ext_size, int, starting_extent)
{
  extern int extension_record_size;
  struct directory_entry * s_entry;
  extension_record_extent = starting_extent;
  s_entry = root->contents;
  /* The last 24 bytes of the root RR attributes hold the CE field. */
  set_733((char *) s_entry->rr_attributes + s_entry->rr_attr_size - 24,
	  extension_record_extent);
  set_733((char *) s_entry->rr_attributes + s_entry->rr_attr_size - 8,
	  extension_record_size);
  last_extent++;
  return 0;
}

/* Write hook: emit every directory in the tree. */
static int FDECL1(dirtree_write, FILE *, outfile)
{
  generate_iso9660_directories(root, outfile);
  return 0;
}

/* Cleanup hook: free the in-core directory entries. */
static int FDECL1(dirtree_cleanup, FILE *, outfile)
{
  free_directories(root);
  return 0;
}

/* Write the 16-sector system area (zero-filled, or HFS header for hybrids). */
static int FDECL1(padblock_write, FILE *, outfile)
{
  char			buffer[2048];
  int			i;
#ifdef APPLE_HYB
  int			n = 0;
#endif /* APPLE_HYB */

  memset(buffer, 0, sizeof(buffer));

#ifdef APPLE_HYB
  if (apple_hyb)
    {
      int	r;	/* HFS hdr output */
      int	tot_size = hce->hfs_map_size + hce->hfs_hdr_size;

      /* get size in CD blocks == 4xHFS_BLOCKSZ == 2048 */
      n = tot_size/BLK_CONV;
      r = tot_size%BLK_CONV;

      /* write out HFS volume header info */
      xfwrite(hce->hfs_map, tot_size, HFS_BLOCKSZ, outfile);

      /* write out any partial CD block */
      if (r)
	{
	  xfwrite(buffer, BLK_CONV-r, HFS_BLOCKSZ, outfile);
	  n++;
	}
    }

  /* write out the remainder of the ISO header */
  for(i=n; i<16; i++)
#else
  for(i=0; i<16; i++)
#endif /* APPLE_HYB */
    {
      xfwrite(buffer, 1, sizeof(buffer), outfile);
    }

  last_extent_written += 16;
  return 0;
}

#ifdef APPLE_HYB
/*
**	get_adj_size:	get the adjusted size of the volume with the HFS
**			allocation block size for each file
*/
int FDECL1(get_adj_size, int, Csize)
{
  struct deferred_write	*dw;
  int	size = 0;
  int	count = 0;

  /* loop through all the files finding the new total size */
  for(dw = dw_head; dw; dw = dw->next)
    {
      size += V_ROUND_UP(dw->size, Csize);
      count++;
    }

  /* crude attempt to prevent overflows - HFS can only cope with a maximum
     of about 65536 forks (actually less) - this will trap cases when we
     have far too many files */
  if (count >= 65536)
    return (-1);
  else
    return(size);
}

/*
**	adj_size:	adjust the ISO record entries for all files
**			based on the HFS allocation block size
*/
int FDECL3(adj_size, int, Csize, int, start_extent, int, extra)
{
  struct deferred_write	*dw;
  struct directory_entry *s_entry;
  int	size;

  /* get the adjusted start_extent (with padding) */
  /* take into account alignment of files wrt HFS volume start */
  start_extent = V_ROUND_UP(start_extent*SECTOR_SIZE + extra *HFS_BLOCKSZ,
		Csize)/SECTOR_SIZE;
  start_extent -= (extra/BLK_CONV);

  /* initialise file hash */
  flush_hash();

  /* loop through all files changing their starting blocks and finding
     any padding needed to be written out later */
  for(dw = dw_head; dw; dw = dw->next)
    {
      s_entry = dw->s_entry;
      s_entry->starting_block = dw->extent = start_extent;
      set_733((char *) s_entry->isorec.extent, start_extent);
      size = V_ROUND_UP(dw->size,
		Csize)/SECTOR_SIZE;
      dw->pad = size - ROUND_UP(dw->size)/SECTOR_SIZE;

      /* cache non-HFS files - as there may be multiple links to these
	 files (HFS files can't have multiple links). We will need to
	 change the starting extent of the other links later */
      if (!s_entry->hfs_ent)
	add_hash(s_entry);

      start_extent += size;
    }

  return(start_extent);
}

/*
**	adj_size_other:	adjust any non-HFS files that may be linked
**			to an existing file (i.e. not have a deferred_write
**			entry of its own
*/
void FDECL1(adj_size_other, struct directory *, dpnt)
{
  struct directory_entry * s_entry;
  struct file_hash *s_hash;

  while (dpnt)
    {
      s_entry = dpnt->contents;
      for(s_entry = dpnt->contents; s_entry; s_entry = s_entry->next)
	{
	  /* if it's an HFS file or a directory - then ignore (we're
	     after non-HFS files) */
	  if (s_entry->hfs_ent || (s_entry->isorec.flags[0] & 2))
	    continue;

	  /* find any cached entry and assign new starting extent */
	  s_hash = find_hash(s_entry->dev, s_entry->inode);
	  if(s_hash)
	    {
	      set_733((char *) s_entry->isorec.extent,
		      s_hash->starting_block);
	      /* not vital - but tidy */
	      s_entry->starting_block = s_hash->starting_block;
	    }
	}
      if(dpnt->subdir)
	{
	  adj_size_other(dpnt->subdir);
	}
      dpnt = dpnt->next;
    }
  /* clear file hash */
  flush_hash();
}
#endif /* APPLE_HYB */

/* Table of output fragments: {next, size_fn, generate_fn, write_fn}. */
struct output_fragment padblock_desc  = {NULL, padblock_size, NULL,     padblock_write};
struct output_fragment voldesc_desc   = {NULL, oneblock_size, root_gen, pvd_write};
struct output_fragment end_vol	      = {NULL, oneblock_size, NULL,     evd_write};
struct output_fragment pathtable_desc = {NULL, pathtab_size,  generate_path_tables, pathtab_write};
struct output_fragment dirtree_desc   = {NULL, dirtree_size,  NULL,     dirtree_write};
struct output_fragment dirtree_clean  = {NULL, dirtree_fixup, dirtree_dump, dirtree_cleanup};
struct output_fragment extension_desc = {NULL, ext_size,      NULL,     exten_write};
struct output_fragment files_desc     = {NULL, NULL,          file_gen, file_write};
```
```javascript
/**
* @license Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*    http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

// MODULES //

// Dispatcher that routes to the native add-on when supported, falling back to the JS implementation otherwise:
var dispatch = require( '@stdlib/strided/base/binary-addon-dispatch' ).ndarray;
var addon = require( './../src/addon.node' );
var js = require( './ndarray.js' );


// MAIN //

/**
* Adds each element in a strided array `x` to a corresponding element in a strided array `y` and assigns the results to elements in a strided array `z`.
*
* @name add
* @type {Function}
* @param {integer} N - number of indexed elements
* @param {*} dtypeX - `x` data type
* @param {Collection} x - input array
* @param {integer} strideX - `x` stride length
* @param {NonNegativeInteger} offsetX - starting `x` index
* @param {*} dtypeY - `y` data type
* @param {Collection} y - input array
* @param {integer} strideY - `y` stride length
* @param {NonNegativeInteger} offsetY - starting `y` index
* @param {*} dtypeZ - `z` data type
* @param {Collection} z - destination array
* @param {integer} strideZ - `z` stride length
* @param {NonNegativeInteger} offsetZ - starting `z` index
* @throws {TypeError} first argument must be an integer
* @throws {TypeError} third argument must be an array-like object
* @throws {TypeError} fourth argument must be an integer
* @throws {TypeError} fifth argument must be a nonnegative integer
* @throws {TypeError} seventh argument must be an array-like object
* @throws {TypeError} eighth argument must be an integer
* @throws {TypeError} ninth argument must be a nonnegative integer
* @throws {TypeError} eleventh argument must be an array-like object
* @throws {TypeError} twelfth argument must be an integer
* @throws {TypeError} thirteenth argument must be a nonnegative integer
* @throws {Error} insufficient arguments
* @throws {Error} too many arguments
* @throws {RangeError} third argument has insufficient elements based on the associated stride and the number of indexed elements
* @throws {RangeError} seventh argument has insufficient elements based on the associated stride and the number of indexed elements
* @throws {RangeError} eleventh argument has insufficient elements based on the associated stride and the number of indexed elements
* @throws {TypeError} unable to resolve a strided array function supporting the provided array argument data types
* @returns {Collection} `z`
*
* @example
* var Float64Array = require( '@stdlib/array/float64' );
*
* var x = new Float64Array( [ -2.0, 1.0, 3.0, -5.0, 4.0 ] );
* var y = new Float64Array( [ 1.0, 2.0, 3.0, 4.0, 5.0 ] );
* var z = new Float64Array( [ 0.0, 0.0, 0.0, 0.0, 0.0 ] );
*
* add( x.length, 'float64', x, 1, 0, 'float64', y, 1, 0, 'float64', z, 1, 0 );
* // z => <Float64Array>[ -1.0, 3.0, 6.0, -1.0, 9.0 ]
*/
var add = dispatch( addon, js );


// EXPORTS //

module.exports = add;
```
```python
from copy import deepcopy

import numpy as np
import unittest

import ray
import ray.rllib.algorithms.dqn as dqn
from ray.rllib.utils.test_utils import (
    check,
    check_compute_single_action,
    check_train_results,
    framework_iterator,
)


class TestDQN(unittest.TestCase):
    """Smoke tests for the DQN algorithm: building, training one iteration,
    and exploration behavior, across all supported frameworks."""

    @classmethod
    def setUpClass(cls) -> None:
        # One Ray runtime shared by every test in this class.
        ray.init()

    @classmethod
    def tearDownClass(cls) -> None:
        ray.shutdown()

    def test_dqn_compilation(self):
        """Test whether DQN can be built on all frameworks."""
        num_iterations = 1
        config = (
            dqn.dqn.DQNConfig()
            .environment("CartPole-v1")
            .env_runners(num_env_runners=2)
            # Learn from the very first sampled timestep so a single
            # train() call already exercises the update path.
            .training(num_steps_sampled_before_learning_starts=0)
        )

        for _ in framework_iterator(config):
            # Double-dueling DQN (the config's defaults).
            print("Double-dueling")
            algo = config.build()
            for i in range(num_iterations):
                results = algo.train()
                check_train_results(results)
                print(results)

            check_compute_single_action(algo)
            algo.stop()

            # Rainbow: distributional (num_atoms), noisy nets, double-Q,
            # dueling heads, and n-step returns on top of the base config.
            print("Rainbow")
            rainbow_config = deepcopy(config).training(
                num_atoms=10, noisy=True, double_q=True, dueling=True, n_step=5
            )
            algo = rainbow_config.build()
            for i in range(num_iterations):
                results = algo.train()
                check_train_results(results)
                print(results)

            check_compute_single_action(algo)

            algo.stop()

    def test_dqn_compilation_integer_rewards(self):
        """Test whether DQN can be built on all frameworks.
        Unlike the previous test, this uses an environment with integer
        rewards in order to test that type conversions are working
        correctly."""
        num_iterations = 1
        config = (
            dqn.dqn.DQNConfig()
            .environment("Taxi-v3")
            .env_runners(num_env_runners=2)
            .training(num_steps_sampled_before_learning_starts=0)
        )

        for _ in framework_iterator(config):
            # Double-dueling DQN.
            print("Double-dueling")
            algo = config.build()
            for i in range(num_iterations):
                results = algo.train()
                check_train_results(results)
                print(results)

            check_compute_single_action(algo)
            algo.stop()

            # Rainbow.
            print("Rainbow")
            rainbow_config = deepcopy(config).training(
                num_atoms=10, noisy=True, double_q=True, dueling=True, n_step=5
            )
            algo = rainbow_config.build()
            for i in range(num_iterations):
                results = algo.train()
                check_train_results(results)
                print(results)

            check_compute_single_action(algo)

            algo.stop()

    def test_dqn_exploration_and_soft_q_config(self):
        """Tests, whether a DQN Agent outputs exploration/softmaxed actions."""
        config = (
            dqn.dqn.DQNConfig()
            .environment("FrozenLake-v1")
            # No remote workers: actions are computed on the local algo only.
            .env_runners(num_env_runners=0)
            .environment(env_config={"is_slippery": False, "map_name": "4x4"})
        ).training(num_steps_sampled_before_learning_starts=0)

        # FrozenLake observations are discrete; state 0 is the start tile.
        obs = np.array(0)

        # Test against all frameworks.
        for _ in framework_iterator(config):
            # Default EpsilonGreedy setup.
            algo = config.build()
            # Setting explore=False should always return the same action.
            a_ = algo.compute_single_action(obs, explore=False)
            for _ in range(50):
                a = algo.compute_single_action(obs, explore=False)
                check(a, a_)
            # explore=None (default: explore) should return different actions.
            actions = []
            for _ in range(50):
                actions.append(algo.compute_single_action(obs))
            # false=True asserts the std is NOT ~0, i.e. actions vary.
            check(np.std(actions), 0.0, false=True)
            algo.stop()

            # Low softmax temperature. Behaves like argmax
            # (but no epsilon exploration).
            config.env_runners(
                exploration_config={"type": "SoftQ", "temperature": 0.000001}
            )
            algo = config.build()
            # Due to the low temp, always expect the same action.
            actions = [algo.compute_single_action(obs)]
            for _ in range(50):
                actions.append(algo.compute_single_action(obs))
            check(np.std(actions), 0.0, decimals=3)
            algo.stop()

            # Higher softmax temperature.
            # NOTE(review): mutates the exploration_config dict in place
            # rather than going through .env_runners(); presumably picked up
            # by the subsequent build() — confirm against the Config API.
            config.exploration_config["temperature"] = 1.0
            algo = config.build()

            # Even with the higher temperature, if we set explore=False, we
            # should expect the same actions always.
            a_ = algo.compute_single_action(obs, explore=False)
            for _ in range(50):
                a = algo.compute_single_action(obs, explore=False)
                check(a, a_)

            # Due to the higher temp, expect different actions avg'ing
            # around 1.5.
            actions = []
            for _ in range(300):
                actions.append(algo.compute_single_action(obs))
            check(np.std(actions), 0.0, false=True)
            algo.stop()

            # With Random exploration.
            config.env_runners(exploration_config={"type": "Random"}, explore=True)
            algo = config.build()
            actions = []
            for _ in range(300):
                actions.append(algo.compute_single_action(obs))
            check(np.std(actions), 0.0, false=True)
            algo.stop()


if __name__ == "__main__":
    import pytest
    import sys

    sys.exit(pytest.main(["-v", __file__]))
```
Major Robert Alan McFarland (born 9 August 1949 in Plumbridge, County Tyrone) is an Independent Unionist politician in Northern Ireland, who was a Member of the Legislative Assembly (MLA) for North Down from 1998 to 2011. He attended Rockport School near Holywood and Campbell College in east Belfast. After a short career in banking he was admitted to the Royal Military Academy Sandhurst and was commissioned into the Royal Tank Regiment in 1974. He is also a member of Mensa. He retired from the Army in 1992 with the rank of major and became a Parliamentary Assistant to James Molyneaux MP and the Rev. Martin Smyth MP. In 1995, he was selected by the Ulster Unionists to contest the North Down by-election over the favourite for the nomination, Sir Reg Empey, but was beaten in the election by Robert McCartney. He was again beaten by McCartney in the 1997 general election, but by a narrower margin. In 1996, he was elected to the Northern Ireland Forum for Political Dialogue for North Down and was involved in the talks process that resulted in the Belfast Agreement of 1998. He was one of three UUP members returned to the Assembly for North Down in the first elections to the body in 1998 and he retained his seat in the November 2003 election and March 2007 election. He was, until reconstitution in 2006, one of the UUP representatives on the Northern Ireland Policing Board. Following the resignation of David Trimble as UUP leader in 2005 he stood as a candidate in the contest to succeed him and was narrowly beaten by Sir Reg Empey. Sir Reg appointed McFarland as the party's chief negotiator following the election, in which role McFarland served through the period before restoration of devolution in Northern Ireland. In 2007, following the restoration of devolution the details of a row between McFarland and Empey were leaked to the press. 
It is believed that McFarland turned down the nomination to be Minister of Health when he discovered that Empey planned to take the UUP's other ministerial portfolio himself, insisting that the party leader should concentrate on rebuilding the party from outside the Northern Ireland Executive. Empey did not back down from his stance and appointed Michael McGimpsey to the Department of Health instead. Resignation McFarland announced his resignation from the Ulster Unionist Party on 30 March 2010, five days after the resignation by North Down MP Lady Sylvia Hermon (also formerly UUP), citing his disagreement with the UUP electoral pact with the Conservative Party. He made his intentions clear to continue to sit as an independent in the Assembly. In the 2011 Assembly Election, McFarland lost his seat. References External links NI Assembly biography AlanMcFarland.org 1949 births Living people Graduates of the Royal Military Academy Sandhurst Royal Tank Regiment officers Politicians from County Tyrone Ulster Unionist Party MLAs Independent members of the Northern Ireland Assembly Members of the Northern Ireland Forum Northern Ireland MLAs 1998–2003 Northern Ireland MLAs 2003–2007 Northern Ireland MLAs 2007–2011 People educated at Campbell College People educated at Rockport School Mensans
The Edward T. Gignoux U.S. Courthouse is a historic courthouse building at 156 Federal Street in Portland, Maine. It is the courthouse for the United States District Court for the District of Maine. Building history When it was completed in 1911, the U.S. Courthouse in Portland, now known as the Edward T. Gignoux U.S. Courthouse, was the first federal courthouse in Maine. Its national stature combined with its distinctive Italian Renaissance Revival architecture brought prestige to Portland's civic center. Designed by James Knox Taylor, Supervising Architect of the U.S. Treasury Department, the courthouse's classical details complement its neighbors surrounding Lincoln Park, which include the U.S. Custom House (1872), Cumberland County Courthouse (1910), and Portland City Hall (1912). The U.S. Courthouse was listed in the National Register of Historic Places in 1974. Construction of the U.S. Post Office Building near Lincoln Park in 1868 helped establish the area as a location for public buildings at the turn of the twentieth century. By 1908, the federal government had acquired a prominent site for a new courthouse adjacent to the park, and construction began that year. Taylor designed a trapezoidal building with an interior courtyard to be constructed in two phases. The U-shaped first phase of construction was completed in 1911. In 1931–32, Taylor's original design was completed, closing the U, under the direction of the Office of the Supervising Architect under James A. Wetmore. The new construction provided space for a post office and additional offices on the upper floors. In 1988, the U.S. Courthouse was renamed in honor of Judge Edward T. Gignoux, a veteran of 26 years on the bench, who had gained notoriety when he presided over the contempt trial of activists who attempted to disrupt the Democratic National Convention in Chicago in 1968. In 1996, the courthouse underwent extensive modernization, which added two new courtrooms in the 1931-32 addition.
The principal features and details of the first and second floors of the 1911 construction were rehabilitated and restored. As a result of the project, the Edward T. Gignoux U.S. Courthouse was awarded an Institutional Preservation Award from Greater Portland Landmarks in 1999. Architecture The Edward T. Gignoux U.S. Courthouse's Italian Renaissance Revival style reflects its architect's belief that classicism was well suited for federal buildings. Entirely faced with New England granite, the building is composed of two stories above a raised stone base. Each level is articulated on the exterior through the use of subtle variations in ornamentation and textures. The first story is characterized by channeled stone. It is distinguished from the second story by a stringcourse and by the second story's smooth masonry. Differentiation also occurs in the fenestration. While the first floor has recessed, rectangular windows with simple moldings, the second-story windows are larger and elaborately detailed with classical moldings, balcony balustrades, and crowning triangular and segmented pediments, some of which are set within large arched niches with keystones. A continuous frieze, dentil molding, and cornice finish the top of the wall, where a parapet caps the composition. A succession of circular dormer windows punctuates the attic story of the slate-shingled mansard roof. The exterior articulation and ornamentation of the 1931-32 addition faithfully replicates the architectural detail of the original 1911 construction. The building's trapezoidal plan occupies an entire city block bounded by Federal, Newbury, Pearl, and Market Streets. The building's original U-shaped plan comprises the southwest portion of the block, with the 1931-32 addition completing the northeast side and enclosing the central courtyard. The courtyard is accessed through a porte-cochere on Federal Street, and features buff-colored brick walls with granite stringcourses and keystones for the walls. 
The building's formal entrance, located at the angled corner at Federal and Market Streets, is marked by a large, triangular pediment that surmounts a Doric frieze and engaged columns decorated with banded rustication. The entrance leads into the elliptical Rotunda, an elegant and open two-story foyer with refined classical detailing. The Rotunda features a curving marble staircase with a balustrade of thin cast-iron balusters, rising to the second floor along the perimeter of the room. The elegant public spaces are symmetrically composed using classical proportions and details for the bases, wainscoting, and crown molding. Stained wood finishes, such as oak and pine, are used in the courtrooms, corridors, and judicial chambers. Marble finishes and terrazzo floors are reserved for the courtrooms and the corridors in the first floor. Interior finishes in the 1931-32 addition reveal the reduced or — stripped — classical style of the era, as seen in the abstracted designs in the terrazzo flooring and the flattened moldings used for the door framing. In 1992, a major renovation project began to modernize and renovate the historic fabric of the building. Space in the original 1911 floor plan was converted into the Court Clerk offices and a new Magistrate Hearing room. District Courtroom No. 1 was carefully restored to its original design — including arched casement windows, period light fixtures, original color palette, and replicated plaster moldings for the ceiling. The renovations to the 1932 east wing allowed for significant alterations, most notably for the new District Courtroom No. 2, which features an open, two-story space illuminated by skylights and contemporary materials and finishes. The Edward T. Gignoux U.S. Courthouse remains a fine example of early twentieth century Federal architecture and an important historic landmark in Portland. Significant events 1903-08: The federal government purchases the site for a new courthouse. 
1908-11: The first building phase of the U.S. Courthouse is completed. 1931-32: The U-shaped courthouse is enclosed with a connecting wing, following the building's original plan. 1974: The building is listed in the National Register of Historic Places. 1988: The U.S. Courthouse is named in honor of Judge Edward T. Gignoux. 1992-96: A modernization project restores the original District Courtroom, and adds courtrooms and other facilities. 1999: The building receives the Institutional Preservation Award from Greater Portland Landmarks. Building facts Architects: James Knox Taylor; James A. Wetmore Construction Dates: 1908-11; 1931–32 Landmark Status: Listed in the National Register of Historic Places Location: 156 Federal Street Architectural Style: Italian Renaissance Revival Primary Materials: Granite Prominent Features: Rotunda and spiral staircase; District Courtroom See also National Register of Historic Places listings in Portland, Maine Pullen Fountain References Attribution Government buildings completed in 1908 Courthouses in Maine Federal courthouses in the United States Government buildings in Portland, Maine Renaissance Revival architecture in Maine Italian Renaissance Revival architecture in the United States Courthouses on the National Register of Historic Places in Maine National Register of Historic Places in Portland, Maine Historic district contributing properties in Maine 1908 establishments in Maine
The Global Interdependence Center (GIC) is a Philadelphia-based non-profit organization that holds conferences and programming to increase global dialogue and promote free trade, in order to improve cooperation and understanding among nation states, with the goal of reducing international conflicts and improving worldwide living standards. History The GIC was founded in 1976 during Philadelphia's Bicentennial Celebration at convocation of leading United Nations and U.S. officials. Its vision was affirmed in a Declaration of Interdependence, which was crafted by historian Henry Steele Commager and signed by international dignitaries. Early leaders in the organization included Nobel Laureate Dr. Lawrence Klein, Benjamin Franklin Professor of Economics and Finance at the University of Pennsylvania. Notable Programming GIC hosts and sponsors many programs throughout the year, both in Philadelphia, where it is based, and abroad. Programming comprises international trips, domestic conferences and roundtable discussions. Included among these programs are: Annual Monetary and Trade Conference: This gathering together of global leaders has been held since 1979. Past speakers include Paul Volcker, chair of the International Accounting Standards Board and former chairman of the Board of Governors of the Federal Reserve System, Lawrence Kudlow and Robert Hormats, noted free-market economists, as well as chief economists for the International Monetary Fund and representatives from global finance and trade organizations. The Central Banking Series: This series assembles well-known bankers to speak on their countries' monetary policies, their views on U.S. economics and the business ramifications of monetary systems. Recent speakers have included Christian Noyer, governor of the Banque de France and Richard Fisher, president of the Federal Reserve Bank of Dallas. GIC Abroad: Delegations have visited France, Ireland, Estonia, Chile, South Africa and Israel. 
Delegations generally include business leaders from the United States as well as representatives from central banks of various nations. Celebration of Interdependence: Formerly the Annual Black Tie Gala, the annual Celebration in Philadelphia sees the presentation of the Global Citizen Award to individuals who demonstrate exceptional service to the increasingly global community. Recent honorees include Anthony Santomero, former president of the Philadelphia Federal Reserve Bank, in 2005, Michael Heavener, executive vice president and head of Wachovia Bank's Global Financial Institution and Trade Division, in 2006, Dr. Constantine Papadakis, president of Drexel University, in 2007, Edward G. Boehne, former president of the Philadelphia Federal Reserve Bank, in 2008, and long-time Board Members Sharon Javie and Bill Dunkelberg in 2012. In addition, Dr. Roger W. Ferguson Jr., president and CEO of TIAA-CREF, was awarded the Fred Heldring Global Leadership Award in 2006. Recent Programming and References External links Company Website Global Interdependence Center at Condé Nast Portfolio Charities based in Pennsylvania Conferences in the United States
```makefile
################################################################################
#
# kodi-vfs-libarchive
#
################################################################################

# Release tag of the upstream add-on; "Matrix" tags track the Kodi v19 branch.
KODI_VFS_LIBARCHIVE_VERSION = 19.2.0-Matrix
# Fetch sources from the xbmc/vfs.libarchive GitHub repository at that tag.
KODI_VFS_LIBARCHIVE_SITE = $(call github,xbmc,vfs.libarchive,$(KODI_VFS_LIBARCHIVE_VERSION))
KODI_VFS_LIBARCHIVE_LICENSE = GPL-2.0+
KODI_VFS_LIBARCHIVE_LICENSE_FILES = LICENSE.md
# Must be built after Kodi itself and the compression/crypto libraries
# this VFS add-on links against.
KODI_VFS_LIBARCHIVE_DEPENDENCIES = \
	bzip2 \
	kodi \
	libarchive \
	lz4 \
	lzo \
	openssl \
	xz \
	zlib

# Standard CMake-based package; build/install steps come from the
# cmake-package infrastructure.
$(eval $(cmake-package))
```
```csharp
using System;
using System.Collections.Generic;
using UnityEngine;

namespace Microsoft.MixedReality.Toolkit.UI
{
    /// <summary>
    /// Profile container for theme and for loading theme settings
    /// </summary>
    [Serializable]
    public class VisualProfile
    {
        /// <summary>
        /// GameObject to Target
        /// </summary>
        public GameObject Target;

        /// <summary>
        /// Theme definition to build
        /// </summary>
        public Theme Theme;

        /// <summary>
        /// Create and initialize Theme Engines with the associated Target and Theme property
        /// </summary>
        /// <returns>List of Theme Engine instances</returns>
        public List<InteractableThemeBase> CreateThemeEngines()
        {
            var engines = new List<InteractableThemeBase>();

            // No theme assigned => nothing to build; hand back an empty list.
            if (Theme == null)
            {
                return engines;
            }

            foreach (var themeDefinition in Theme.Definitions)
            {
                engines.Add(InteractableThemeBase.CreateAndInitTheme(themeDefinition, Target));
            }

            return engines;
        }
    }
}
```
```objective-c
/*

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"),
  to deal in the Software without restriction, including without limitation
  the rights to use, copy, modify, merge, publish, distribute, sublicense,
  and/or sell copies of the Software, and to permit persons to whom
  the Software is furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included
  in all copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
  OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  DEALINGS IN THE SOFTWARE.

*/

#ifndef DILL_CR_INCLUDED
#define DILL_CR_INCLUDED

#include <stdint.h>

#include "list.h"
#include "qlist.h"
#include "rbtree.h"
#include "slist.h"

#define DILL_DISABLE_RAW_NAMES
#include "libdillimpl.h"

/* The coroutine. The memory layout looks like this:

   +-------------------------------------------------------------+---------+
   |                            stack                            | dill_cr |
   +-------------------------------------------------------------+---------+

   - dill_cr contains generic book-keeping info about the coroutine
   - the stack is a standard C stack; it grows downwards (at the moment,
     libdill doesn't support microarchitectures where stacks grow upwards)

*/
struct dill_cr {
    /* When the coroutine is ready for execution but not running yet,
       it lives on this list (ctx->ready). 'id' is the result value to return
       from dill_wait() when the coroutine is resumed. Additionally, errno
       will be set to 'err'. */
    struct dill_slist ready;
    /* Virtual function table. */
    struct dill_hvfs vfs;
    int id;
    int err;
    /* When the coroutine is suspended 'ctx' holds the context (registers
       and such). */
    sigjmp_buf ctx;
    /* If the coroutine is blocked, here's the list of the clauses it's
       waiting for. */
    struct dill_slist clauses;
    /* A list of coroutines belonging to a particular bundle. */
    struct dill_list bundle;
    /* There are two possible reasons to disable blocking calls.
       1. The coroutine is being closed by its owner.
       2. The execution is happening within the context of an hclose() call. */
    unsigned int no_blocking1 : 1;
    unsigned int no_blocking2 : 1;
    /* Set when the coroutine has finished its execution. */
    unsigned int done : 1;
    /* If true, the coroutine was launched with go_mem. */
    unsigned int mem : 1;
    /* When the coroutine handle is being closed, this points to the
       coroutine that is doing the hclose() call. */
    struct dill_cr *closer;
#if defined DILL_VALGRIND
    /* Valgrind stack identifier. This way, valgrind knows which areas of
       memory are used as stacks, and so it doesn't produce spurious
       warnings. Well, sort of. The mechanism is not perfect, but it's
       still better than nothing. */
    int sid;
#endif
#if defined DILL_CENSUS
    /* Census record corresponding to this coroutine. */
    struct dill_census_item *census;
    size_t stacksz;
#endif
    /* Clang assumes that the client stack is aligned to 16-bytes on x86-64
       architectures. To achieve this, we align this structure (with the
       added benefit of a minor optimization). */
} __attribute__((aligned(16)));

/* Per-process (or per-context) coroutine bookkeeping. */
struct dill_ctx_cr {
    /* Currently running coroutine. */
    struct dill_cr *r;
    /* List of coroutines ready for execution. */
    struct dill_qlist ready;
    /* All active timers. */
    struct dill_rbtree timers;
    /* Last time poll was performed. */
    int64_t last_poll;
    /* The main coroutine. We don't control the creation of the main
       coroutine's stack, so we have to store this info here instead of
       the top of the stack. */
    struct dill_cr main;
#if defined DILL_CENSUS
    struct dill_slist census;
#endif
};

struct dill_clause {
    /* The coroutine that owns this clause. */
    struct dill_cr *cr;
    /* List of the clauses the coroutine is waiting on. See
       dill_cr::clauses. */
    struct dill_slist item;
    /* Number to return from dill_wait() if this clause triggers. */
    int id;
    /* Function to call when this clause is canceled. */
    void (*cancel)(struct dill_clause *cl);
};

/* Timer clause. */
struct dill_tmclause {
    struct dill_clause cl;
    /* An item in dill_ctx_cr::timers. */
    struct dill_rbtree_item item;
};

/* File descriptor clause. */
struct dill_fdclause;

int dill_ctx_cr_init(struct dill_ctx_cr *ctx);
void dill_ctx_cr_term(struct dill_ctx_cr *ctx);

/* When dill_wait() is called next time, the coroutine will wait (among
   other clauses) on this clause. 'id' must not be negative. 'cancel' is
   a function to be called when the clause is canceled without being
   triggered. */
void dill_waitfor(struct dill_clause *cl, int id,
    void (*cancel)(struct dill_clause *cl));

/* Suspend running coroutine. Move to executing different coroutines.
   The coroutine will be resumed once one of the clauses previously added
   by dill_waitfor() is triggered. When that happens, all the clauses,
   whether triggered or not, will be canceled. The function returns the ID
   of the triggered clause or -1 on error. In either case, it sets errno
   to 0 to indicate success or to a non-zero value to indicate error. */
int dill_wait(void);

/* Schedule a previously suspended coroutine for execution. Keep in mind
   that this doesn't immediately run it, it just puts it into the coroutine
   ready queue. It will cause dill_wait() to return the id supplied in
   dill_waitfor(). */
void dill_trigger(struct dill_clause *cl, int err);

/* Add a timer to the list of active clauses. */
void dill_timer(struct dill_tmclause *tmcl, int id, int64_t deadline);

/* Returns 0 if blocking functions are allowed. Returns -1 and sets errno
   to ECANCELED otherwise. */
int dill_canblock(void);

/* When set to 1, blocking calls return ECANCELED. Returns the old value
   of the flag. */
int dill_no_blocking(int val);

/* Cleans cached info about the fd. */
int dill_clean(int fd);

#endif
```
Shtul () is a rural locality (a selo) and the administrative centre of Shtulsky Selsoviet, Kurakhsky District, Republic of Dagestan, Russia. The population was 258 as of 2010. There are 3 streets. Geography Shtul is located southeast of Kurakh (the district's administrative centre) by road. Kutul and Kurakh are the nearest rural localities. Nationalities Lezgins live there. References Rural localities in Kurakhsky District
```php
<?php

/**
 * Integration test for the LDAP user backend: fetches the jpegPhoto
 * attribute from LDAP and verifies that valid images become the user's
 * avatar while invalid image types are rejected without errors.
 */

namespace OCA\User_LDAP\Tests\Integration\Lib\User;

use OCA\User_LDAP\FilesystemHelper;
use OCA\User_LDAP\Mapping\UserMapping;
use OCA\User_LDAP\Tests\Integration\AbstractIntegrationTest;
use OCA\User_LDAP\User\DeletedUsersIndex;
use OCA\User_LDAP\User\Manager;
use OCA\User_LDAP\User\User;
use OCA\User_LDAP\User_LDAP;
use OCA\User_LDAP\UserPluginManager;
use OCP\IAvatarManager;
use OCP\Image;
use Psr\Log\LoggerInterface;

require_once __DIR__ . '/../../Bootstrap.php';

class IntegrationTestUserAvatar extends AbstractIntegrationTest {
	/** @var UserMapping */
	protected $mapping;

	/**
	 * prepares the LDAP environment and sets up a test configuration for
	 * the LDAP backend.
	 */
	public function init() {
		// Seed the LDAP directory with the test users before the generic setup.
		require(__DIR__ . '/../../setup-scripts/createExplicitUsers.php');
		parent::init();
		// Start from an empty DN-to-username mapping table.
		$this->mapping = new UserMapping(\OC::$server->getDatabaseConnection());
		$this->mapping->clear();
		$this->access->setUserMapper($this->mapping);
		$userBackend = new User_LDAP($this->access, \OC::$server->getNotificationManager(), \OC::$server->get(UserPluginManager::class), \OC::$server->get(LoggerInterface::class), \OC::$server->get(DeletedUsersIndex::class));
		\OC_User::useBackend($userBackend);
	}

	/**
	 * A method that does the common steps of test cases 1 and 2. The
	 * evaluation is not happening here.
	 *
	 * @param string $dn       LDAP distinguished name of the test user
	 * @param string $username internal (ownCloud/Nextcloud) user id to map to
	 * @param string $image    raw image bytes to store in jpegPhoto
	 */
	private function execFetchTest($dn, $username, $image) {
		$this->setJpegPhotoAttribute($dn, $image);

		// assigns our self-picked oc username to the dn
		$this->mapping->map($dn, $username, 'fakeUUID-' . $username);

		// initialize home folder and make sure that the user will update
		// also remove a possibly existing avatar
		\OC_Util::tearDownFS();
		\OC_Util::setupFS($username);
		\OC::$server->getUserFolder($username);
		// Clearing the last-refresh marker forces updateAvatar() to re-read
		// the avatar from LDAP instead of using cached state.
		\OC::$server->getConfig()->deleteUserValue($username, 'user_ldap', User::USER_PREFKEY_LASTREFRESH);
		if (\OC::$server->get(IAvatarManager::class)->getAvatar($username)->exists()) {
			\OC::$server->get(IAvatarManager::class)->getAvatar($username)->remove();
		}

		// finally attempt to get the avatar set
		$user = $this->userManager->get($dn);
		$user->updateAvatar();
	}

	/**
	 * tests whether an avatar can be retrieved from LDAP and stored correctly
	 *
	 * @return bool true when the avatar was stored
	 */
	protected function case1() {
		$image = file_get_contents(__DIR__ . '/../../data/avatar-valid.jpg');
		$dn = 'uid=alice,ou=Users,' . $this->base;
		$username = 'alice1337';

		$this->execFetchTest($dn, $username, $image);

		return \OC::$server->get(IAvatarManager::class)->getAvatar($username)->exists();
	}

	/**
	 * tests whether an image received from LDAP which is of an invalid file
	 * type is dealt with properly (i.e. not set and not dying).
	 *
	 * @return bool true when no avatar ended up being stored
	 */
	protected function case2() {
		// a GIF test image (gif by Pmspinner; original source URL redacted)
		$image = file_get_contents(__DIR__ . '/../../data/avatar-invalid.gif');
		$dn = 'uid=boris,ou=Users,' . $this->base;
		$username = 'boris7844';

		$this->execFetchTest($dn, $username, $image);

		return !\OC::$server->get(IAvatarManager::class)->getAvatar($username)->exists();
	}

	/**
	 * This writes an image to the 'jpegPhoto' attribute on LDAP.
	 *
	 * @param string $dn
	 * @param string $image An image read via file_get_contents
	 * @throws \OC\ServerNotAvailableException
	 */
	private function setJpegPhotoAttribute($dn, $image) {
		$changeSet = ['jpegphoto' => $image];
		// NOTE(review): ldap_mod_add fails if the attribute already has a
		// value — presumably fine here because the fixture users start
		// without one; confirm against createExplicitUsers.php.
		ldap_mod_add($this->connection->getConnectionResource(), $dn, $changeSet);
	}

	// Builds the user Manager under test with real server services.
	protected function initUserManager() {
		$this->userManager = new Manager(
			\OC::$server->getConfig(),
			new FilesystemHelper(),
			\OC::$server->get(LoggerInterface::class),
			\OC::$server->get(IAvatarManager::class),
			new Image(),
			\OC::$server->getDatabaseConnection(),
			\OC::$server->getUserManager(),
			\OC::$server->getNotificationManager()
		);
	}

	/**
	 * sets up the LDAP configuration to be used for the test
	 */
	protected function initConnection() {
		parent::initConnection();
		$this->connection->setConfiguration([
			'ldapUserFilter' => 'objectclass=inetOrgPerson',
			'ldapUserDisplayName' => 'displayName',
			'ldapGroupDisplayName' => 'cn',
			'ldapLoginFilter' => 'uid=%uid',
		]);
	}
}

// Connection parameters are provided by Bootstrap.php (CLI arguments).
/** @var string $host */
/** @var int $port */
/** @var string $adn */
/** @var string $apwd */
/** @var string $bdn */
$test = new IntegrationTestUserAvatar($host, $port, $adn, $apwd, $bdn);
$test->init();
$test->run();
```
John Peter Kohn Jr. (December 27, 1902 – November 27, 1993) was a justice of the Supreme Court of Alabama from May to November 1968. Born in Montgomery, Alabama, Kohn received his law degree from the University of Alabama in 1925. He served in the United States Army during World War II, and thereafter served as county attorney for Montgomery County until 1964. A 1963 letter from Kohn to gubernatorial advisor J. Kirkman Jackson cautioned the governor not to "go over the tight line" set by United States District Judge Seybourn Harris Lynne in enjoining governor George Wallace from preventing African-American students from enrolling at the University of Alabama. In May 1968 Governor Lurleen Wallace appointed Kohn to a seat on the Alabama Supreme Court vacated by the death of Justice John L. Goodwyn, until the election to fill the seat later that year. References 1902 births 1993 deaths People from Montgomery, Alabama University of Alabama alumni United States Army personnel of World War II Justices of the Supreme Court of Alabama
Museum-reserve of Alexander Sergeevich Pushkin "Boldino" is a large Russian literary Museum, located in the complex of buildings of the family estate of Pushkin in the village of Bolshoye Boldino of the Nizhny Novgorod region, which is associated with one of the most fruitful periods of the poet's work, the "Boldino autumn" (September–November 1830). History From 1619 the "patrimony in Arzamas district in Zalesnoye going for Sadkovskii gate, the village of Boldino, which was a village Zavorotnyi under a large Mordovian black forest..." was listed as belonging to F. F. Pushkin — a participant in the protection of Moscow in 1612. From 1840, Boldino was owned by the grandfather of Alexander Pushkin, Lev Aleksandrovich Pushkin. At the beginning of the 19th century, the estate was divided between Sergei Lvovich and Vasily Lvovich Pushkin, the poet's father and uncle. Museum The Boldino reserve and Pushkin Museum were created in stages. It took a lot of time to restore the original landlord's house, its interiors, documents and things. The Park around the mansion is also centuries old. It was under these trees that the poet walked, rhyming lines for his poems. The fully renovated complex of the manor house, which includes a house, a kitchen, household services and servants' quarters, was ready in 1990. Work was done on the improvement of the Boldino Church of the Assumption and a house in the village of Lvovka, where the poet also stayed. The Museum, dedicated to the Boldino period of the poet's life and work, is located in the former manor house of the Pushkins. After the death of Sergei Lvovich (in 1849), the estate passed to Lev Sergeevich, the poet's brother. In 1911 the estate was purchased by the Treasury. Since 1949, this land has been a Museum-reserve of Pushkin. Now it includes a memorial estate in the village of Bolshoye Boldino, the grove, and the estate of the poet's son A. A. Pushkin in the village of Lvovka. The Boldino reserve and Pushkin Museum were created in stages.
It took a lot of time to restore the original landlord's house, its interiors, documents and things. The Park around the mansion is also centuries old. It was under these trees that the poet walked, rhyming lines for his poems. The fully renovated complex of the manor house, which includes a house, a kitchen, household services and servants' quarters, was ready in 1990. Work was done on the improvement of the Boldino Church of the Assumption and a house in the village of Lvovka, where the poet also stayed. The Museum, dedicated to the Boldino period of the poet's life and work, is located in the former manor house of the Pushkins. After the death of Sergei Lvovich (in 1849), the estate passed to Lev Sergeevich, the poet's brother. In 1911 the estate was purchased by the Treasury. Since 1949, this land has been a Museum-reserve of Pushkin. Now it includes a memorial estate in the village of Bolshoye Boldino, the grove, and the estate of the poet's son A. A. Pushkin in the village of Lvovka. Museum now In 1973, Boldino was included in the route of the annual all-Union Pushkin poetry festival. The Museum-reserve annually takes part in the preparation and holding of the all-Russian festival of Opera and ballet art "boldinskaya autumn". For visitors to the Museum-reserve, in addition to the review and thematic excursions, programs of folk-ethnographic character have been developed: "Wide Maslenitsa", "Village gatherings", "Yuletide divination". With the involvement of innovative methods of work, the Museum has developed additional programs such as "Ball of the Pushkin era", "In the mirror of two centuries", "Lesson in the parish school", the theatrical tour "History of the village of Goryukhino...", the game program for children "On green, on a meadow", and the program for newlyweds "A Wedding in Boldino". Visitors to the Museum have the opportunity to take pictures in costumes of the Pushkin era and to take a ride in a phaeton.
A "Fairy-tale living room" has been opened on the territory of the Museum-reserve, hosting an exhibition of creative works by children who take part in the Museum's clubs, as well as works sent to the Museum by children from other cities of Russia. It also presents a puppet "Pushkin ball", whose dolls were made of salted dough by pupils of one of the Nizhny Novgorod boarding schools; the children perform the fairy tales of A. S. Pushkin. In addition, visitors to the living room can leave behind a keepsake craft made with their own hands from the materials provided,
```xml <vector xmlns:android="path_to_url" android:height="34.0dp" android:tint="?attr/colorControlNormal" android:viewportHeight="15" android:viewportWidth="15" android:width="34.0dp"> <path android:fillColor="@android:color/white" android:pathData="M7 12L7 5.37C6.7 5.19 6.5 4.87 6.5 4.5L6.5 2.38L1.56 3C1.29 3.03 1.04 2.84 1 2.56C0.97 2.29 1.16 2.04 1.44 2L6.51 1.37C6.57 0.88 6.99 0.5 7.5 0.5C7.92 0.5 8.28 0.76 8.43 1.13L13.44 0.5C13.71 0.47 13.96 0.66 14 0.94C14.03 1.21 13.84 1.46 13.56 1.5L8.5 2.13L8.5 4.5C8.5 4.87 8.3 5.19 8 5.37L8 12L12.53 12C12.75 12 12.95 12.15 13.01 12.36C13.09 12.62 12.93 12.9 12.67 12.98L7.5 14.5L2.33 12.98C2.12 12.92 1.97 12.72 1.97 12.5C1.97 12.22 2.2 12 2.47 12L7 12Z"/> </vector> ```
```xml <?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="15.0" xmlns="path_to_url"> <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" /> <PropertyGroup> <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform> <ProjectGuid>{312965E5-C4F6-4D95-BA64-79906B8BC7AC}</ProjectGuid> <OutputType>Exe</OutputType> <RootNamespace>DeepSpeechConsole</RootNamespace> <AssemblyName>DeepSpeechConsole</AssemblyName> <TargetFrameworkVersion>v4.6.2</TargetFrameworkVersion> <FileAlignment>512</FileAlignment> <AutoGenerateBindingRedirects>true</AutoGenerateBindingRedirects> <Deterministic>true</Deterministic> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|x64'"> <DebugSymbols>true</DebugSymbols> <OutputPath>bin\x64\Debug\</OutputPath> <DefineConstants>DEBUG;TRACE</DefineConstants> <DebugType>full</DebugType> <PlatformTarget>x64</PlatformTarget> <ErrorReport>prompt</ErrorReport> <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet> <Prefer32Bit>true</Prefer32Bit> </PropertyGroup> <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'"> <OutputPath>bin\x64\Release\</OutputPath> <DefineConstants>TRACE</DefineConstants> <Optimize>true</Optimize> <DebugType>pdbonly</DebugType> <PlatformTarget>x64</PlatformTarget> <ErrorReport>prompt</ErrorReport> <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet> <Prefer32Bit>true</Prefer32Bit> <AllowUnsafeBlocks>true</AllowUnsafeBlocks> </PropertyGroup> <ItemGroup> <Reference Include="NAudio, Version=1.8.5.0, Culture=neutral, processorArchitecture=MSIL"> <HintPath>..\packages\NAudio.1.8.5\lib\net35\NAudio.dll</HintPath> </Reference> <Reference Include="System" /> <Reference Include="System.Core" /> <Reference Include="System.Xml.Linq" /> 
<Reference Include="System.Data.DataSetExtensions" /> <Reference Include="Microsoft.CSharp" /> <Reference Include="System.Data" /> <Reference Include="System.Net.Http" /> <Reference Include="System.Xml" /> </ItemGroup> <ItemGroup> <Compile Include="Program.cs" /> <Compile Include="Properties\AssemblyInfo.cs" /> </ItemGroup> <ItemGroup> <None Include="App.config" /> <None Include="packages.config" /> </ItemGroup> <ItemGroup> <ProjectReference Include="..\DeepSpeechClient\DeepSpeechClient.csproj"> <Project>{56DE4091-BBBE-47E4-852D-7268B33B971F}</Project> <Name>DeepSpeechClient</Name> </ProjectReference> </ItemGroup> <ItemGroup> <Content Include="arctic_a0024.wav"> <CopyToOutputDirectory>Always</CopyToOutputDirectory> </Content> </ItemGroup> <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" /> </Project> ```
Amr Saad is an Egyptian actor. He graduated from the Faculty of Applied Arts. He began his artistic career in the late 1990s, when he first stood in front of the camera. Shahin, then the film Al Madina by Yusra Nasrallah, and then appeared in a short film entitled Ten pounds and then a legitimate betrayal. In 2007, he presented the movie Dikan Shehata, which was praised by the critics. He then presented films such as The Big, Iron, Rijata, the Walls of the Moon, Molana and Karma. He presented the first series in the drama in 2010, entitled Kingdom of the Mountain and then presented the series Abdul Aziz Street in 2011 and in 2012 presented the series Khirm needle, and then presented a second part of the series Abdel Aziz Street, and in 2016 presented the series Younis Born Silver, and in 2017 presented the series security situation. He won the Best Actor Award for his role in the film Maulana from the 33rd Alexandria Film Festival and was also awarded to the Luxor Film Festival as well as the 66th Catholic Film Festival. He also won the Best Actor Award from the 23rd Mediterranean Film Festival of Tetouan on the role of «Sheikh Hatem» in the film Mawlana. References External links . Cairo University alumni Egyptian male film actors Egyptian male stage actors Egyptian male television actors Egyptian Muslims Living people Male actors from Cairo Place of birth missing (living people) Year of birth missing (living people)
```c++ /* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* cJSON */ /* JSON parser in C. 
*/ /* disable warnings about old C89 functions in MSVC */ #if !defined(_CRT_SECURE_NO_DEPRECATE) && defined(_MSC_VER) #define _CRT_SECURE_NO_DEPRECATE #endif #ifdef __GNUC__ #pragma GCC visibility push(default) #endif #if defined(_MSC_VER) #pragma warning (push) /* disable warning about single line comments in system headers */ #pragma warning (disable : 4001) #endif #include <string.h> #include <stdio.h> #include <math.h> #include <stdlib.h> #include <float.h> #include <limits.h> #include <ctype.h> #ifdef ENABLE_LOCALES #include <locale.h> #endif #if defined(_MSC_VER) #pragma warning (pop) #endif #ifdef __GNUC__ #pragma GCC visibility pop #endif #include "cJSON.h" /* define our own boolean type */ #define true ((cJSON_bool)1) #define false ((cJSON_bool)0) typedef struct { const unsigned char *json; size_t position; } error; static error global_error = { NULL, 0 }; CJSON_PUBLIC(const char *) cJSON_GetErrorPtr(void) { return (const char*) (global_error.json + global_error.position); } CJSON_PUBLIC(char *) cJSON_GetStringValue(cJSON *item) { if (!cJSON_IsString(item)) { return NULL; } return item->valuestring; } /* This is a safeguard to prevent copy-pasters from using incompatible C and header files */ #if (CJSON_VERSION_MAJOR != 1) || (CJSON_VERSION_MINOR != 7) || (CJSON_VERSION_PATCH != 1) #error cJSON.h and cJSON.c have different versions. Make sure that both have the same. 
#endif CJSON_PUBLIC(const char*) cJSON_Version(void) { static char version[15]; sprintf(version, "%i.%i.%i", CJSON_VERSION_MAJOR, CJSON_VERSION_MINOR, CJSON_VERSION_PATCH); return version; } /* Case insensitive string comparison, doesn't consider two NULL pointers equal though */ static int case_insensitive_strcmp(const unsigned char *string1, const unsigned char *string2) { if ((string1 == NULL) || (string2 == NULL)) { return 1; } if (string1 == string2) { return 0; } for(; tolower(*string1) == tolower(*string2); (void)string1++, string2++) { if (*string1 == '\0') { return 0; } } return tolower(*string1) - tolower(*string2); } typedef struct internal_context { size_t buffer_size; cJSON_bool format; cJSON_bool allow_data_after_json; cJSON_bool case_sensitive; cJSON_Allocators allocators; void *userdata; size_t end_position; } internal_context; #if defined(_MSC_VER) /* work around MSVC error C2322: '...' address of dillimport '...' is not static */ static void *internal_malloc(size_t size) { return malloc(size); } static void internal_free(void *pointer) { free(pointer); } #else #define internal_malloc malloc #define internal_free free #endif /* old style allocators for cJSON_InitHooks */ static cJSON_Hooks global_allocators = { internal_malloc, internal_free }; /* wrappers around global old style allocators */ static void *global_allocate(size_t size, void *userdata) { (void)userdata; return global_allocators.malloc_fn(size); } static void global_deallocate(void *pointer, void *userdata) { (void)userdata; free(pointer); } /* wrappers around standard allocators */ static void *malloc_wrapper(size_t size, void *userdata) { (void)userdata; return malloc(size); } static void *realloc_wrapper(void *pointer, size_t size, void *userdata) { (void)userdata; return realloc(pointer, size); } static void free_wrapper(void *pointer, void *userdata) { (void)userdata; free(pointer); } /* helpers to allocate memory with the allocators in a context */ static void *allocate(const 
internal_context * const context, size_t size) { return context->allocators.allocate(size, context->userdata); } static void *reallocate(const internal_context * const context, void *pointer, size_t size) { return context->allocators.reallocate(pointer, size, context->userdata); } static void deallocate(const internal_context * const context, void *pointer) { context->allocators.deallocate(pointer, context->userdata); } #define default_context {\ 256, /* default buffer size */\ true, /* enable formatting by default */\ true, /* allow data after the JSON by default */\ true, /* case sensitive by default */\ {\ malloc_wrapper,\ free_wrapper,\ realloc_wrapper\ },\ NULL, /* no userdata */\ 0 /* default end position */\ } /* this is necessary to assign the default context after initialization */ static internal_context global_default_context = default_context; static internal_context global_context = default_context; static unsigned char* custom_strdup(const unsigned char* string, const internal_context * const context) { size_t length = 0; unsigned char *copy = NULL; if (string == NULL) { return NULL; } length = strlen((const char*)string) + sizeof(""); copy = (unsigned char*)allocate(context, length); if (copy == NULL) { return NULL; } memcpy(copy, string, length); return copy; } CJSON_PUBLIC(void) cJSON_InitHooks(cJSON_Hooks* hooks) { if (hooks == NULL) { /* reset global context */ global_context.allocators.allocate = malloc_wrapper; global_context.allocators.deallocate = free_wrapper; global_context.allocators.reallocate = realloc_wrapper; return; } global_allocators.malloc_fn = internal_malloc; if (hooks->malloc_fn != NULL) { global_allocators.malloc_fn = hooks->malloc_fn; } global_allocators.free_fn = internal_free; if (hooks->free_fn != NULL) { global_allocators.free_fn = hooks->free_fn; } /* set the wrappers in the global context */ global_context.allocators.allocate = global_allocate; global_context.allocators.deallocate = global_deallocate; 
global_context.allocators.reallocate = NULL; } /* Internal constructor. */ static cJSON *create_item(const internal_context * const context) { cJSON* node = (cJSON*)allocate(context, sizeof(cJSON)); if (node) { memset(node, '\0', sizeof(cJSON)); } return node; } /* Delete a cJSON structure. */ static void delete_item(cJSON *item, const internal_context * const context) { cJSON *next = NULL; while (item != NULL) { next = item->next; if (!(item->type & cJSON_IsReference) && (item->child != NULL)) { delete_item(item->child, context); } if (!(item->type & cJSON_IsReference) && (item->valuestring != NULL)) { deallocate(context, item->valuestring); } if (!(item->type & cJSON_StringIsConst) && (item->string != NULL)) { deallocate(context, item->string); } deallocate(context, item); item = next; } } /* Delete a cJSON structure. */ CJSON_PUBLIC(void) cJSON_Delete(cJSON *item) { delete_item(item, &global_context); } static int double_to_saturated_integer(double number) { if (number >= INT_MAX) { return INT_MAX; } else if (number <= INT_MIN) { return INT_MIN; } return (int)number; } /* get the decimal point character of the current locale */ static unsigned char get_decimal_point(void) { #ifdef ENABLE_LOCALES struct lconv *lconv = localeconv(); return (unsigned char) lconv->decimal_point[0]; #else return '.'; #endif } typedef struct { const unsigned char *content; size_t length; size_t offset; size_t depth; /* How deeply nested (in arrays/objects) is the input at the current offset. 
*/ internal_context context; } parse_buffer; /* check if the given size is left to read in a given parse buffer (starting with 1) */ #define can_read(buffer, size) ((buffer != NULL) && (((buffer)->offset + size) <= (buffer)->length)) /* check if the buffer can be accessed at the given index (starting with 0) */ #define can_access_at_index(buffer, index) ((buffer != NULL) && (((buffer)->offset + index) < (buffer)->length)) #define cannot_access_at_index(buffer, index) (!can_access_at_index(buffer, index)) /* get a pointer to the buffer at the position */ #define buffer_at_offset(buffer) ((buffer)->content + (buffer)->offset) /* Parse the input text to generate a number, and populate the result into item. */ static cJSON_bool parse_number(cJSON * const item, parse_buffer * const input_buffer) { double number = 0; unsigned char *after_end = NULL; unsigned char number_c_string[64]; unsigned char decimal_point = get_decimal_point(); size_t i = 0; if ((input_buffer == NULL) || (input_buffer->content == NULL)) { return false; } /* copy the number into a temporary buffer and replace '.' 
with the decimal point * of the current locale (for strtod) * This also takes care of '\0' not necessarily being available for marking the end of the input */ for (i = 0; (i < (sizeof(number_c_string) - 1)) && can_access_at_index(input_buffer, i); i++) { switch (buffer_at_offset(input_buffer)[i]) { case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': case '+': case '-': case 'e': case 'E': number_c_string[i] = buffer_at_offset(input_buffer)[i]; break; case '.': number_c_string[i] = decimal_point; break; default: goto loop_end; } } loop_end: number_c_string[i] = '\0'; number = strtod((const char*)number_c_string, (char**)&after_end); if (number_c_string == after_end) { return false; /* parse_error */ } item->valuedouble = number; item->valueint = double_to_saturated_integer(number); item->type = cJSON_Number; input_buffer->offset += (size_t)(after_end - number_c_string); return true; } /* don't ask me, but the original cJSON_SetNumberValue returns an integer or double */ CJSON_PUBLIC(double) cJSON_SetNumberHelper(cJSON *object, double number) { object->valueint = double_to_saturated_integer(number); return object->valuedouble = number; } typedef struct { unsigned char *buffer; size_t length; size_t offset; size_t depth; /* current nesting depth (for formatted printing) */ cJSON_bool noalloc; internal_context context; } printbuffer; /* realloc printbuffer if necessary to have at least "needed" bytes more */ static unsigned char* ensure(printbuffer * const p, size_t needed) { unsigned char *newbuffer = NULL; size_t newsize = 0; if ((p == NULL) || (p->buffer == NULL)) { return NULL; } if ((p->length > 0) && (p->offset >= p->length)) { /* make sure that offset is valid */ return NULL; } if (needed > INT_MAX) { /* sizes bigger than INT_MAX are currently not supported */ return NULL; } needed += p->offset + 1; if (needed <= p->length) { return p->buffer + p->offset; } if (p->noalloc) { return NULL; } /* calculate new buffer 
size */ if (needed > (INT_MAX / 2)) { /* overflow of int, use INT_MAX if possible */ if (needed <= INT_MAX) { newsize = INT_MAX; } else { return NULL; } } else { newsize = needed * 2; } if (p->context.allocators.reallocate != NULL) { /* reallocate with realloc if available */ newbuffer = (unsigned char*)reallocate(&p->context, p->buffer, newsize); if (newbuffer == NULL) { deallocate(&p->context, p->buffer); p->length = 0; p->buffer = NULL; return NULL; } } else { /* otherwise reallocate manually */ newbuffer = (unsigned char*)allocate(&p->context, newsize); if (!newbuffer) { deallocate(&p->context, p->buffer); p->length = 0; p->buffer = NULL; return NULL; } if (newbuffer) { memcpy(newbuffer, p->buffer, p->offset + 1); } deallocate(&p->context, p->buffer); } p->length = newsize; p->buffer = newbuffer; return newbuffer + p->offset; } /* calculate the new length of the string in a printbuffer and update the offset */ static void update_offset(printbuffer * const buffer) { const unsigned char *buffer_pointer = NULL; if ((buffer == NULL) || (buffer->buffer == NULL)) { return; } buffer_pointer = buffer->buffer + buffer->offset; buffer->offset += strlen((const char*)buffer_pointer); } #define is_nan(number) (number != number) #define is_infinity(number) (!is_nan(number) && (number * 0) != 0) /* Render the number nicely from the given item into a string. 
*/ static cJSON_bool print_number(const cJSON * const item, printbuffer * const output_buffer) { unsigned char *output_pointer = NULL; double number = item->valuedouble; int integer = double_to_saturated_integer(number); int length = 0; size_t i = 0; unsigned char number_buffer[26]; /* temporary buffer to print the number into */ unsigned char decimal_point = get_decimal_point(); double test; if (output_buffer == NULL) { return false; } if (is_nan(number) || is_infinity(number)) { length = sprintf((char*)number_buffer, "null"); } else if (number == integer) /* avoid overhead for integers */ { length = sprintf((char*)number_buffer, "%d", integer); } else { /* Try 15 decimal places of precision to avoid nonsignificant nonzero digits */ length = sprintf((char*)number_buffer, "%1.15g", number); /* Check whether the original double can be recovered */ if ((sscanf((char*)number_buffer, "%lg", &test) != 1) || ((double)test != number)) { /* If not, print with 17 decimal places of precision */ length = sprintf((char*)number_buffer, "%1.17g", number); } } /* sprintf failed or buffer overrun occured */ if ((length < 0) || (length > (int)(sizeof(number_buffer) - 1))) { return false; } /* reserve appropriate space in the output */ output_pointer = ensure(output_buffer, (size_t)length + sizeof("")); if (output_pointer == NULL) { return false; } /* copy the printed number to the output and replace locale * dependent decimal point with '.' 
*/ for (i = 0; i < ((size_t)length); i++) { if (number_buffer[i] == decimal_point) { output_pointer[i] = '.'; continue; } output_pointer[i] = number_buffer[i]; } output_pointer[i] = '\0'; output_buffer->offset += (size_t)length; return true; } /* parse 4 digit hexadecimal number */ static unsigned parse_hex4(const unsigned char * const input) { unsigned int h = 0; size_t i = 0; for (i = 0; i < 4; i++) { /* parse digit */ if ((input[i] >= '0') && (input[i] <= '9')) { h += (unsigned int) input[i] - '0'; } else if ((input[i] >= 'A') && (input[i] <= 'F')) { h += (unsigned int) 10 + input[i] - 'A'; } else if ((input[i] >= 'a') && (input[i] <= 'f')) { h += (unsigned int) 10 + input[i] - 'a'; } else /* invalid */ { return 0; } if (i < 3) { /* shift left to make place for the next nibble */ h = h << 4; } } return h; } /* converts a UTF-16 literal to UTF-8 * A literal can be one or two sequences of the form \uXXXX */ static unsigned char utf16_literal_to_utf8(const unsigned char * const input_pointer, const unsigned char * const input_end, unsigned char **output_pointer) { long unsigned int codepoint = 0; unsigned int first_code = 0; const unsigned char *first_sequence = input_pointer; unsigned char utf8_length = 0; unsigned char utf8_position = 0; unsigned char sequence_length = 0; unsigned char first_byte_mark = 0; if ((input_end - first_sequence) < 6) { /* input ends unexpectedly */ goto fail; } /* get the first utf16 sequence */ first_code = parse_hex4(first_sequence + 2); /* check that the code is valid */ if (((first_code >= 0xDC00) && (first_code <= 0xDFFF))) { goto fail; } /* UTF16 surrogate pair */ if ((first_code >= 0xD800) && (first_code <= 0xDBFF)) { const unsigned char *second_sequence = first_sequence + 6; unsigned int second_code = 0; sequence_length = 12; /* \uXXXX\uXXXX */ if ((input_end - second_sequence) < 6) { /* input ends unexpectedly */ goto fail; } if ((second_sequence[0] != '\\') || (second_sequence[1] != 'u')) { /* missing second half of the 
surrogate pair */ goto fail; } /* get the second utf16 sequence */ second_code = parse_hex4(second_sequence + 2); /* check that the code is valid */ if ((second_code < 0xDC00) || (second_code > 0xDFFF)) { /* invalid second half of the surrogate pair */ goto fail; } /* calculate the unicode codepoint from the surrogate pair */ codepoint = 0x10000 + (((first_code & 0x3FF) << 10) | (second_code & 0x3FF)); } else { sequence_length = 6; /* \uXXXX */ codepoint = first_code; } /* encode as UTF-8 * takes at maximum 4 bytes to encode: * 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx */ if (codepoint < 0x80) { /* normal ascii, encoding 0xxxxxxx */ utf8_length = 1; } else if (codepoint < 0x800) { /* two bytes, encoding 110xxxxx 10xxxxxx */ utf8_length = 2; first_byte_mark = 0xC0; /* 11000000 */ } else if (codepoint < 0x10000) { /* three bytes, encoding 1110xxxx 10xxxxxx 10xxxxxx */ utf8_length = 3; first_byte_mark = 0xE0; /* 11100000 */ } else if (codepoint <= 0x10FFFF) { /* four bytes, encoding 1110xxxx 10xxxxxx 10xxxxxx 10xxxxxx */ utf8_length = 4; first_byte_mark = 0xF0; /* 11110000 */ } else { /* invalid unicode codepoint */ goto fail; } /* encode as utf8 */ for (utf8_position = (unsigned char)(utf8_length - 1); utf8_position > 0; utf8_position--) { /* 10xxxxxx */ (*output_pointer)[utf8_position] = (unsigned char)((codepoint | 0x80) & 0xBF); codepoint >>= 6; } /* encode first byte */ if (utf8_length > 1) { (*output_pointer)[0] = (unsigned char)((codepoint | first_byte_mark) & 0xFF); } else { (*output_pointer)[0] = (unsigned char)(codepoint & 0x7F); } *output_pointer += utf8_length; return sequence_length; fail: return 0; } /* Parse the input text into an unescaped cinput, and populate item. 
*/ static cJSON_bool parse_string(cJSON * const item, parse_buffer * const input_buffer) { const unsigned char *input_pointer = buffer_at_offset(input_buffer) + 1; const unsigned char *input_end = buffer_at_offset(input_buffer) + 1; unsigned char *output_pointer = NULL; unsigned char *output = NULL; /* not a string */ if (buffer_at_offset(input_buffer)[0] != '\"') { goto fail; } { /* calculate approximate size of the output (overestimate) */ size_t allocation_length = 0; size_t skipped_bytes = 0; while (((size_t)(input_end - input_buffer->content) < input_buffer->length) && (*input_end != '\"')) { /* is escape sequence */ if (input_end[0] == '\\') { if ((size_t)(input_end + 1 - input_buffer->content) >= input_buffer->length) { /* prevent buffer overflow when last input character is a backslash */ goto fail; } skipped_bytes++; input_end++; } input_end++; } if (((size_t)(input_end - input_buffer->content) >= input_buffer->length) || (*input_end != '\"')) { goto fail; /* string ended unexpectedly */ } /* This is at most how much we need for the output */ allocation_length = (size_t) (input_end - buffer_at_offset(input_buffer)) - skipped_bytes; output = (unsigned char*)allocate(&input_buffer->context, allocation_length + sizeof("")); if (output == NULL) { goto fail; /* allocation failure */ } } output_pointer = output; /* loop through the string literal */ while (input_pointer < input_end) { if (*input_pointer != '\\') { *output_pointer++ = *input_pointer++; } /* escape sequence */ else { unsigned char sequence_length = 2; if ((input_end - input_pointer) < 1) { goto fail; } switch (input_pointer[1]) { case 'b': *output_pointer++ = '\b'; break; case 'f': *output_pointer++ = '\f'; break; case 'n': *output_pointer++ = '\n'; break; case 'r': *output_pointer++ = '\r'; break; case 't': *output_pointer++ = '\t'; break; case '\"': case '\\': case '/': *output_pointer++ = input_pointer[1]; break; /* UTF-16 literal */ case 'u': sequence_length = 
utf16_literal_to_utf8(input_pointer, input_end, &output_pointer); if (sequence_length == 0) { /* failed to convert UTF16-literal to UTF-8 */ goto fail; } break; default: goto fail; } input_pointer += sequence_length; } } /* zero terminate the output */ *output_pointer = '\0'; item->type = cJSON_String; item->valuestring = (char*)output; input_buffer->offset = (size_t) (input_end - input_buffer->content); input_buffer->offset++; return true; fail: if (output != NULL) { deallocate(&input_buffer->context, output); } if (input_pointer != NULL) { input_buffer->offset = (size_t)(input_pointer - input_buffer->content); } return false; } /* Render the cstring provided to an escaped version that can be printed. */ static cJSON_bool print_string_ptr(const unsigned char * const input, printbuffer * const output_buffer) { const unsigned char *input_pointer = NULL; unsigned char *output = NULL; unsigned char *output_pointer = NULL; size_t output_length = 0; /* numbers of additional characters needed for escaping */ size_t escape_characters = 0; if (output_buffer == NULL) { return false; } /* empty string */ if (input == NULL) { output = ensure(output_buffer, sizeof("\"\"")); if (output == NULL) { return false; } strcpy((char*)output, "\"\""); return true; } /* set "flag" to 1 if something needs to be escaped */ for (input_pointer = input; *input_pointer; input_pointer++) { switch (*input_pointer) { case '\"': case '\\': case '\b': case '\f': case '\n': case '\r': case '\t': /* one character escape sequence */ escape_characters++; break; default: if (*input_pointer < 32) { /* UTF-16 escape sequence uXXXX */ escape_characters += 5; } break; } } output_length = (size_t)(input_pointer - input) + escape_characters; output = ensure(output_buffer, output_length + sizeof("\"\"")); if (output == NULL) { return false; } /* no characters have to be escaped */ if (escape_characters == 0) { output[0] = '\"'; memcpy(output + 1, input, output_length); output[output_length + 1] = '\"'; 
output[output_length + 2] = '\0'; return true; } output[0] = '\"'; output_pointer = output + 1; /* copy the string */ for (input_pointer = input; *input_pointer != '\0'; (void)input_pointer++, output_pointer++) { if ((*input_pointer > 31) && (*input_pointer != '\"') && (*input_pointer != '\\')) { /* normal character, copy */ *output_pointer = *input_pointer; } else { /* character needs to be escaped */ *output_pointer++ = '\\'; switch (*input_pointer) { case '\\': *output_pointer = '\\'; break; case '\"': *output_pointer = '\"'; break; case '\b': *output_pointer = 'b'; break; case '\f': *output_pointer = 'f'; break; case '\n': *output_pointer = 'n'; break; case '\r': *output_pointer = 'r'; break; case '\t': *output_pointer = 't'; break; default: /* escape and print as unicode codepoint */ sprintf((char*)output_pointer, "u%04x", *input_pointer); output_pointer += 4; break; } } } output[output_length + 1] = '\"'; output[output_length + 2] = '\0'; return true; } /* Invoke print_string_ptr (which is useful) on an item. */ static cJSON_bool print_string(const cJSON * const item, printbuffer * const p) { return print_string_ptr((unsigned char*)item->valuestring, p); } /* Predeclare these prototypes. 
*/ static cJSON_bool parse_value(cJSON * const item, parse_buffer * const input_buffer); static cJSON_bool print_value(const cJSON * const item, printbuffer * const output_buffer); static cJSON_bool parse_array(cJSON * const item, parse_buffer * const input_buffer); static cJSON_bool print_array(const cJSON * const item, printbuffer * const output_buffer); static cJSON_bool parse_object(cJSON * const item, parse_buffer * const input_buffer); static cJSON_bool print_object(const cJSON * const item, printbuffer * const output_buffer); /* Utility to jump whitespace and cr/lf */ static parse_buffer *buffer_skip_whitespace(parse_buffer * const buffer) { if ((buffer == NULL) || (buffer->content == NULL)) { return NULL; } while (can_access_at_index(buffer, 0) && (buffer_at_offset(buffer)[0] <= 32)) { buffer->offset++; } if (buffer->offset == buffer->length) { buffer->offset--; } return buffer; } /* skip the UTF-8 BOM (byte order mark) if it is at the beginning of a buffer */ static parse_buffer *skip_utf8_bom(parse_buffer * const buffer) { if ((buffer == NULL) || (buffer->content == NULL) || (buffer->offset != 0)) { return NULL; } if (can_access_at_index(buffer, 4) && (strncmp((const char*)buffer_at_offset(buffer), "\xEF\xBB\xBF", 3) == 0)) { buffer->offset += 3; } return buffer; } /* Parse an object - create a new root, and populate. */ static cJSON *parse(const char * const json, internal_context * const context) { parse_buffer buffer = { 0, 0, 0, 0, default_context }; cJSON *item = NULL; /* reset global error position */ global_error.json = NULL; global_error.position = 0; if (json == NULL) { goto fail; } buffer.content = (const unsigned char*)json; buffer.length = strlen((const char*)json) + sizeof(""); buffer.offset = 0; buffer.context = *context; item = create_item(context); if (item == NULL) { goto fail; } if (!parse_value(item, buffer_skip_whitespace(skip_utf8_bom(&buffer)))) { /* parse failure. error position is set. 
*/ goto fail; } if (!context->allow_data_after_json) { buffer_skip_whitespace(&buffer); if ((buffer.offset >= buffer.length) || buffer_at_offset(&buffer)[0] != '\0') { goto fail; } } context->end_position = buffer.offset; return item; fail: if (item != NULL) { delete_item(item, context); } if (json != NULL) { error local_error; local_error.json = (const unsigned char*)json; local_error.position = 0; if (buffer.offset < buffer.length) { local_error.position = buffer.offset; } else if (buffer.length > 0) { local_error.position = buffer.length - 1; } context->end_position = local_error.position; global_error = local_error; } return NULL; } /* Parse an object - create a new root, and populate. */ CJSON_PUBLIC(cJSON *) cJSON_ParseWithOpts(const char *json, const char **return_parse_end, cJSON_bool require_null_terminated) { internal_context context = global_context; cJSON *item = NULL; context.allow_data_after_json = !require_null_terminated; item = parse(json, &context); if (return_parse_end != NULL) { *return_parse_end = json + context.end_position; } return item; } /* Default options for cJSON_Parse */ CJSON_PUBLIC(cJSON *) cJSON_Parse(const char *json) { return parse(json, &global_context); } #define cjson_min(a, b) ((a < b) ? a : b) static unsigned char *print(const cJSON * const item, const internal_context * const context) { printbuffer buffer[1]; unsigned char *printed = NULL; memset(buffer, 0, sizeof(buffer)); /* create buffer */ buffer->buffer = (unsigned char*)allocate(context, context->buffer_size); buffer->length = context->buffer_size; buffer->context = *context; if (buffer->buffer == NULL) { goto fail; } /* print the value */ if (!print_value(item, buffer)) { goto fail; } update_offset(buffer); /* Reallocate the buffer so that it only uses as much as it needs. 
This can save up to 50% because ensure increases the buffer size by a factor of 2 */ /* check if reallocate is available */ if (context->allocators.reallocate != NULL) { printed = (unsigned char*)reallocate(context, buffer->buffer, buffer->offset + 1); buffer->buffer = NULL; if (printed == NULL) { goto fail; } } else /* otherwise copy the JSON over to a new buffer */ { printed = (unsigned char*)allocate(context, buffer->offset + 1); if (printed == NULL) { goto fail; } memcpy(printed, buffer->buffer, cjson_min(buffer->length, buffer->offset + 1)); printed[buffer->offset] = '\0'; /* just to be sure */ /* free the buffer */ deallocate(context, buffer->buffer); } return printed; fail: if (buffer->buffer != NULL) { deallocate(context, buffer->buffer); } if (printed != NULL) { deallocate(context, printed); } return NULL; } /* Render a cJSON item/entity/structure to text. */ CJSON_PUBLIC(char *) cJSON_Print(const cJSON *item) { return (char*)print(item, &global_context); } CJSON_PUBLIC(char *) cJSON_PrintUnformatted(const cJSON *item) { internal_context context = global_context; context.format = false; return (char*)print(item, &context); } CJSON_PUBLIC(char *) cJSON_PrintBuffered(const cJSON *item, int prebuffer, cJSON_bool format) { internal_context context = global_context; if (prebuffer < 0) { return NULL; } context.buffer_size = (size_t)prebuffer; context.format = format; return (char*)print(item, &context); } CJSON_PUBLIC(cJSON_bool) cJSON_PrintPreallocated(cJSON *item, char *buffer, const int length, const cJSON_bool format) { printbuffer p = { 0, 0, 0, 0, 0, default_context }; if ((length < 0) || (buffer == NULL)) { return false; } p.buffer = (unsigned char*)buffer; p.length = (size_t)length; p.offset = 0; p.noalloc = true; p.context = global_context; p.context.format = format; return print_value(item, &p); } /* Parser core - when encountering text, process appropriately. 
*/
/* Dispatch on the first non-consumed byte and parse one JSON value into item.
 * Returns false on malformed or truncated input. */
static cJSON_bool parse_value(cJSON * const item, parse_buffer * const input_buffer)
{
    unsigned char first_byte = 0;

    /* need a readable buffer with at least one byte left */
    if ((input_buffer == NULL) || (input_buffer->content == NULL) || !can_read(input_buffer, 1))
    {
        return false;
    }

    first_byte = buffer_at_offset(input_buffer)[0];

    /* composite and scalar values with dedicated parsers */
    if (first_byte == '\"')
    {
        return parse_string(item, input_buffer);
    }
    if (first_byte == '[')
    {
        return parse_array(item, input_buffer);
    }
    if (first_byte == '{')
    {
        return parse_object(item, input_buffer);
    }
    if ((first_byte == '-') || ((first_byte >= '0') && (first_byte <= '9')))
    {
        return parse_number(item, input_buffer);
    }

    /* keyword literals: null / true / false */
    if ((first_byte == 'n') && can_read(input_buffer, 4) && (strncmp((const char*)buffer_at_offset(input_buffer), "null", 4) == 0))
    {
        item->type = cJSON_NULL;
        input_buffer->offset += 4;
        return true;
    }
    if ((first_byte == 't') && can_read(input_buffer, 4) && (strncmp((const char*)buffer_at_offset(input_buffer), "true", 4) == 0))
    {
        item->type = cJSON_True;
        item->valueint = 1;
        input_buffer->offset += 4;
        return true;
    }
    if ((first_byte == 'f') && can_read(input_buffer, 5) && (strncmp((const char*)buffer_at_offset(input_buffer), "false", 5) == 0))
    {
        item->type = cJSON_False;
        input_buffer->offset += 5;
        return true;
    }

    /* nothing recognizable */
    return false;
}

/* Render a value to text. 
*/
/* Render one item into output_buffer, dispatching on its type.
 * Returns false on unknown type, NULL raw payload, or buffer growth failure. */
static cJSON_bool print_value(const cJSON * const item, printbuffer * const output_buffer)
{
    unsigned char *output = NULL;

    if ((item == NULL) || (output_buffer == NULL)) { return false; }

    switch ((item->type) & 0xFF)
    {
        case cJSON_NULL:
            output = ensure(output_buffer, 5);
            if (output == NULL) { return false; }
            strcpy((char*)output, "null");
            return true;

        case cJSON_False:
            output = ensure(output_buffer, 6);
            if (output == NULL) { return false; }
            strcpy((char*)output, "false");
            return true;

        case cJSON_True:
            output = ensure(output_buffer, 5);
            if (output == NULL) { return false; }
            strcpy((char*)output, "true");
            return true;

        case cJSON_Number:
            return print_number(item, output_buffer);

        case cJSON_Raw:
        {
            size_t raw_length = 0;
            if (item->valuestring == NULL)
            {
                /* FIX: do NOT deallocate output_buffer->buffer here. The old
                 * code freed it without nulling the pointer, and print()'s
                 * fail path then freed it a second time (double free).
                 * Returning false is enough — the caller owns the buffer and
                 * releases it exactly once on failure. */
                return false;
            }

            raw_length = strlen(item->valuestring) + sizeof(""); /* include terminator */
            output = ensure(output_buffer, raw_length);
            if (output == NULL) { return false; }
            memcpy(output, item->valuestring, raw_length);
            return true;
        }

        case cJSON_String:
            return print_string(item, output_buffer);

        case cJSON_Array:
            return print_array(item, output_buffer);

        case cJSON_Object:
            return print_object(item, output_buffer);

        default:
            return false;
    }
}

/* Build an array from input text. 
*/
/* Parse a JSON array from input_buffer into item.
 * Children are collected into a doubly linked list hung off item->child.
 * Depth is bounded by CJSON_NESTING_LIMIT to prevent stack exhaustion.
 * NOTE(review): unlike parse_object there is no cannot_access_at_index guard
 * before reading the '[' — safe only because parse_value already verified one
 * readable byte; confirm if this is ever called from elsewhere. */
static cJSON_bool parse_array(cJSON * const item, parse_buffer * const input_buffer)
{
    cJSON *head = NULL; /* head of the linked list */
    cJSON *current_item = NULL;

    if (input_buffer->depth >= CJSON_NESTING_LIMIT)
    {
        return false; /* too deeply nested */
    }
    input_buffer->depth++;

    if (buffer_at_offset(input_buffer)[0] != '[')
    {
        /* not an array */
        goto fail;
    }
    input_buffer->offset++;

    buffer_skip_whitespace(input_buffer);
    if (can_access_at_index(input_buffer, 0) && (buffer_at_offset(input_buffer)[0] == ']'))
    {
        /* empty array */
        goto success;
    }

    /* check if we skipped to the end of the buffer */
    if (cannot_access_at_index(input_buffer, 0))
    {
        input_buffer->offset--;
        goto fail;
    }

    /* step back to character in front of the first element */
    input_buffer->offset--;
    /* loop through the comma separated array elements */
    do
    {
        /* allocate next item */
        cJSON *new_item = create_item(&(input_buffer->context));
        if (new_item == NULL)
        {
            goto fail; /* allocation failure */
        }

        /* attach next item to list */
        if (head == NULL)
        {
            /* start the linked list */
            current_item = head = new_item;
        }
        else
        {
            /* add to the end and advance */
            current_item->next = new_item;
            new_item->prev = current_item;
            current_item = new_item;
        }

        /* parse next value (offset++ consumes the '[' or ',') */
        input_buffer->offset++;
        buffer_skip_whitespace(input_buffer);
        if (!parse_value(current_item, input_buffer))
        {
            goto fail; /* failed to parse value */
        }
        buffer_skip_whitespace(input_buffer);
    }
    while (can_access_at_index(input_buffer, 0) && (buffer_at_offset(input_buffer)[0] == ','));

    if (cannot_access_at_index(input_buffer, 0) || buffer_at_offset(input_buffer)[0] != ']')
    {
        goto fail; /* expected end of array */
    }

success:
    input_buffer->depth--;

    item->type = cJSON_Array;
    item->child = head;

    input_buffer->offset++; /* consume the closing ']' */

    return true;

fail:
    if (head != NULL)
    {
        /* tear down all children parsed so far */
        delete_item(head, &input_buffer->context);
    }

    return false;
}

/* Render an array to text.
 * Emits '[', each child via print_value separated by "," (", " when
 * formatting), then ']'. Returns false on any buffer/print failure. */
static cJSON_bool print_array(const cJSON * const item, printbuffer * const output_buffer)
{
    unsigned char *output_pointer = NULL;
    size_t length = 0;
    cJSON *current_element = item->child;

    if (output_buffer == NULL)
    {
        return false;
    }

    /* Compose the output array. */
    /* opening square bracket */
    output_pointer = ensure(output_buffer, 1);
    if (output_pointer == NULL)
    {
        return false;
    }

    *output_pointer = '[';
    output_buffer->offset++;
    output_buffer->depth++;

    while (current_element != NULL)
    {
        if (!print_value(current_element, output_buffer))
        {
            return false;
        }
        update_offset(output_buffer);
        if (current_element->next)
        {
            /* separator: "," or ", " depending on format flag */
            length = (size_t) (output_buffer->context.format ? 2 : 1);
            output_pointer = ensure(output_buffer, length + 1);
            if (output_pointer == NULL)
            {
                return false;
            }
            *output_pointer++ = ',';
            if(output_buffer->context.format)
            {
                *output_pointer++ = ' ';
            }
            *output_pointer = '\0';
            output_buffer->offset += length;
        }
        current_element = current_element->next;
    }

    /* closing bracket plus terminator */
    output_pointer = ensure(output_buffer, 2);
    if (output_pointer == NULL)
    {
        return false;
    }
    *output_pointer++ = ']';
    *output_pointer = '\0';
    output_buffer->depth--;

    return true;
}

/* Build an object from the text. 
*/ static cJSON_bool parse_object(cJSON * const item, parse_buffer * const input_buffer) { cJSON *head = NULL; /* linked list head */ cJSON *current_item = NULL; if (input_buffer->depth >= CJSON_NESTING_LIMIT) { return false; /* to deeply nested */ } input_buffer->depth++; if (cannot_access_at_index(input_buffer, 0) || (buffer_at_offset(input_buffer)[0] != '{')) { goto fail; /* not an object */ } input_buffer->offset++; buffer_skip_whitespace(input_buffer); if (can_access_at_index(input_buffer, 0) && (buffer_at_offset(input_buffer)[0] == '}')) { goto success; /* empty object */ } /* check if we skipped to the end of the buffer */ if (cannot_access_at_index(input_buffer, 0)) { input_buffer->offset--; goto fail; } /* step back to character in front of the first element */ input_buffer->offset--; /* loop through the comma separated array elements */ do { /* allocate next item */ cJSON *new_item = create_item(&(input_buffer->context)); if (new_item == NULL) { goto fail; /* allocation failure */ } /* attach next item to list */ if (head == NULL) { /* start the linked list */ current_item = head = new_item; } else { /* add to the end and advance */ current_item->next = new_item; new_item->prev = current_item; current_item = new_item; } /* parse the name of the child */ input_buffer->offset++; buffer_skip_whitespace(input_buffer); if (!parse_string(current_item, input_buffer)) { goto fail; /* faile to parse name */ } buffer_skip_whitespace(input_buffer); /* swap valuestring and string, because we parsed the name */ current_item->string = current_item->valuestring; current_item->valuestring = NULL; if (cannot_access_at_index(input_buffer, 0) || (buffer_at_offset(input_buffer)[0] != ':')) { goto fail; /* invalid object */ } /* parse the value */ input_buffer->offset++; buffer_skip_whitespace(input_buffer); if (!parse_value(current_item, input_buffer)) { goto fail; /* failed to parse value */ } buffer_skip_whitespace(input_buffer); } while (can_access_at_index(input_buffer, 
0) && (buffer_at_offset(input_buffer)[0] == ',')); if (cannot_access_at_index(input_buffer, 0) || (buffer_at_offset(input_buffer)[0] != '}')) { goto fail; /* expected end of object */ } success: input_buffer->depth--; item->type = cJSON_Object; item->child = head; input_buffer->offset++; return true; fail: if (head != NULL) { delete_item(head, &input_buffer->context); } return false; } /* Render an object to text. */ static cJSON_bool print_object(const cJSON * const item, printbuffer * const output_buffer) { unsigned char *output_pointer = NULL; size_t length = 0; cJSON *current_item = item->child; if (output_buffer == NULL) { return false; } /* Compose the output: */ length = (size_t) (output_buffer->context.format ? 2 : 1); /* fmt: {\n */ output_pointer = ensure(output_buffer, length + 1); if (output_pointer == NULL) { return false; } *output_pointer++ = '{'; output_buffer->depth++; if (output_buffer->context.format) { *output_pointer++ = '\n'; } output_buffer->offset += length; while (current_item) { if (output_buffer->context.format) { size_t i; output_pointer = ensure(output_buffer, output_buffer->depth); if (output_pointer == NULL) { return false; } for (i = 0; i < output_buffer->depth; i++) { *output_pointer++ = '\t'; } output_buffer->offset += output_buffer->depth; } /* print key */ if (!print_string_ptr((unsigned char*)current_item->string, output_buffer)) { return false; } update_offset(output_buffer); length = (size_t) (output_buffer->context.format ? 2 : 1); output_pointer = ensure(output_buffer, length); if (output_pointer == NULL) { return false; } *output_pointer++ = ':'; if (output_buffer->context.format) { *output_pointer++ = '\t'; } output_buffer->offset += length; /* print value */ if (!print_value(current_item, output_buffer)) { return false; } update_offset(output_buffer); /* print comma if not last */ length = (size_t) ((output_buffer->context.format ? 1 : 0) + (current_item->next ? 
1 : 0)); output_pointer = ensure(output_buffer, length + 1); if (output_pointer == NULL) { return false; } if (current_item->next) { *output_pointer++ = ','; } if (output_buffer->context.format) { *output_pointer++ = '\n'; } *output_pointer = '\0'; output_buffer->offset += length; current_item = current_item->next; } output_pointer = ensure(output_buffer, output_buffer->context.format ? (output_buffer->depth + 1) : 2); if (output_pointer == NULL) { return false; } if (output_buffer->context.format) { size_t i; for (i = 0; i < (output_buffer->depth - 1); i++) { *output_pointer++ = '\t'; } } *output_pointer++ = '}'; *output_pointer = '\0'; output_buffer->depth--; return true; } static size_t get_array_size(const cJSON * const array) { cJSON *child = NULL; size_t size = 0; if (array == NULL) { return 0; } child = array->child; while (child != NULL) { size++; child = child->next; } return size; } /* Get Array size/item / object item. */ CJSON_PUBLIC(int) cJSON_GetArraySize(const cJSON *array) { size_t size = get_array_size(array); if (size > INT_MAX) { /* This is incorrect but can't be fixed without breaking the API */ return INT_MAX; } return (int)size; } static cJSON* get_array_item(const cJSON *array, size_t index) { cJSON *current_child = NULL; if (array == NULL) { return NULL; } current_child = array->child; while ((current_child != NULL) && (index > 0)) { index--; current_child = current_child->next; } return current_child; } CJSON_PUBLIC(cJSON *) cJSON_GetArrayItem(const cJSON *array, int index) { if (index < 0) { return NULL; } return get_array_item(array, (size_t)index); } static cJSON *get_object_item(const cJSON * const object, const char * const name, const internal_context * const context) { cJSON *current_element = NULL; if ((object == NULL) || (name == NULL)) { return NULL; } current_element = object->child; if (context->case_sensitive) { while ((current_element != NULL) && (strcmp(name, current_element->string) != 0)) { current_element = 
current_element->next; } } else { while ((current_element != NULL) && (case_insensitive_strcmp((const unsigned char*)name, (const unsigned char*)(current_element->string)) != 0)) { current_element = current_element->next; } } return current_element; } CJSON_PUBLIC(cJSON *) cJSON_GetObjectItem(const cJSON * const object, const char * const string) { internal_context context = default_context; context.case_sensitive = false; return get_object_item(object, string, &context); } CJSON_PUBLIC(cJSON *) cJSON_GetObjectItemCaseSensitive(const cJSON * const object, const char * const string) { internal_context context = default_context; context.case_sensitive = true; return get_object_item(object, string, &context); } CJSON_PUBLIC(cJSON_bool) cJSON_HasObjectItem(const cJSON *object, const char *string) { return cJSON_GetObjectItem(object, string) ? 1 : 0; } /* Utility for array list handling. */ static void suffix_object(cJSON *prev, cJSON *item) { prev->next = item; item->prev = prev; } /* Utility for handling references. */ static cJSON *create_reference(const cJSON *item, const internal_context * const context) { cJSON *reference = NULL; if (item == NULL) { return NULL; } reference = create_item(context); if (reference == NULL) { return NULL; } memcpy(reference, item, sizeof(cJSON)); reference->string = NULL; reference->type |= cJSON_IsReference; reference->next = reference->prev = NULL; return reference; } static cJSON_bool add_item_to_array(cJSON *array, cJSON *item) { cJSON *child = NULL; if ((item == NULL) || (array == NULL)) { return false; } child = array->child; if (child == NULL) { /* list is empty, start new one */ array->child = item; } else { /* append to the end */ while (child->next) { child = child->next; } suffix_object(child, item); } return true; } /* Add item to array/object. 
*/
CJSON_PUBLIC(void) cJSON_AddItemToArray(cJSON *array, cJSON *item)
{
    add_item_to_array(array, item);
}

#if defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5))))
    #pragma GCC diagnostic push
#endif
#ifdef __GNUC__
#pragma GCC diagnostic ignored "-Wcast-qual"
#endif
/* helper function to cast away const */
static void* cast_away_const(const void* string)
{
    return (void*)string;
}
#if defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 5))))
    #pragma GCC diagnostic pop
#endif

/* Attach item to object under key 'string'.
 * constant_key true: store the caller's pointer and mark StringIsConst
 * (caller guarantees lifetime); false: duplicate the key. Frees any
 * previously owned key on the item. */
static cJSON_bool add_item_to_object(cJSON * const object, const char * const string, cJSON * const item, const internal_context * const context, const cJSON_bool constant_key)
{
    if ((object == NULL) || (string == NULL) || (item == NULL))
    {
        return false;
    }

    /* release an old, owned key before overwriting */
    if (!(item->type & cJSON_StringIsConst) && (item->string != NULL))
    {
        deallocate(context, item->string);
    }

    if (constant_key)
    {
        item->string = (char*)cast_away_const(string);
        item->type |= cJSON_StringIsConst;
    }
    else
    {
        char *key = (char*)custom_strdup((const unsigned char*)string, context);
        if (key == NULL)
        {
            return false;
        }
        item->string = key;
        item->type &= ~cJSON_StringIsConst;
    }

    return add_item_to_array(object, item);
}

CJSON_PUBLIC(void) cJSON_AddItemToObject(cJSON *object, const char *string, cJSON *item)
{
    add_item_to_object(object, string, item, &global_context, false);
}

/* Add an item to an object with constant string as key */
CJSON_PUBLIC(void) cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item);
CJSON_PUBLIC(void) cJSON_AddItemToObjectCS(cJSON *object, const char *string, cJSON *item)
{
    add_item_to_object(object, string, item, &global_context, true);
}

CJSON_PUBLIC(void) cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item)
{
    if (array == NULL)
    {
        return;
    }
    add_item_to_array(array, create_reference(item, &global_context));
}

CJSON_PUBLIC(void) cJSON_AddItemReferenceToObject(cJSON *object, const char *string, cJSON *item)
{
    if ((object == NULL) || (string == NULL))
    {
        return;
    }
    add_item_to_object(object, string, create_reference(item, &global_context), &global_context, false);
}

/* cJSON_Add<Type>ToObject: create, attach, and return the new child;
 * on attach failure the fresh item is deleted and NULL returned. */
CJSON_PUBLIC(cJSON*) cJSON_AddNullToObject(cJSON * const object, const char * const name)
{
    cJSON *null = cJSON_CreateNull();
    if (add_item_to_object(object, name, null, &global_context, false))
    {
        return null;
    }
    delete_item(null, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddTrueToObject(cJSON * const object, const char * const name)
{
    cJSON *true_item = cJSON_CreateTrue();
    if (add_item_to_object(object, name, true_item, &global_context, false))
    {
        return true_item;
    }
    delete_item(true_item, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddFalseToObject(cJSON * const object, const char * const name)
{
    cJSON *false_item = cJSON_CreateFalse();
    if (add_item_to_object(object, name, false_item, &global_context, false))
    {
        return false_item;
    }
    delete_item(false_item, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddBoolToObject(cJSON * const object, const char * const name, const cJSON_bool boolean)
{
    cJSON *bool_item = cJSON_CreateBool(boolean);
    if (add_item_to_object(object, name, bool_item, &global_context, false))
    {
        return bool_item;
    }
    delete_item(bool_item, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddNumberToObject(cJSON * const object, const char * const name, const double number)
{
    cJSON *number_item = cJSON_CreateNumber(number);
    if (add_item_to_object(object, name, number_item, &global_context, false))
    {
        return number_item;
    }
    delete_item(number_item, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddStringToObject(cJSON * const object, const char * const name, const char * const string)
{
    cJSON *string_item = cJSON_CreateString(string);
    if (add_item_to_object(object, name, string_item, &global_context, false))
    {
        return string_item;
    }
    delete_item(string_item, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddRawToObject(cJSON * const object, const char * const name, const char * const raw)
{
    cJSON *raw_item = cJSON_CreateRaw(raw);
    if (add_item_to_object(object, name, raw_item, &global_context, false))
    {
        return raw_item;
    }
    delete_item(raw_item, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddObjectToObject(cJSON * const object, const char * const name)
{
    cJSON *object_item = cJSON_CreateObject();
    if (add_item_to_object(object, name, object_item, &global_context, false))
    {
        return object_item;
    }
    delete_item(object_item, &global_context);
    return NULL;
}

CJSON_PUBLIC(cJSON*) cJSON_AddArrayToObject(cJSON * const object, const char * const name)
{
    cJSON *array = cJSON_CreateArray();
    if (add_item_to_object(object, name, array, &global_context, false))
    {
        return array;
    }
    delete_item(array, &global_context);
    return NULL;
}

/* Unlink item from parent's child list without freeing it. */
CJSON_PUBLIC(cJSON *) cJSON_DetachItemViaPointer(cJSON *parent, cJSON * const item)
{
    if ((parent == NULL) || (item == NULL))
    {
        return NULL;
    }

    if (item->prev != NULL)
    {
        /* not the first element */
        item->prev->next = item->next;
    }
    if (item->next != NULL)
    {
        /* not the last element */
        item->next->prev = item->prev;
    }

    if (item == parent->child)
    {
        /* first element */
        parent->child = item->next;
    }
    /* make sure the detached item doesn't point anywhere anymore */
    item->prev = NULL;
    item->next = NULL;

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromArray(cJSON *array, int which)
{
    if (which < 0)
    {
        return NULL;
    }

    return cJSON_DetachItemViaPointer(array, get_array_item(array, (size_t)which));
}

CJSON_PUBLIC(void) cJSON_DeleteItemFromArray(cJSON *array, int which)
{
    delete_item(cJSON_DetachItemFromArray(array, which), &global_context);
}

CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromObject(cJSON *object, const char *string)
{
    cJSON *to_detach = cJSON_GetObjectItem(object, string);

    return cJSON_DetachItemViaPointer(object, to_detach);
}

CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromObjectCaseSensitive(cJSON *object, const char *string)
{
    cJSON *to_detach = cJSON_GetObjectItemCaseSensitive(object, string);

    return cJSON_DetachItemViaPointer(object, to_detach);
}

CJSON_PUBLIC(void) cJSON_DeleteItemFromObject(cJSON *object, const char *string)
{
    delete_item(cJSON_DetachItemFromObject(object, string), &global_context);
}

CJSON_PUBLIC(void) cJSON_DeleteItemFromObjectCaseSensitive(cJSON *object, const char *string)
{
    delete_item(cJSON_DetachItemFromObjectCaseSensitive(object, string), &global_context);
}

/* Replace array/object items with new ones. */
/* Insert newitem before position 'which'; appends when out of range. */
CJSON_PUBLIC(void) cJSON_InsertItemInArray(cJSON *array, int which, cJSON *newitem)
{
    cJSON *after_inserted = NULL;

    if (which < 0)
    {
        return;
    }

    after_inserted = get_array_item(array, (size_t)which);
    if (after_inserted == NULL)
    {
        add_item_to_array(array, newitem);
        return;
    }

    newitem->next = after_inserted;
    newitem->prev = after_inserted->prev;
    after_inserted->prev = newitem;
    if (after_inserted == array->child)
    {
        array->child = newitem;
    }
    else
    {
        newitem->prev->next = newitem;
    }
}

/* Swap item for replacement in parent's list, then delete item.
 * NOTE(review): deletes with &global_context regardless of which context
 * allocated the item — confirm that is intended in this fork. */
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemViaPointer(cJSON * const parent, cJSON * const item, cJSON * replacement)
{
    if ((parent == NULL) || (replacement == NULL) || (item == NULL))
    {
        return false;
    }

    if (replacement == item)
    {
        return true;
    }

    replacement->next = item->next;
    replacement->prev = item->prev;

    if (replacement->next != NULL)
    {
        replacement->next->prev = replacement;
    }
    if (replacement->prev != NULL)
    {
        replacement->prev->next = replacement;
    }
    if (parent->child == item)
    {
        parent->child = replacement;
    }

    item->next = NULL;
    item->prev = NULL;
    delete_item(item, &global_context);

    return true;
}

CJSON_PUBLIC(void) cJSON_ReplaceItemInArray(cJSON *array, int which, cJSON *newitem)
{
    if (which < 0)
    {
        return;
    }

    cJSON_ReplaceItemViaPointer(array, get_array_item(array, (size_t)which), newitem);
}

/* Give 'replacement' the key 'string' and swap it in for the current child.
 * NOTE(review): frees via cJSON_free and duplicates via &global_context even
 * though a context parameter is passed — looks inconsistent with the rest of
 * this fork; also the custom_strdup result is not checked for NULL. Confirm. */
static cJSON_bool replace_item_in_object(cJSON *object, const char *string, cJSON *replacement, const internal_context * const context)
{
    if ((replacement == NULL) || (string == NULL))
    {
        return false;
    }

    /* replace the name in the replacement */
    if (!(replacement->type & cJSON_StringIsConst) && (replacement->string != NULL))
    {
        cJSON_free(replacement->string);
    }
    replacement->string = (char*)custom_strdup((const unsigned char*)string, &global_context);
    replacement->type &= ~cJSON_StringIsConst;

    cJSON_ReplaceItemViaPointer(object, get_object_item(object, string, context), replacement);

    return true;
}

CJSON_PUBLIC(void) cJSON_ReplaceItemInObject(cJSON *object, const char *string, cJSON *newitem)
{
    internal_context context = global_context;
    context.case_sensitive = false;
    replace_item_in_object(object, string, newitem, &context);
}

CJSON_PUBLIC(void) cJSON_ReplaceItemInObjectCaseSensitive(cJSON *object, const char *string, cJSON *newitem)
{
    internal_context context = global_context;
    context.case_sensitive = true;
    replace_item_in_object(object, string, newitem, &context);
}

/* Create basic types: */
CJSON_PUBLIC(cJSON *) cJSON_CreateNull(void)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type = cJSON_NULL;
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateTrue(void)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type = cJSON_True;
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateFalse(void)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type = cJSON_False;
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateBool(cJSON_bool boolean)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type = boolean ? cJSON_True : cJSON_False;
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateNumber(double num)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type = cJSON_Number;
        item->valuedouble = num;
        /* keep the legacy int view in sync, saturating on overflow */
        item->valueint = double_to_saturated_integer(num);
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateString(const char *string)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type = cJSON_String;
        item->valuestring = (char*)custom_strdup((const unsigned char*)string, &global_context);
        if(!item->valuestring)
        {
            delete_item(item, &global_context);
            return NULL;
        }
    }

    return item;
}

/* Reference variants share the caller's data (not copied, not freed). */
CJSON_PUBLIC(cJSON *) cJSON_CreateStringReference(const char *string)
{
    cJSON *item = create_item(&global_context);
    if (item != NULL)
    {
        item->type = cJSON_String | cJSON_IsReference;
        item->valuestring = (char*)cast_away_const(string);
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateObjectReference(const cJSON *child)
{
    cJSON *item = create_item(&global_context);
    if (item != NULL)
    {
        item->type = cJSON_Object | cJSON_IsReference;
        item->child = (cJSON*)cast_away_const(child);
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateArrayReference(const cJSON *child)
{
    cJSON *item = create_item(&global_context);
    if (item != NULL)
    {
        item->type = cJSON_Array | cJSON_IsReference;
        item->child = (cJSON*)cast_away_const(child);
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateRaw(const char *raw)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type = cJSON_Raw;
        item->valuestring = (char*)custom_strdup((const unsigned char*)raw, &global_context);
        if(!item->valuestring)
        {
            delete_item(item, &global_context);
            return NULL;
        }
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateArray(void)
{
    cJSON *item = create_item(&global_context);
    if(item)
    {
        item->type=cJSON_Array;
    }

    return item;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateObject(void)
{
    cJSON *item = create_item(&global_context);
    if (item)
    {
        item->type = cJSON_Object;
    }

    return item;
}

/* Create Arrays:
 * Each builder links children manually (suffix_object) instead of repeated
 * O(n) appends; on any allocation failure the whole array is torn down. */
CJSON_PUBLIC(cJSON *) cJSON_CreateIntArray(const int *numbers, int count)
{
    size_t i = 0;
    cJSON *n = NULL;
    cJSON *p = NULL;
    cJSON *a = NULL;

    if ((count < 0) || (numbers == NULL))
    {
        return NULL;
    }

    a = cJSON_CreateArray();
    for(i = 0; a && (i < (size_t)count); i++)
    {
        n = cJSON_CreateNumber(numbers[i]);
        if (!n)
        {
            delete_item(a, &global_context);
            return NULL;
        }
        if(!i)
        {
            a->child = n;
        }
        else
        {
            suffix_object(p, n);
        }
        p = n;
    }

    return a;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateFloatArray(const float *numbers, int count)
{
    size_t i = 0;
    cJSON *n = NULL;
    cJSON *p = NULL;
    cJSON *a = NULL;

    if ((count < 0) || (numbers == NULL))
    {
        return NULL;
    }

    a = cJSON_CreateArray();

    for(i = 0; a && (i < (size_t)count); i++)
    {
        n = cJSON_CreateNumber((double)numbers[i]);
        if(!n)
        {
            delete_item(a, &global_context);
            return NULL;
        }
        if(!i)
        {
            a->child = n;
        }
        else
        {
            suffix_object(p, n);
        }
        p = n;
    }

    return a;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateDoubleArray(const double *numbers, int count)
{
    size_t i = 0;
    cJSON *n = NULL;
    cJSON *p = NULL;
    cJSON *a = NULL;

    if ((count < 0) || (numbers == NULL))
    {
        return NULL;
    }

    a = cJSON_CreateArray();

    for(i = 0;a && (i < (size_t)count); i++)
    {
        n = cJSON_CreateNumber(numbers[i]);
        if(!n)
        {
            delete_item(a, &global_context);
            return NULL;
        }
        if(!i)
        {
            a->child = n;
        }
        else
        {
            suffix_object(p, n);
        }
        p = n;
    }

    return a;
}

CJSON_PUBLIC(cJSON *) cJSON_CreateStringArray(const char **strings, int count)
{
    size_t i = 0;
    cJSON *n = NULL;
    cJSON *p = NULL;
    cJSON *a = NULL;

    if ((count < 0) || (strings == NULL))
    {
        return NULL;
    }

    a = cJSON_CreateArray();

    for (i = 0; a && (i < (size_t)count); i++)
    {
        n = cJSON_CreateString(strings[i]);
        if(!n)
        {
            delete_item(a, &global_context);
            return NULL;
        }
        if(!i)
        {
            a->child = n;
        }
        else
        {
            suffix_object(p,n);
        }
        p = n;
    }

    return a;
}

/* Duplication.
 * Deep-copies item (and children when recurse is true); the IsReference flag
 * is cleared so the copy owns its data. Returns NULL on allocation failure. */
CJSON_PUBLIC(cJSON *) cJSON_Duplicate(const cJSON *item, cJSON_bool recurse)
{
    cJSON *newitem = NULL;
    cJSON *child = NULL;
    cJSON *next = NULL;
    cJSON *newchild = NULL;

    /* Bail on bad ptr */
    if (!item)
    {
        goto fail;
    }
    /* Create new item */
    newitem = create_item(&global_context);
    if (!newitem)
    {
        goto fail;
    }
    /* Copy over all vars */
    newitem->type = item->type & (~cJSON_IsReference);
    newitem->valueint = item->valueint;
    newitem->valuedouble = item->valuedouble;
    if (item->valuestring)
    {
        newitem->valuestring = (char*)custom_strdup((unsigned char*)item->valuestring, &global_context);
        if (!newitem->valuestring)
        {
            goto fail;
        }
    }
    if (item->string)
    {
        /* const keys are shared, owned keys are duplicated */
        newitem->string = (item->type&cJSON_StringIsConst) ? item->string : (char*)custom_strdup((unsigned char*)item->string, &global_context);
        if (!newitem->string)
        {
            goto fail;
        }
    }
    /* If non-recursive, then we're done! */
    if (!recurse)
    {
        return newitem;
    }
    /* Walk the ->next chain for the child. */
    child = item->child;
    while (child != NULL)
    {
        newchild = cJSON_Duplicate(child, true); /* Duplicate (with recurse) each item in the ->next chain */
        if (!newchild)
        {
            goto fail;
        }
        if (next != NULL)
        {
            /* If newitem->child already set, then crosswire ->prev and ->next and move on */
            next->next = newchild;
            newchild->prev = next;
            next = newchild;
        }
        else
        {
            /* Set newitem->child and move to it */
            newitem->child = newchild;
            next = newchild;
        }
        child = child->next;
    }

    return newitem;

fail:
    if (newitem != NULL)
    {
        delete_item(newitem, &global_context);
    }

    return NULL;
}

/* Strip whitespace and comments from JSON text in place.
 * NOTE(review): after a multiline comment the 'json += 2' runs without
 * re-checking for the terminator — an unterminated comment can step past
 * the closing NUL. Confirm inputs are trusted or add a bounds check. */
CJSON_PUBLIC(void) cJSON_Minify(char *json)
{
    unsigned char *into = (unsigned char*)json;

    if (json == NULL)
    {
        return;
    }

    while (*json)
    {
        if (*json == ' ')
        {
            json++;
        }
        else if (*json == '\t')
        {
            /* Whitespace characters. */
            json++;
        }
        else if (*json == '\r')
        {
            json++;
        }
        else if (*json=='\n')
        {
            json++;
        }
        else if ((*json == '/') && (json[1] == '/'))
        {
            /* double-slash comments, to end of line. */
            while (*json && (*json != '\n'))
            {
                json++;
            }
        }
        else if ((*json == '/') && (json[1] == '*'))
        {
            /* multiline comments. */
            while (*json && !((*json == '*') && (json[1] == '/')))
            {
                json++;
            }
            json += 2;
        }
        else if (*json == '\"')
        {
            /* string literals, which are \" sensitive. */
            *into++ = (unsigned char)*json++;
            while (*json && (*json != '\"'))
            {
                if (*json == '\\')
                {
                    /* copy the escape prefix so escaped quotes survive */
                    *into++ = (unsigned char)*json++;
                }
                *into++ = (unsigned char)*json++;
            }
            *into++ = (unsigned char)*json++;
        }
        else
        {
            /* All other characters. */
            *into++ = (unsigned char)*json++;
        }
    }

    /* and null-terminate. */
    *into = '\0';
}

/* Type predicates: each masks off the flag bits and compares the base type. */
CJSON_PUBLIC(cJSON_bool) cJSON_IsInvalid(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_Invalid;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsFalse(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_False;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsTrue(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xff) == cJSON_True;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsBool(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & (cJSON_True | cJSON_False)) != 0;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsNull(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_NULL;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsNumber(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_Number;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsString(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_String;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsArray(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_Array;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsObject(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_Object;
}

CJSON_PUBLIC(cJSON_bool) cJSON_IsRaw(const cJSON * const item)
{
    if (item == NULL)
    {
        return false;
    }

    return (item->type & 0xFF) == cJSON_Raw;
}

/* Copy a context using the supplied (or default) allocators.
 * The caller must later free the copy with the same deallocator. */
CJSON_PUBLIC(cJSON_Context) cJSON_DuplicateContext(const cJSON_Context context, const cJSON_Allocators * const allocators, void *allocator_userdata)
{
    internal_context *duplicate = NULL;
    const cJSON_Allocators *local_allocators = &global_default_context.allocators;

    if (allocators != NULL)
    {
        /* both allocate and deallocate are mandatory */
        if ((allocators->allocate == NULL) || (allocators->deallocate == NULL))
        {
            return NULL;
        }
        local_allocators = allocators;
    }

    duplicate = (internal_context*)local_allocators->allocate(sizeof(internal_context), allocator_userdata);
    if (duplicate == NULL)
    {
        return NULL;
    }

    memcpy(duplicate, context, sizeof(internal_context));

    return duplicate;
}

CJSON_PUBLIC(cJSON_Context) cJSON_CreateContext(const cJSON_Allocators * const allocators, void *allocator_userdata)
{
    return cJSON_DuplicateContext((cJSON_Context)&global_default_context, allocators, allocator_userdata);
}

/* Context setters: validate, mutate in place, and return the context
 * (NULL on invalid arguments) so calls can be chained. */
CJSON_PUBLIC(cJSON_Context) cJSON_SetAllocators(cJSON_Context context, const cJSON_Allocators allocators)
{
    if ((context == NULL) || (allocators.allocate == NULL) || (allocators.deallocate == NULL))
    {
        return NULL;
    }

    ((internal_context*)context)->allocators = allocators;
    /* old userdata may not fit the new allocators */
    ((internal_context*)context)->userdata = NULL;

    return context;
}

/* Change the allocator userdata attached to a cJSON_Context */
CJSON_PUBLIC(cJSON_Context) cJSON_SetUserdata(cJSON_Context context, void *userdata)
{
    if (context == NULL)
    {
        return NULL;
    }

    ((internal_context*)context)->userdata = userdata;

    return context;
}

CJSON_PUBLIC(size_t) cJSON_GetParseEnd(cJSON_Context context)
{
    if (context == NULL)
    {
        return 0;
    }

    return ((internal_context*)context)->end_position;
}

CJSON_PUBLIC(cJSON_Context) cJSON_SetPrebufferSize(cJSON_Context context, const size_t buffer_size)
{
    if ((context == NULL) || (buffer_size == 0))
    {
        return NULL;
    }

    ((internal_context*)context)->buffer_size = buffer_size;

    return context;
}

CJSON_PUBLIC(cJSON_Context) cJSON_SetFormat(cJSON_Context context, cJSON_Format format)
{
    if (context == NULL)
    {
        return NULL;
    }

    switch (format)
    {
        case CJSON_FORMAT_MINIFIED:
            ((internal_context*)context)->format = false;
            break;
        case CJSON_FORMAT_DEFAULT:
            ((internal_context*)context)->format = true;
            break;
        default:
            return NULL;
    }

    return context;
}

CJSON_PUBLIC(cJSON_Context) cJSON_MakeCaseSensitive(cJSON_Context context, cJSON_bool case_sensitive)
{
    if (context == NULL)
    {
        return NULL;
    }

    ((internal_context*)context)->case_sensitive = case_sensitive;

    return context;
}

CJSON_PUBLIC(cJSON_Context) cJSON_AllowDataAfterJson(cJSON_Context context, cJSON_bool allow_data_after_json)
{
    if (context == NULL)
    {
        return NULL;
    }

    ((internal_context*)context)->allow_data_after_json = allow_data_after_json;

    return context;
}

/* Deep structural equality of two trees (continues past this excerpt). */
static cJSON_bool compare(const cJSON * const a, const cJSON * const b, const internal_context * const context)
{
    if ((a == NULL) || (b == NULL) || ((a->type & 0xFF) != (b->type & 0xFF)) || cJSON_IsInvalid(a))
    {
        return false;
    }

    /* check if type is valid */
    switch (a->type & 0xFF)
    {
        case cJSON_False:
        case cJSON_True:
        case cJSON_NULL:
        case cJSON_Number:
        case cJSON_String:
        case cJSON_Raw:
        case cJSON_Array:
        case cJSON_Object:
            break;

        default:
            return false;
    }

    /* identical objects are equal */
    if (a == b)
    {
        return true;
    }

    switch (a->type & 0xFF)
    {
        /* in these cases and equal type is enough */
        case cJSON_False:
        case cJSON_True:
        case cJSON_NULL:
            return true;

        case cJSON_Number:
            if (a->valuedouble == b->valuedouble)
            {
                return true;
            }
            return false;

        case cJSON_String:
        case cJSON_Raw:
            if ((a->valuestring == NULL) || (b->valuestring == NULL))
            {
                return false;
            }
            if (strcmp(a->valuestring, b->valuestring) == 0)
            {
                return true;
            }

            return false;

        case cJSON_Array:
        {
            cJSON *a_element = a->child;
            cJSON *b_element = b->child;

            for (; (a_element != NULL) && (b_element != NULL);)
            {
                if (!compare(a_element, b_element, context))
                {
                    return false;
                }

                a_element = a_element->next;
                b_element = b_element->next;
            }

            /* one of the arrays is longer than the other */
            if (a_element != b_element)
            {
                return false;
            }

            return true;
        }

        case cJSON_Object:
        {
            cJSON *a_element = NULL;
            cJSON *b_element = NULL;
            size_t a_size = get_array_size(a);
            size_t b_size = get_array_size(b);

            if (a_size != b_size)
            {
                return false;
            }

            cJSON_ArrayForEach(a_element, a)
            {
                /* 
TODO This has O(n^2) runtime, which is horrible! */ b_element = get_object_item(b, a_element->string, context); if (b_element == NULL) { return false; } if (!compare(a_element, b_element, context)) { return false; } } return true; } default: return false; } } CJSON_PUBLIC(cJSON_bool) cJSON_Compare(const cJSON * const a, const cJSON * const b, const cJSON_bool case_sensitive) { internal_context context = global_context; context.case_sensitive = case_sensitive; return compare(a, b, &context); } CJSON_PUBLIC(void *) cJSON_malloc(size_t size) { return global_allocators.malloc_fn(size); } CJSON_PUBLIC(void) cJSON_free(void *object) { global_allocators.free_fn(object); } ```
```c /*************************************************************************** * _ _ ____ _ * Project ___| | | | _ \| | * / __| | | | |_) | | * | (__| |_| | _ <| |___ * \___|\___/|_| \_\_____| * * * This software is licensed as described in the file COPYING, which * you should have received as part of this distribution. The terms * are also available at path_to_url * * You may opt to use, copy, modify, merge, publish, distribute and/or sell * copies of the Software, and permit persons to whom the Software is * furnished to do so, under the terms of the COPYING file. * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY * KIND, either express or implied. * ***************************************************************************/ #include "curl_setup.h" #ifdef USE_LIBRTMP #include "curl_rtmp.h" #include "urldata.h" #include "nonblock.h" /* for curlx_nonblock */ #include "progress.h" /* for Curl_pgrsSetUploadSize */ #include "transfer.h" #include "warnless.h" #include <curl/curl.h> #include <librtmp/rtmp.h> #include "curl_memory.h" /* The last #include file should be: */ #include "memdebug.h" #ifdef _WIN32 #define setsockopt(a,b,c,d,e) (setsockopt)(a,b,c,(const char *)d,(int)e) #define SET_RCVTIMEO(tv,s) int tv = s*1000 #else #define SET_RCVTIMEO(tv,s) struct timeval tv = {s,0} #endif #define DEF_BUFTIME (2*60*60*1000) /* 2 hours */ static CURLcode rtmp_setup_connection(struct connectdata *conn); static CURLcode rtmp_do(struct connectdata *conn, bool *done); static CURLcode rtmp_done(struct connectdata *conn, CURLcode, bool premature); static CURLcode rtmp_connect(struct connectdata *conn, bool *done); static CURLcode rtmp_disconnect(struct connectdata *conn, bool dead); static Curl_recv rtmp_recv; static Curl_send rtmp_send; /* * RTMP protocol handler.h, based on path_to_url */ const struct Curl_handler Curl_handler_rtmp = { "RTMP", /* scheme */ rtmp_setup_connection, /* setup_connection */ rtmp_do, /* do_it */ rtmp_done, /* done */ 
ZERO_NULL, /* do_more */ rtmp_connect, /* connect_it */ ZERO_NULL, /* connecting */ ZERO_NULL, /* doing */ ZERO_NULL, /* proto_getsock */ ZERO_NULL, /* doing_getsock */ ZERO_NULL, /* domore_getsock */ ZERO_NULL, /* perform_getsock */ rtmp_disconnect, /* disconnect */ ZERO_NULL, /* readwrite */ ZERO_NULL, /* connection_check */ PORT_RTMP, /* defport */ CURLPROTO_RTMP, /* protocol */ PROTOPT_NONE /* flags*/ }; const struct Curl_handler Curl_handler_rtmpt = { "RTMPT", /* scheme */ rtmp_setup_connection, /* setup_connection */ rtmp_do, /* do_it */ rtmp_done, /* done */ ZERO_NULL, /* do_more */ rtmp_connect, /* connect_it */ ZERO_NULL, /* connecting */ ZERO_NULL, /* doing */ ZERO_NULL, /* proto_getsock */ ZERO_NULL, /* doing_getsock */ ZERO_NULL, /* domore_getsock */ ZERO_NULL, /* perform_getsock */ rtmp_disconnect, /* disconnect */ ZERO_NULL, /* readwrite */ ZERO_NULL, /* connection_check */ PORT_RTMPT, /* defport */ CURLPROTO_RTMPT, /* protocol */ PROTOPT_NONE /* flags*/ }; const struct Curl_handler Curl_handler_rtmpe = { "RTMPE", /* scheme */ rtmp_setup_connection, /* setup_connection */ rtmp_do, /* do_it */ rtmp_done, /* done */ ZERO_NULL, /* do_more */ rtmp_connect, /* connect_it */ ZERO_NULL, /* connecting */ ZERO_NULL, /* doing */ ZERO_NULL, /* proto_getsock */ ZERO_NULL, /* doing_getsock */ ZERO_NULL, /* domore_getsock */ ZERO_NULL, /* perform_getsock */ rtmp_disconnect, /* disconnect */ ZERO_NULL, /* readwrite */ ZERO_NULL, /* connection_check */ PORT_RTMP, /* defport */ CURLPROTO_RTMPE, /* protocol */ PROTOPT_NONE /* flags*/ }; const struct Curl_handler Curl_handler_rtmpte = { "RTMPTE", /* scheme */ rtmp_setup_connection, /* setup_connection */ rtmp_do, /* do_it */ rtmp_done, /* done */ ZERO_NULL, /* do_more */ rtmp_connect, /* connect_it */ ZERO_NULL, /* connecting */ ZERO_NULL, /* doing */ ZERO_NULL, /* proto_getsock */ ZERO_NULL, /* doing_getsock */ ZERO_NULL, /* domore_getsock */ ZERO_NULL, /* perform_getsock */ rtmp_disconnect, /* disconnect */ ZERO_NULL, 
/* readwrite */ ZERO_NULL, /* connection_check */ PORT_RTMPT, /* defport */ CURLPROTO_RTMPTE, /* protocol */ PROTOPT_NONE /* flags*/ }; const struct Curl_handler Curl_handler_rtmps = { "RTMPS", /* scheme */ rtmp_setup_connection, /* setup_connection */ rtmp_do, /* do_it */ rtmp_done, /* done */ ZERO_NULL, /* do_more */ rtmp_connect, /* connect_it */ ZERO_NULL, /* connecting */ ZERO_NULL, /* doing */ ZERO_NULL, /* proto_getsock */ ZERO_NULL, /* doing_getsock */ ZERO_NULL, /* domore_getsock */ ZERO_NULL, /* perform_getsock */ rtmp_disconnect, /* disconnect */ ZERO_NULL, /* readwrite */ ZERO_NULL, /* connection_check */ PORT_RTMPS, /* defport */ CURLPROTO_RTMPS, /* protocol */ PROTOPT_NONE /* flags*/ }; const struct Curl_handler Curl_handler_rtmpts = { "RTMPTS", /* scheme */ rtmp_setup_connection, /* setup_connection */ rtmp_do, /* do_it */ rtmp_done, /* done */ ZERO_NULL, /* do_more */ rtmp_connect, /* connect_it */ ZERO_NULL, /* connecting */ ZERO_NULL, /* doing */ ZERO_NULL, /* proto_getsock */ ZERO_NULL, /* doing_getsock */ ZERO_NULL, /* domore_getsock */ ZERO_NULL, /* perform_getsock */ rtmp_disconnect, /* disconnect */ ZERO_NULL, /* readwrite */ ZERO_NULL, /* connection_check */ PORT_RTMPS, /* defport */ CURLPROTO_RTMPTS, /* protocol */ PROTOPT_NONE /* flags*/ }; static CURLcode rtmp_setup_connection(struct connectdata *conn) { RTMP *r = RTMP_Alloc(); if(!r) return CURLE_OUT_OF_MEMORY; RTMP_Init(r); RTMP_SetBufferMS(r, DEF_BUFTIME); if(!RTMP_SetupURL(r, conn->data->change.url)) { RTMP_Free(r); return CURLE_URL_MALFORMAT; } conn->proto.generic = r; return CURLE_OK; } static CURLcode rtmp_connect(struct connectdata *conn, bool *done) { RTMP *r = conn->proto.generic; SET_RCVTIMEO(tv, 10); r->m_sb.sb_socket = (int)conn->sock[FIRSTSOCKET]; /* We have to know if it's a write before we send the * connect request packet */ if(conn->data->set.upload) r->Link.protocol |= RTMP_FEATURE_WRITE; /* For plain streams, use the buffer toggle trick to keep data flowing */ 
if(!(r->Link.lFlags & RTMP_LF_LIVE) && !(r->Link.protocol & RTMP_FEATURE_HTTP)) r->Link.lFlags |= RTMP_LF_BUFX; (void)curlx_nonblock(r->m_sb.sb_socket, FALSE); setsockopt(r->m_sb.sb_socket, SOL_SOCKET, SO_RCVTIMEO, (char *)&tv, sizeof(tv)); if(!RTMP_Connect1(r, NULL)) return CURLE_FAILED_INIT; /* Clients must send a periodic BytesReceived report to the server */ r->m_bSendCounter = true; *done = TRUE; conn->recv[FIRSTSOCKET] = rtmp_recv; conn->send[FIRSTSOCKET] = rtmp_send; return CURLE_OK; } static CURLcode rtmp_do(struct connectdata *conn, bool *done) { RTMP *r = conn->proto.generic; if(!RTMP_ConnectStream(r, 0)) return CURLE_FAILED_INIT; if(conn->data->set.upload) { Curl_pgrsSetUploadSize(conn->data, conn->data->state.infilesize); Curl_setup_transfer(conn, -1, -1, FALSE, NULL, FIRSTSOCKET, NULL); } else Curl_setup_transfer(conn, FIRSTSOCKET, -1, FALSE, NULL, -1, NULL); *done = TRUE; return CURLE_OK; } static CURLcode rtmp_done(struct connectdata *conn, CURLcode status, bool premature) { (void)conn; /* unused */ (void)status; /* unused */ (void)premature; /* unused */ return CURLE_OK; } static CURLcode rtmp_disconnect(struct connectdata *conn, bool dead_connection) { RTMP *r = conn->proto.generic; (void)dead_connection; if(r) { conn->proto.generic = NULL; RTMP_Close(r); RTMP_Free(r); } return CURLE_OK; } static ssize_t rtmp_recv(struct connectdata *conn, int sockindex, char *buf, size_t len, CURLcode *err) { RTMP *r = conn->proto.generic; ssize_t nread; (void)sockindex; /* unused */ nread = RTMP_Read(r, buf, curlx_uztosi(len)); if(nread < 0) { if(r->m_read.status == RTMP_READ_COMPLETE || r->m_read.status == RTMP_READ_EOF) { conn->data->req.size = conn->data->req.bytecount; nread = 0; } else *err = CURLE_RECV_ERROR; } return nread; } static ssize_t rtmp_send(struct connectdata *conn, int sockindex, const void *buf, size_t len, CURLcode *err) { RTMP *r = conn->proto.generic; ssize_t num; (void)sockindex; /* unused */ num = RTMP_Write(r, (char *)buf, 
curlx_uztosi(len)); if(num < 0) *err = CURLE_SEND_ERROR; return num; } #endif /* USE_LIBRTMP */ ```
The women's 5000 metres at the 2023 World Athletics Championships was held at the National Athletics Centre in Budapest on 23 and 26 August 2023. Summary As temperatures rose in Budapest, meet officials moved the heats from Tuesday morning to Wednesday evening. Still, with the heats being conducted in the heat of 31°C (88°F) temperatures, the elite athletes had little interest in running a hard race except World U20 double Champion Agate Caune. In the first heat, the 19 year old captured the crowd by taking the pace out. After getting jostled in a tight slow moving pack for the first 200 metres, she stepped to the side and around the pack. Quickly, she separated from the pack, clicking off 70 second laps, while the peloton was doing more relaxed 73s. By the ninth lap, she had a 23 second, about 150 metre, advantage. Slowly the peloton began to reel in the rebellious teenager as the strain began to show on her face. 600 metres from the finish, 10,000 metre gold medalist Gudaf Tsegay started an extended kick for home. Returning silver medalist Beatrice Chebet and 2019 silver medalist Margaret Kipkemboi followed her and the pack stretched out, the gap to Caune was shrinking. She eventually was caught but kept her wits about her. Caune continued on to finish in fourth place qualifying her for the finals with a new personal best. In the second heat, they ran about 25 seconds faster, with the two favorites; new world record holder Faith Kipyegon and triple threat Olympic Champion Sifan Hassan sprinting it out to satisfy their competitive juices. Hassan won the heat by .02 seconds. With all the drama just to qualify for the final, could the final live up? Even before the start, Caune did not appear due to a pelvic injury. With new rules in place, she was replaced at the last minute by the #17 qualifier Francine Niyomukunzi. The race started off with Tsegay moving out to a 65-second first lap opening up a gap. 
Then she backed off the accelerator, letting Ejgayehu Taye take the pace down to 75. Then she sped up to a 70 as both the Ethiopian and Kenyan teams dominated the front, then slowed down to 77. Kipyegon came forward to guide the pace to the slower speeds. Lilian Kasait Rengeruk took a couple of laps until Tsegay moved up to the 71 and a couple of 70-second laps. Through all these maneuvers, 14 women were still in contact with the lead, waiting. With a lap and a half to home, it was racing time. Kipyegon hit the front. Hassan moved up from mid-pack to Kipyegon's shoulder. The pace was quickening but only a few were dropping off the back. From 300 to 200 to go, they sped into a controlled sprint. Tsegay couldn't keep up. A gap appeared with Kipyegon, Hassan, and Chebet as the likely medalists. All three sprinted for home but the order and spacing didn't change. 13 women finished in less than 7 and a half seconds. Kipyegon got her double. After falling in the 10,000 finish, Hassan is leaving with medals in her other two events, anticipating running the Chicago Marathon in just 6 weeks. Records Before the competition records were as follows: Qualification standard The standard to qualify automatically for entry was 14:57.00. Schedule The event schedule, in local time (UTC +2), was as follows: Results Heats First 8 of each heat (Q) qualified to the final. Final The final was started on 26 August at 20:50. References 5000 5000 metres at the World Athletics Championships
The Palace of Lourizán is a manor house in Herbalonga in the civil parish of Lourizán, in Pontevedra, Spain. History In the 15th century this property was transformed into a farm and belonged to the Montenegro family. The circular crenellated dovecote dates from this period. A fortified tower-house was built on the estate, where Luis de Góngora spent some time in 1609 and wrote part of his book Soledades. In the 17th century, the estate, known as Granja de la Sierra, was owned by the Marquisate of La Sierra. Later it had different owners, merchants and businessmen. In the 19th century, the palace belonged to Buenaventura Marcó del Pont Bori, after he bought it from the heirs of Francisco Genaro Ángel, his wife's brother. Later it was converted into a main residence and a summer cottage when Eugenio Montero Ríos lived there. In October 1876 he rented the estate and acquired it on 16 May 1879. At that time the estate was very close to the ria of Pontevedra and had its own pier. Between 1893 and 1894, the first major refurbishment of the manor house was carried out. It consisted of creating a wooden gallery in the south wing, which enclosed the building's chapel. The pazo became an ostentatious residence with representative institutional functions, as well as a living, leisure and recreational space. The Treaty of Paris was signed in its rooms after the war with the United States in 1898, in which Spain lost Cuba, Puerto Rico, the Philippines and Guam. Eugenio Montero Ríos commissioned the architect Jenaro de la Fuente Domínguez to completely renovate the palace in the early 20th century. The aim was to turn the pazo into a large residential palace, imitating and surpassing in size the typology of the hôtels particuliers in the fashion of the French Second Empire style. The project's façade plan dates from 20 February 1909 and it integrated and harmonised elements from different stages of construction to create an architectural unity. 
The refurbishment gave the palace a new appearance, both on the outside and inside. Work began in September 1909 and was completed in 1912. Originally, the marble statues on the great central staircase leading to the palace formed the so-called avenue of statues, but with this major refurbishment of the pazo they were relocated to the staircase. Eugenio Montero Ríos lived in the Lourizán Palace until his death in 1914. The Provincial Council of Pontevedra bought it in 1943 from the Provincial Savings Bank of Pontevedra and (a fifth) from the widowed Marquise of Alhucemas, daughter of Montero Ríos. That same year, the Provincial Council handed it over to the Ministry of Education to be used as a regional centre for teaching, research and forestry experiments, and in 1946 it became a higher technical school of forestry. The centre became part of the National Agricultural Research Institute (INIA) in 1973 and in 1984 it was transferred to the Xunta de Galicia. It is currently integrated into the Centre for Sustainable Development of the Regional Ministry of the Environment since 1991. The main objectives of the Environmental and Forestry Research Centre of Lourizán are the protection, conservation and improvement of Galicia's forestry heritage. On 19 May 2023, the palace became the property of the Xunta de Galicia. Description The building The present building has a romantic air and is the work of Jenaro de la Fuente Domínguez. It is an eclectic building with influences from Art Nouveau, Classicism and the French architecture of the Second Empire. The structure of the palace is symmetrical, monumental and with a predominance of horizontal volumes. It has a ground floor and two upper floors. The central body is U-shaped with three towers crowned by French mansards and slate roof. The facade has Ionic columns and pilasters. The central part is enhanced by a coat of arms and a clock, in the place where the coats of arms of Galician manor houses are usually found. 
In front of this central body, advanced in relation to the sides, is a large two-flight imperial stone staircase, surrounded by neoclassical white marble statues personifying justice and prudence and representing virtues, values and devotions. At the top of the staircase are the statues of Germanicus, Discobolus, the Dying Slave and Sophocles, while at the ends of the rotunda are the statues of Pallas Athena and Diana of Gabii. In the pavilions on the main façade, next to the entrance door to the vestibule, the allegories of Spring and Summer can be seen. The round staircase generates a belvedere from which to contemplate the views in imitation of the French baroque style. This staircase leads to the main entrance and to a semicircular terrace (which serves as a viewpoint) above an artificial grotto that simulates a volcanic limestone cave called the Grotto of Mirrors. At this point, two side wings open up, consisting of light long galleries of stone and glass that envelop the old pazo. On the first floor, the facades of the side wings give way to the central body and create terraces with balustrades. In these lateral sections, the high windows, pilasters, balconies, dormers and domes lined with zinc scales of the dôme à l'impériale type are repeated, which reinforce the elegance of the palace. The large number of windows and balconies stand out, bringing light and lightness to the structure. The decoration is remarkable for the fusion of neoclassical and Art Nouveau elements. The columns, balconies and ornaments show classical resources. The triangular pediment of the central body is decorated with the symbols of the profession of Eugenio Montero Ríos, and the attributes of justice, a shield with a book and a feather. The interior is accessed through a simple door with the initials on the glass of its former owners, "E and A", "Eugenio and Avelina". 
The interior of the palace is organised around the three floors visible from the outside, where the ground floor and the first floor contain the remains of the walls of the first house and pazo. The rooms are distributed according to two halves separated by a long corridor that runs along the entire length of the building, as in palatial architecture, leaving the rooms of higher rank, the rooms for receiving visitors, lounges and offices, towards the front facade facing the park, and the accessory parts such as servants' quarters, kitchen and pantries towards the rear facing the farmyard. The estate The manor house has 54 hectares of gardens and groves, which show the different uses to which it has been put over the centuries: farm, seigneurial botanical garden and forestry research centre. It has one of the most important tree groves in Europe, with plant species brought from other latitudes or even singular modifications of species to adapt them to the climate of Pontevedra, resulting in a unique forest ensemble. Many native trees grow here, such as oaks, chestnuts and Birches, sycamores and introduced and exotic trees, such as Cypresses, Araucarias, cedars, magnolias or common privet, many of which were brought by French gardeners. Several of these trees are included in the Catalogue of Singular Trees of the Galician Government. There are arboretums with all varieties of chestnut trees, pines, eucalyptus or camellias, with the tallest specimen in the world, a 20.5 metre tall Japanese camellia. There is also a rimu from New Zealand and a small Taiwanese garden. Around the palace there are ponds, granaries on stilts, a 15th-century dovecote, a glass greenhouse with an iron structure from 1900, a one-piece granite table (apparently extracted from a rock on the island of Tambo), white marble statues and several fountains, such as that of the Shell, that of the Three Channels, that of the Patio and that of the Cave of Mirrors. 
The estate is organised into avenues: the Camellia Avenue, the Eucalyptus Avenue and the Cave of Mirrors Avenue. The art nouveau greenhouse from the early 20th century is made of glass and wrought iron and the Galician attic with its threshing floor and dryer has 16 feet. The greenhouse is notable for its large, light structure with a rectangular ground plan. Its highest point is 7 metres in the central space, where the larger species are cultivated, leaving the side spaces for smaller plants. Culture The writer Lola Fernández Pazos published the novel El Pazo de Lourizán in 2022, which is set in the palace. The island of Tambo was once part of the palace's territory. Montero Ríos bought three fifths of the island in 1884 and another fifth in 1894. In 1940, his children sold it to the Navy for use by the Naval Military Academy. Gallery References See also Bibliography Related articles Arboretum of Lourizán Palace of the Deputation of Pontevedra Pazo External links The Lourizán Palace, on the website Visit-Pontevedra The pazo of trees, on the Diputación de Pontevedra website. Palaces in Galicia (Spain) Monuments and memorials in Spain Buildings and structures in Pontevedra Art Nouveau Eclectic architecture
```python #see license.txt for license details #history path_to_url __version__='3.3.0' __doc__=""" This module includes any mathematical methods needed for PIDDLE. It should have no dependencies beyond the Python library. So far, just Robert Kern's bezierArc. """ from math import sin, cos, pi, ceil def bezierArc(x1,y1, x2,y2, startAng=0, extent=90): """bezierArc(x1,y1, x2,y2, startAng=0, extent=90) --> List of Bezier curve control points. (x1, y1) and (x2, y2) are the corners of the enclosing rectangle. The coordinate system has coordinates that increase to the right and down. Angles, measured in degress, start with 0 to the right (the positive X axis) and increase counter-clockwise. The arc extends from startAng to startAng+extent. I.e. startAng=0 and extent=180 yields an openside-down semi-circle. The resulting coordinates are of the form (x1,y1, x2,y2, x3,y3, x4,y4) such that the curve goes from (x1, y1) to (x4, y4) with (x2, y2) and (x3, y3) as their respective Bezier control points.""" x1,y1, x2,y2 = min(x1,x2), max(y1,y2), max(x1,x2), min(y1,y2) if abs(extent) <= 90: arcList = [startAng] fragAngle = float(extent) Nfrag = 1 else: arcList = [] Nfrag = int(ceil(abs(extent)/90.)) fragAngle = float(extent) / Nfrag x_cen = (x1+x2)/2. y_cen = (y1+y2)/2. rx = (x2-x1)/2. ry = (y2-y1)/2. halfAng = fragAngle * pi / 360. kappa = abs(4. / 3. * (1. - cos(halfAng)) / sin(halfAng)) if fragAngle < 0: sign = -1 else: sign = 1 pointList = [] for i in range(Nfrag): theta0 = (startAng + i*fragAngle) * pi / 180. theta1 = (startAng + (i+1)*fragAngle) *pi / 180. 
if fragAngle > 0: pointList.append((x_cen + rx * cos(theta0), y_cen - ry * sin(theta0), x_cen + rx * (cos(theta0) - kappa * sin(theta0)), y_cen - ry * (sin(theta0) + kappa * cos(theta0)), x_cen + rx * (cos(theta1) + kappa * sin(theta1)), y_cen - ry * (sin(theta1) - kappa * cos(theta1)), x_cen + rx * cos(theta1), y_cen - ry * sin(theta1))) else: pointList.append((x_cen + rx * cos(theta0), y_cen - ry * sin(theta0), x_cen + rx * (cos(theta0) + kappa * sin(theta0)), y_cen - ry * (sin(theta0) - kappa * cos(theta0)), x_cen + rx * (cos(theta1) - kappa * sin(theta1)), y_cen - ry * (sin(theta1) + kappa * cos(theta1)), x_cen + rx * cos(theta1), y_cen - ry * sin(theta1))) return pointList ```
```java package com.yyydjk.gank.theme; import android.content.Context; import android.content.res.Resources; import android.util.AttributeSet; import android.view.View; import android.widget.RelativeLayout; /** * Created by chengli on 15/6/8. */ public class ColorRelativeLayout extends RelativeLayout implements ColorUiInterface { private int attr_background = -1; public ColorRelativeLayout(Context context) { super(context); } public ColorRelativeLayout(Context context, AttributeSet attrs) { super(context, attrs); this.attr_background = ViewAttributeUtil.getBackgroundAttibute(attrs); } public ColorRelativeLayout(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); this.attr_background = ViewAttributeUtil.getBackgroundAttibute(attrs); } @Override public View getView() { return this; } @Override public void setTheme(Resources.Theme themeId) { if (attr_background != -1) { ViewAttributeUtil.applyBackgroundDrawable(this, themeId, attr_background); } } } ```
The Compact Computer 40 or CC-40 is a portable computer developed by Texas Instruments. It started development in 1981, and was released in March 1983 for US$249. The CC-40 has a single-line 31 character LCD display, weighs , and is powered by an AC adapter or can operate for 200 hours on four AA batteries. Memory is not erased by turning the unit off; it can retain data for several months. The CC-40 lacks a way to store data more permanently. Software was only available on cartridge or by typing programs into its built-in BASIC interpreter. The BASIC interpreter is similar but not identical to that of the TI-99/4A. The CC-40 uses TI's TMS70C20 CPU, an 8-bit microprocessor that runs at 2.5 MHz. The system has 6 kilobytes of Random Access Memory (expandable to 18 KB), and 34 KB of Read Only Memory. Peripherals can be connected via a Hexbus port: an 80 column printer, printer/plotter, RS-232 interface, and modem. A licensed version of the Exatron Stringy Floppy, a digital "Wafertape" unit depicted on the computer's box, was only released as a prototype, reportedly because it proved too unreliable. Development The Compact Computer 40 was developed under the internal codename "Lonestar". Reception BYTE heavily criticized the CC-40, noting that "there's no clock. No file system. Only one BASIC program at a time can reside in memory, and the user can only work with about 5200 bytes of that. And the keyboard is vile". It also noted the lack of any external storage because the TI Wafertape drive was not available, and the complete lack of software. The review suggested that the computer should be considered a "dandy scientific calculator" since good programmable calculators cost about the same as the CC-40's price, but that otherwise "virtually all of its competition vastly outstrips it in power and features", including the TI-99/4A. 
In a review for Creative Computing, Joe Devlin wrote, "The permanent memory and powerful Basic exceed the capabilities found in most hand-held computers." He recommended it as a convenient tool for learning BASIC or for someone who frequently does calculations with formulas. In 1983, MicroKids magazine included the CC-40 on a list of "Top 10 Great Gift Ideas." Legacy The Hex-Bus interface was also available for the TI-99/4A as an unreleased prototype expansion peripheral. It was built into the prototypes of the cancelled TI-99/2 and TI-99/8 computers. An improved model, the CC-40 Plus, was in the final stages of development and included a cassette port. The project was canceled when Texas Instruments discontinued the 99/4A and exited the home computer market. Most of the architecture of the CC-40 Plus was reused in the Texas Instruments TI-74. The TI-74 changed the physical footprint of the Hexbus port and renamed it Dockbus. Old Hexbus peripherals could even be used on the TI-74 with an adapter. Also in development was the Compact Computer 70 (codenamed "Superstar"). The CC-70 was to have four cartridge ports, more RAM, and an 8 x 80 display with graphics capability. The CC-70 mock-up from Calculator division lead CB Wilson showed up on eBay in 2020. Engineer Steven Reid has stated that the first run of chips for the CC-70 failed, and TI discontinued the Home Computer division in October 1983 before the chip issues could be corrected. References Thomas, David (1983). Learn BASIC: A Guide to Programming the Texas Instruments Compact Computer 40. Texas Instruments, Inc. and McGraw Hill, Inc. . External links CC-40 images and documentation 99er page on the CC-40 OLD-COMPUTERS.COM online-museum CC-40 page Curtis McCain Page on the CC-40 Computer-related introductions in 1983 Portable computers
```objective-c #pragma once #include <Interpreters/Squashing.h> #include <Processors/ISimpleTransform.h> #include <Processors/IInflatingTransform.h> #include <Processors/Sinks/SinkToStorage.h> #include <Processors/Transforms/ApplySquashingTransform.h> namespace DB { class SquashingTransform : public ExceptionKeepingTransform { public: explicit SquashingTransform( const Block & header, size_t min_block_size_rows, size_t min_block_size_bytes); String getName() const override { return "SquashingTransform"; } void work() override; protected: void onConsume(Chunk chunk) override; GenerateResult onGenerate() override; void onFinish() override; private: Squashing squashing; Chunk cur_chunk; Chunk finish_chunk; }; class SimpleSquashingChunksTransform : public IInflatingTransform { public: explicit SimpleSquashingChunksTransform(const Block & header, size_t min_block_size_rows, size_t min_block_size_bytes); String getName() const override { return "SimpleSquashingTransform"; } protected: void consume(Chunk chunk) override; bool canGenerate() override; Chunk generate() override; Chunk getRemaining() override; private: Squashing squashing; Chunk squashed_chunk; }; } ```
M. S. Mani was an Indian politician and former Member of the Legislative Assembly of Tamil Nadu. He was elected to the Tamil Nadu legislative assembly from Tiruchirappalli - I constituency as a Dravida Munnetra Kazhagam candidate in 1962 and 1967 elections. References Dravida Munnetra Kazhagam politicians Possibly living people Year of birth missing Members of the Tamil Nadu Legislative Assembly
```smalltalk
// ==========================================================================
//  Squidex Headless CMS
// ==========================================================================
// ==========================================================================

using System.Globalization;
using Squidex.Domain.Apps.Core.TestHelpers;
using Squidex.Infrastructure;
using Squidex.Infrastructure.EventSourcing;
using Squidex.Infrastructure.Json;
using Squidex.Infrastructure.Json.Objects;
using Squidex.Infrastructure.Reflection;
using Squidex.Infrastructure.States;

namespace Squidex.Domain.Apps.Entities.Backup;

// Round-trip tests for BackupWriter/BackupReader: JSON payloads, binary blobs
// and event streams must survive a write-then-read cycle for every BackupVersion.
public class BackupReaderWriterTests
{
    private readonly IEventFormatter eventFormatter;
    private readonly IEventStreamNames eventStreamNames = A.Fake<IEventStreamNames>();
    private readonly IJsonSerializer serializer = TestUtils.DefaultSerializer;
    private readonly TypeRegistry typeRegistry = new TypeRegistry();

    // Minimal event type; the Id lets assertions correlate source and target events.
    [TypeName(nameof(MyEvent))]
    public sealed class MyEvent : IEvent
    {
        public Guid Id { get; set; } = Guid.NewGuid();
    }

    public BackupReaderWriterTests()
    {
        typeRegistry.Add<IEvent, MyEvent>("MyEvent");

        eventFormatter = new DefaultEventFormatter(typeRegistry, serializer);
    }

    [Fact]
    public async Task Should_not_write_blob_if_handler_failed()
    {
        var file = "File.json";

        await TestReaderWriterAsync(BackupVersion.V1, async writer =>
        {
            // Open a blob stream but fail before writing; the archive must not
            // contain a (partial) entry afterwards.
            try
            {
                await using (var stream = await writer.OpenBlobAsync(file))
                {
                    throw new InvalidOperationException();
                }
            }
            catch
            {
                return;
            }
        },
        async reader =>
        {
            await Assert.ThrowsAsync<FileNotFoundException>(() => ReadGuidAsync(reader, file));
        });
    }

    [Fact]
    public async Task Should_return_true_if_file_exists()
    {
        var file = "File.json";
        var value = Guid.NewGuid();

        await TestReaderWriterAsync(BackupVersion.V1, async writer =>
        {
            await WriteJsonGuidAsync(writer, file, value);
        },
        async reader =>
        {
            var hasFile = await reader.HasFileAsync(file);

            Assert.True(hasFile);
        });
    }

    // FIX: was named Should_return_file_if_file_does_not_exist, contradicting the
    // Assert.False below; also dropped an unused 'value' local.
    [Fact]
    public async Task Should_return_false_if_file_does_not_exist()
    {
        var file = "File.json";

        await TestReaderWriterAsync(BackupVersion.V1, async writer =>
        {
            // Write nothing at all.
            await Task.Yield();
        },
        async reader =>
        {
            var hasFile = await reader.HasFileAsync(file);

            Assert.False(hasFile);
        });
    }

    [Fact]
    public async Task Should_read_and_write_json_async()
    {
        var file = "File.json";
        var value = Guid.NewGuid();

        await TestReaderWriterAsync(BackupVersion.V1, async writer =>
        {
            await WriteJsonGuidAsync(writer, file, value);
        },
        async reader =>
        {
            var read = await ReadJsonGuidAsync(reader, file);

            Assert.Equal(value, read);
        });
    }

    [Fact]
    public async Task Should_read_and_write_blob_async()
    {
        var file = "File.json";
        var value = Guid.NewGuid();

        await TestReaderWriterAsync(BackupVersion.V1, async writer =>
        {
            await WriteGuidAsync(writer, file, value);
        },
        async reader =>
        {
            var read = await ReadGuidAsync(reader, file);

            Assert.Equal(value, read);
        });
    }

    [Fact]
    public async Task Should_throw_exception_if_json_not_found()
    {
        await TestReaderWriterAsync(BackupVersion.V1, writer =>
        {
            return Task.CompletedTask;
        },
        async reader =>
        {
            await Assert.ThrowsAsync<FileNotFoundException>(() => reader.ReadJsonAsync<int>("404"));
        });
    }

    [Fact]
    public async Task Should_throw_exception_if_blob_not_found()
    {
        await TestReaderWriterAsync(BackupVersion.V1, writer =>
        {
            return Task.CompletedTask;
        },
        async reader =>
        {
            await Assert.ThrowsAsync<FileNotFoundException>(() => reader.OpenBlobAsync("404"));
        });
    }

    [Theory]
    [InlineData(BackupVersion.V1)]
    [InlineData(BackupVersion.V2)]
    public async Task Should_write_and_read_events_to_backup(BackupVersion version)
    {
        var randomDomainIds = new List<DomainId>();

        for (var i = 0; i < 100; i++)
        {
            randomDomainIds.Add(DomainId.NewGuid());
        }

        DomainId RandomDomainId()
        {
            return randomDomainIds[Random.Shared.Next(randomDomainIds.Count)];
        }

        // Each envelope carries its own Id and a sequential Index header so the
        // reader side can verify ordering and interleaved blob/JSON entries.
        var sourceEvents = new List<(string Stream, Envelope<MyEvent> Event)>();

        for (var i = 0; i < 200; i++)
        {
            var @event = new MyEvent();

            var envelope = Envelope.Create(@event);

            envelope.Headers.Add("Id", JsonValue.Create(@event.Id));
            envelope.Headers.Add("Index", JsonValue.Create(i));

            sourceEvents.Add(($"My-{RandomDomainId()}", envelope));
        }

        await TestReaderWriterAsync(version, async writer =>
        {
            foreach (var (stream, envelope) in sourceEvents)
            {
                var eventData = eventFormatter.ToEventData(envelope, Guid.NewGuid(), true);
                var eventStored = new StoredEvent(stream, "1", 2, eventData);

                var index = int.Parse(envelope.Headers["Index"].ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture);

                // Sprinkle blob and JSON attachments between events to prove
                // they can be interleaved with the event stream.
                if (index % 17 == 0)
                {
                    await WriteGuidAsync(writer, index.ToString(CultureInfo.InvariantCulture), envelope.Payload.Id);
                }
                else if (index % 37 == 0)
                {
                    await WriteJsonGuidAsync(writer, index.ToString(CultureInfo.InvariantCulture), envelope.Payload.Id);
                }

                writer.WriteEvent(eventStored);
            }
        },
        async reader =>
        {
            var targetEvents = new List<(string Stream, Envelope<IEvent> Event)>();

            await foreach (var @event in reader.ReadEventsAsync(eventStreamNames, eventFormatter))
            {
                var index = int.Parse(@event.Event.Headers["Index"].ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture);

                var id = Guid.Parse(@event.Event.Headers["Id"].ToString());

                if (index % 17 == 0)
                {
                    var guid = await ReadGuidAsync(reader, index.ToString(CultureInfo.InvariantCulture));

                    Assert.Equal(id, guid);
                }
                else if (index % 37 == 0)
                {
                    var guid = await ReadJsonGuidAsync(reader, index.ToString(CultureInfo.InvariantCulture));

                    Assert.Equal(id, guid);
                }

                targetEvents.Add(@event);
            }

            // Events must come back in exactly the order they were written.
            for (var i = 0; i < targetEvents.Count; i++)
            {
                var targetEvent = targetEvents[i].Event.To<MyEvent>();
                var targetStream = targetEvents[i].Stream;

                var sourceEvent = sourceEvents[i].Event.To<MyEvent>();
                var sourceStream = sourceEvents[i].Stream;

                Assert.Equal(sourceEvent.Payload.Id, targetEvent.Payload.Id);
                Assert.Equal(sourceStream, targetStream);
            }
        });
    }

    private static Task<Guid> ReadJsonGuidAsync(IBackupReader reader, string file)
    {
        return reader.ReadJsonAsync<Guid>(file);
    }

    private static Task WriteJsonGuidAsync(IBackupWriter writer, string file, Guid value)
    {
        return writer.WriteJsonAsync(file, value);
    }

    // Writes the guid as a raw 16-byte blob.
    private static async Task WriteGuidAsync(IBackupWriter writer, string file, Guid value)
    {
        await using (var stream = await writer.OpenBlobAsync(file))
        {
            await stream.WriteAsync(value.ToByteArray());
        }
    }

    // Reads a guid previously written by WriteGuidAsync.
    private static async Task<Guid> ReadGuidAsync(IBackupReader reader, string file)
    {
        var read = Guid.Empty;

        await using (var stream = await reader.OpenBlobAsync(file))
        {
            var buffer = new byte[16];

            _ = await stream.ReadAsync(buffer);

            read = new Guid(buffer);
        }

        return read;
    }

    // Runs the write callback against an in-memory backup, rewinds the stream
    // and replays it through the read callback.
    private async Task TestReaderWriterAsync(BackupVersion version, Func<IBackupWriter, Task> write, Func<IBackupReader, Task> read)
    {
        using (var stream = new MemoryStream())
        {
            using (var writer = new BackupWriter(serializer, stream, true, version))
            {
                await write(writer);
            }

            stream.Position = 0;

            using (var reader = new BackupReader(serializer, stream))
            {
                await read(reader);
            }
        }
    }
}
```
```objective-c #pragma once #include <unknwn.h> // Nested.HierarchyD uses classic COM interface IReferenceTrackerExtension ```
```objective-c
/*****************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
*
* along with this program.  If not, see <path_to_url
*****************************************************************************/

#ifndef LNM_FUELTOOL_H
#define LNM_FUELTOOL_H

#include "options/optiondata.h"

#include <QCoreApplication>

namespace atools {
namespace fs {
namespace perf {
class AircraftPerf;
}
}
}

/*
 * Provides functions to convert fuel numbers to strings containing values for volume, gallons and
 * alternative units (lbs vs kg and gal vs. liter).
 *
 * All input units are gallons or lbs.
 */
class FuelTool
{
  Q_DECLARE_TR_FUNCTIONS(FuelTool)

public:
  /* Fuel type and volume flags are taken from the aircraft performance profile. */
  explicit FuelTool(const atools::fs::perf::AircraftPerf *aircraftPerf);
  explicit FuelTool(const atools::fs::perf::AircraftPerf& aircraftPerf);
  /* Construct directly from the two flags without a performance profile. */
  explicit FuelTool(bool jetfuelParam, bool fuelAsVolumeParam);

  /* Make a string with fuel in lbs and gallons or kg and liter */
  /* To currently user selected fuel units */
  QString weightVolLocal(float valueLbsGal) const;

  /* To opposite of currently user selected fuel units */
  QString weightVolOther(float valueLbsGal) const;

  /* Fuel flow in currently selected units */
  QString flowWeightVolLocal(float valueLbsGal) const;

  /* Get a string with local optionally bold and other unit in brackets */
  QString weightVolLocalOther(float valueLbsGal, bool localBold = false, bool otherSmall = true) const;

  /* Jetfuel or avgas */
  QString getFuelTypeString() const;

  bool isJetfuel() const
  {
    return jetfuel;
  }

  bool isFuelAsVolume() const
  {
    return fuelAsVolume;
  }

private:
  /* Shared formatter used by the public weight/volume methods. */
  QString fuelWeightVol(opts::UnitFuelAndWeight unitFuelAndWeight, float valueLbsGal) const;

  /* Immutable after construction; selected by the constructors above. */
  bool jetfuel, fuelAsVolume;
};

#endif // LNM_FUELTOOL_H
```
```turing
#!perl -w

# Test for mro::get_linear_isa: builds a small set of packages with varying
# @ISA shapes and checks the linearized inheritance order for each.
#
# NOTE(review): several package identifiers below appear to have been lost in
# extraction (empty names after "package", in @ISA and in %expect) — the
# original test presumably used non-ASCII (UTF-8) package names, given the
# "use utf8" pragma; confirm against the perl core test suite before relying
# on this copy.

BEGIN {
    chdir 't' if -d 't';
    require "./test.pl";
    set_up_inc('../lib');
}

use strict;
use utf8;
use open qw( :utf8 :std );

plan 'no_plan';

# package klonk doesn't have a stash.

package o;
# No parents

package ur;
# 1 parent
@ur::ISA = 'ko';

package ;
# 2 parents
@ur::ISA = ('ko', 'o');

package ck;
# No parents, has @ISA
@ck::ISA = ();

package zzz;
@zzz::ISA = ('ck', 'o');

package ;
@::ISA = ('o', 'ck');

package main;

require mro;

# Expected linearization (the package itself first, then parents in C3/DFS order).
my %expect = (
    ko => [qw(ko)],
    ur => [qw(ur ko o)],
    o => [qw(o)],
    => [qw()],
    ck => [qw(ck)],
    zzz => [qw(zzz ck o)],
    => [qw( o ck)],
);

foreach my $package (qw(ko ur o ck zzz )) {
    my $ref = bless [], $package;
    my $isa = $expect{$package};
    # The stringified linearization must match the expected order exactly.
    is("@{mro::get_linear_isa($package)}", "@$isa", "\@ISA for $package");

    # Every class in the chain (plus UNIVERSAL) must satisfy isa().
    foreach my $class ($package, @$isa, 'UNIVERSAL') {
        object_ok($ref, $class, $package);
    }
}
```
```java package com.vladsch.flexmark.html2md.converter; import com.vladsch.flexmark.util.data.DataHolder; import com.vladsch.flexmark.util.dependency.Dependent; import org.jetbrains.annotations.Nullable; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.function.Function; /** * Factory for instantiating new node renderers with dependencies */ class DelegatingNodeRendererFactoryWrapper implements Function<DataHolder, HtmlNodeRenderer>, Dependent, DelegatingNodeRendererFactory { final private HtmlNodeRendererFactory nodeRendererFactory; private List<DelegatingNodeRendererFactoryWrapper> nodeRenderers; private Set<Class<?>> myDelegates = null; public DelegatingNodeRendererFactoryWrapper(List<DelegatingNodeRendererFactoryWrapper> nodeRenderers, HtmlNodeRendererFactory nodeRendererFactory) { this.nodeRendererFactory = nodeRendererFactory; this.nodeRenderers = nodeRenderers; } @Override public HtmlNodeRenderer apply(DataHolder options) { return nodeRendererFactory.apply(options); } public HtmlNodeRendererFactory getFactory() { return nodeRendererFactory; } @Override public Set<Class<?>> getDelegates() { return nodeRendererFactory instanceof DelegatingNodeRendererFactory ? ((DelegatingNodeRendererFactory) nodeRendererFactory).getDelegates() : null; } @Nullable @Override final public Set<Class<?>> getAfterDependents() { return null; } @Nullable @Override public Set<Class<?>> getBeforeDependents() { if (myDelegates == null && nodeRenderers != null) { Set<Class<?>> delegates = getDelegates(); if (delegates != null) { myDelegates = new HashSet<>(); for (DelegatingNodeRendererFactoryWrapper factory : nodeRenderers) { if (delegates.contains(factory.getFactory().getClass())) { myDelegates.add(factory.getFactory().getClass()); } } } // release reference nodeRenderers = null; } return myDelegates; } @Override final public boolean affectsGlobalScope() { return false; } } ```
```sqlpl
-- Regression test: queries with LIMIT/OFFSET under parallel replicas while
-- merge_tree_min_rows_for_concurrent_read is near Int64 max.
-- NOTE(review): appears to pin a fuzzer-found overflow in read-task splitting —
-- confirm against the original issue before changing the magic constant.

DROP TABLE IF EXISTS test__fuzz_22 SYNC;

-- index_granularity = 1 maximizes the number of granules/read tasks.
CREATE TABLE test__fuzz_22 (k Float32, v String) ENGINE = ReplicatedMergeTree('/clickhouse/03222/{database}/test__fuzz_22', 'r1') ORDER BY k SETTINGS index_granularity = 1;

INSERT INTO test__fuzz_22 SELECT number, toString(number) FROM numbers(10_000);

-- Force reading through parallel replicas on the test cluster.
SET allow_experimental_parallel_reading_from_replicas = 2, max_parallel_replicas = 3, cluster_for_parallel_replicas='test_cluster_one_shard_three_replicas_localhost';

SELECT v FROM test__fuzz_22 ORDER BY v LIMIT 10, 10 SETTINGS merge_tree_min_rows_for_concurrent_read = 9223372036854775806;

SELECT '---';

-- Same huge setting combined with reading in primary-key order.
SELECT k, v FROM test__fuzz_22 ORDER BY k LIMIT 100, 10 SETTINGS optimize_read_in_order=1, merge_tree_min_rows_for_concurrent_read = 9223372036854775806;

DROP TABLE test__fuzz_22 SYNC;
```
```emacs lisp
;;; lsp-awk.el --- AWK client -*- lexical-binding: t; -*-

;; Author: Konstantin Kharlamov <Hi-Angel@yandex.ru>
;; Keywords: languages lsp awk

;; This program is free software; you can redistribute it and/or modify
;; (at your option) any later version.

;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the

;; along with this program.  If not, see <path_to_url

;;; Commentary:

;;
;; LSP client for AWK language.
;;

;;; Code:

(require 'lsp-mode)

(defgroup lsp-awk nil
  "LSP support for AWK."
  :group 'lsp-mode
  :link '(url-link "path_to_url"))

;; Command list (program plus arguments); marked risky because it is executed.
(defcustom lsp-awk-executable '("awk-language-server")
  "Command to run the AWK language server."
  :group 'lsp-awk
  :risky t
  :type 'list)

;; Register the server with lsp-mode: stdio transport, activated in buffers
;; whose major mode maps to the "awk" language id; low priority so more
;; specific clients win.
(lsp-register-client
 (make-lsp-client
  :new-connection (lsp-stdio-connection (lambda () lsp-awk-executable))
  :activation-fn (lsp-activate-on "awk")
  :priority -1
  :server-id 'awkls))

(provide 'lsp-awk)
;;; lsp-awk.el ends here
```
```objective-c
/*
 * MTCommandOpcode.h
 *
 */

#ifndef LLGL_MT_COMMAND_OPCODE_H
#define LLGL_MT_COMMAND_OPCODE_H


#include <cstdint>


namespace LLGL
{


// One-byte opcodes identifying commands in LLGL's Metal backend command stream.
// NOTE(review): the encode/replay logic for these opcodes lives elsewhere
// (presumably MTCommandBuffer/MTCommandExecutor) — confirm there before
// reordering; values are implicit and sequential from MTOpcodeNop = 0.
enum MTOpcode : std::uint8_t
{
    MTOpcodeNop = 0,
    MTOpcodeExecute,
    MTOpcodeCopyBuffer,
    MTOpcodeCopyBufferFromTexture,
    MTOpcodeCopyTexture,
    MTOpcodeCopyTextureFromBuffer,
    MTOpcodeCopyTextureFromFramebuffer,
    MTOpcodeGenerateMipmaps,
    MTOpcodeSetGraphicsPSO,
    MTOpcodeSetComputePSO,
    MTOpcodeSetViewports,
    MTOpcodeSetScissorRects,
    MTOpcodeSetBlendColor,
    MTOpcodeSetStencilRef,
    MTOpcodeSetUniforms,
    MTOpcodeSetVertexBuffers,
    MTOpcodeSetIndexBuffer,
    MTOpcodeSetResourceHeap,
    MTOpcodeSetResource,
    MTOpcodeBeginRenderPass,
    MTOpcodeEndRenderPass,
    MTOpcodeClearRenderPass,
    MTOpcodeDraw,
    MTOpcodeDrawIndexed,
    MTOpcodeDispatchThreadgroups,
    MTOpcodeDispatchThreadgroupsIndirect,
    MTOpcodePushDebugGroup,
    MTOpcodePopDebugGroup,
    MTOpcodePresentDrawables,
    MTOpcodeFlush,
};


} // /namespace LLGL


#endif



// ================================================================================
```