text
stringlengths
1
1.04M
language
stringclasses
25 values
import React, {Component} from 'react' import {fetchCategories} from '../store/category' import {connect} from 'react-redux' import {withRouter} from 'react-router-dom' class DropDownMenu extends Component { componentDidMount() { this.props.fetchCategories() } handleChange = event => { const value = Number(event.target.value) this.props.history.push(`/category/${value}`) } render() { const {categories} = this.props return ( <div> <select name="categories" defaultValue="" onChange={this.handleChange}> <option disabled="disabled" value=""> Select a Category </option> {categories ? ( categories.map(category => { return ( <option key={category.id} value={category.id}> {category.title} </option> ) }) ) : ( <option>Fetching Categories</option> )} </select> </div> ) } } const mapStateToProps = state => ({ categories: state.categories }) const mapDispatchToProps = dispatch => ({ fetchCategories: () => dispatch(fetchCategories()) }) export default withRouter( connect(mapStateToProps, mapDispatchToProps)(DropDownMenu) )
javascript
<filename>src/main/resources/json/PH4311.json {"acadYear":"2019/2020","preclusion":"CL3204. This is taught in Mandarin, those who could do this module should not be taking the proposed module. Reverse preclusion is not needed, as someone who\npassed the proposed module, if their Mandarin improved sufficiently, could still benefit from CL3204 and will not find that too easy.","description":"This module introduces students to Classical Chinese through close reading and practice at translation of selected passages from philosophical texts, including a\nphilosophically oriented grammatical introduction in English to the Classical Chinese language. It is intended for students who have little or only average second language reading ability in Mandarin. Topics include the fundamentals of Classical Chinese grammar and readings from philosophical texts written in Classical Chinese from different periods. This module will provide the language foundation required for students intending to do graduate work in Chinese Philosophy, and enable them to work with primary materials.","title":"Classical Chinese Through Philosophical Texts","department":"Philosophy","faculty":"Arts and Social Science","workload":[0,3,0,0,9.5],"prerequisite":"Completed 80MC, including 28 MCs in PH or 28MCs in GL/GL recognised nonlanguage modules, with a minimum CAP of 3.20 or be on the Honours track.\nPH2301 or PH2302.","moduleCredit":"5","moduleCode":"PH4311","semesterData":[]}
json
<filename>model.py from network import * from metrics import * from loss import * class GDNInpainting(object): def __init__(self, config): self.config = config self.img_size = config.INPUT_SIZE self.res_num = config.RES_NUM self.base_channel = config.BASE_CHANNEL self.sample_num = config.SAMPLE_NUM self.exp_base = config.EXPBASE self.gamma = config.GAMMA self.model_name = 'Inpainting' self.psnr = PSNR(255.0) self.gen_optimizer = tf.train.AdamOptimizer( learning_rate=float(config.LR), beta1=float(config.BETA1), beta2=float(config.BETA2) ) self.dis_optimizer = tf.train.AdamOptimizer( learning_rate=float(config.LR) * float(config.D2G_LR), beta1=float(config.BETA1), beta2=float(config.BETA2) ) def build_whole_model(self, images, masks): # normalization [0, 255] to [0, 1] images = images / 255 masks = masks / 255 # masked images_masked = (images * (1 - masks)) + masks # inpaint outputs, gen_loss, dis_loss = self.inpaint_model(images, images_masked, masks) outputs_merged = (outputs * masks) + (images * (1 - masks)) # recover [0, 1] to [0, 255] images = images * 255 images_masked = images_masked * 255 outputs_merged = outputs_merged * 255 outputs = outputs * 255 # summary whole_image = tf.concat([images, images_masked, outputs, outputs_merged], axis=2) psnr = self.psnr(images, outputs_merged) tf.summary.image('train_image', whole_image, max_outputs=1) tf.summary.scalar('psnr', psnr) return gen_loss, dis_loss, psnr def build_validation_model(self, images, masks): # normalization [0, 255] to [0, 1] images = images / 255 masks = masks / 255 # masked images_masked = (images * (1 - masks)) + masks inputs = tf.concat([images_masked, masks], axis=3) outputs = self.inpaint_generator(inputs, self.res_num, self.base_channel, self.sample_num, reuse=True) outputs_merged = (outputs * masks) + (images * (1 - masks)) pred_masks, annotations, weight = self.discriminator(outputs, reuse=True) weight = tf.pow(tf.constant(float(self.exp_base)), weight) # mask hole ratio hr = 
tf.reduce_sum(masks, axis=[1, 2, 3]) / (self.img_size * self.img_size) # calculate validation loss gen_loss = 0 dis_loss = 0 with tf.variable_scope('validation_loss'): # discriminator loss dis_seg_loss = focal_loss(annotations, masks, hr, self.gamma) dis_loss += dis_seg_loss # generator l1 loss gen_l1_loss = l1_loss(weight * outputs, weight * images) gen_loss += gen_l1_loss gen_seg_loss = l1_loss(outputs, images) # recover [0, 1] to [0, 255] images = images * 255 images_masked = images_masked * 255 outputs_merged = outputs_merged * 255 outputs = outputs * 255 # summary whole_image = tf.concat([images, images_masked, outputs, outputs_merged], axis=2) tf.summary.image('validation_image', whole_image, max_outputs=1) psnr = self.psnr(images, outputs_merged) return gen_l1_loss, gen_seg_loss, dis_loss, psnr def build_optim(self, gen_loss, dis_loss): g_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.model_name + '_generator') d_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.model_name + '_discriminator') g_gradient = self.gen_optimizer.compute_gradients(gen_loss, var_list=g_vars) d_gradient = self.dis_optimizer.compute_gradients(dis_loss, var_list=d_vars) return self.gen_optimizer.apply_gradients(g_gradient), self.dis_optimizer.apply_gradients(d_gradient) def inpaint_model(self, images, images_masked, masks): # input model inputs = tf.concat([images_masked, masks], axis=3) # process outputs output = self.inpaint_generator(inputs, self.res_num, self.base_channel, self.sample_num) outputs_merged = (output * masks) + (images * (1 - masks)) gen_loss = 0 dis_loss = 0 # create discriminator prediction, annotations, weight = self.discriminator(output) weight = tf.pow(tf.constant(float(self.exp_base)), weight) # mask hole ratio hr = tf.reduce_sum(masks, axis=[1, 2, 3]) / (self.img_size * self.img_size) with tf.variable_scope('inpaint_loss'): # discriminator loss dis_seg_loss = focal_loss(annotations, masks, hr, self.gamma) dis_loss += dis_seg_loss 
# generator l1 loss gen_weighted_loss = l1_loss(weight * output, weight * images) gen_loss += gen_weighted_loss gen_l1_loss = l1_loss(output, images) # summary all of loss tf.summary.scalar('loss/dis_loss', dis_loss) tf.summary.scalar('loss/gen_l1_loss', gen_l1_loss) tf.summary.scalar('loss/gen_weighted_loss', gen_weighted_loss) return output, gen_loss, dis_loss def inpaint_generator(self, x, residual_num, channel, sample, reuse=False): with tf.variable_scope('inpaint_generator', reuse=reuse): with tf.variable_scope('encoder_1'): x = conv(x, channel, kernel=7, stride=1, pad=3, pad_type='reflect', use_bias=False, scope='conv') x = instance_norm(x, scope='ins_norm') x = relu(x) # Down-Sampling for i in range(2, sample + 2): with tf.variable_scope('encoder_downsample_' + str(i)): channel = channel * 2 x = conv(x, channel, kernel=4, stride=2, pad=1, use_bias=False, scope='conv_1') x = instance_norm(x, scope='ins_norm_1') x = relu(x) # Bottleneck for i in range(1, residual_num + 1): x = resblock(x, channel, rate=2, use_bias=False, scope='resblock_' + str(i)) # Up-Sampling for i in range(0, sample): with tf.variable_scope('decoder_upsample_' + str(i + 1)): channel = channel // 2 x = deconv(x, channel, kernel=4, stride=2, use_bias=False, scope='deconv') x = instance_norm(x, scope='ins_norm_1') x = relu(x) with tf.variable_scope('decoder' + str(sample + 1)): x = conv(x, channels=3, kernel=7, stride=1, pad=3, pad_type='reflect', use_bias=False, scope='conv') x = (tf.nn.tanh(x) + 1) / 2 return x def discriminator(self, x, layer=2, reuse=False): with tf.variable_scope('inpaint_discriminator', reuse=reuse): conv1 = tf.layers.conv2d(x, 32, kernel_size=4, strides=1, padding='SAME', name='conv1') conv1 = tf.nn.leaky_relu(conv1, alpha=0.2) conv2 = tf.layers.conv2d(conv1, 64, kernel_size=4, strides=1, padding='SAME', name='conv2') conv2 = tf.nn.leaky_relu(conv2, alpha=0.2) conv3 = tf.layers.conv2d(conv2, 128, kernel_size=4, strides=2, padding='SAME', name='conv3') conv3 = 
tf.nn.leaky_relu(conv3, alpha=0.2) conv4 = tf.layers.conv2d(conv3, 256, kernel_size=4, strides=2, padding='SAME', name='conv4') conv4 = tf.nn.leaky_relu(conv4, alpha=0.2) conv5 = tf.layers.conv2d(conv4, 256, kernel_size=4, strides=1, padding='SAME', name='conv5') conv5 = tf.nn.leaky_relu(conv5, alpha=0.2) x = deconv(conv5, 128, kernel=4, stride=2, use_bias=True, scope='deconv_1') x = deconv(x, layer, kernel=4, stride=2, use_bias=True, scope='deconv_2') output = tf.cast(tf.argmax(x, dimension=3, name="prediction"), dtype=tf.float32) map = tf.nn.softmax(x, axis=-1) output = tf.concat([output, map[:, :, :, 1]], axis=2) output = tf.expand_dims(output, dim=-1) return output, x, tf.expand_dims(map[:, :, :, 1], dim=-1)
python
<filename>content/setup/info.json<gh_stars>0 { "sitename": "Knight Times News Articles", "sitedescription": "Articles for Knight Times News", "siteicon": "/images/uploads/ktlogo.png", "showmenu": true, "emailsignup": false, "altlayout": false, "menu": [ { "name": "Knight Times News Home", "position": "1", "link": "https://knighttimesnews.com" }, { "name": "Podcasts", "position": "2", "link": "https://anchor.fm/knight-times-news" }, { "name": "Hosted By Netlify", "position": "3", "link": "https://www.netlify.com/" }, { "name": "NeoCity Academy", "position": "4", "link": "https://www.osceolaschools.net/neoc" } ], "linkcolor": "#000000" }
json
New Delhi: Researchers at the University of California, United States, have successfully tested an experimental brain implant that translates brain signals into words on a computer screen. The implant was tested on a 36-year-old man, who successfully used it to communicate words and sentences in English, 15 years after he had suffered a stroke that caused his speech to be impaired. In addition to anarthria (loss of ability to speak), the man also suffers from spastic quadriparesis (a subset of spastic cerebral palsy that affects all four limbs). Although the study was published in the New England Journal of Medicine Wednesday, the experiment had begun in February 2019, when the researchers implanted a subdural, high-density multielectrode in the 36-year-old’s brain (he was 36 in 2019), over the part of the brain that controls speech. The study was carried out in 50 sessions over 81 weeks, during which he was asked to form both isolated words and complete sentences. The words consisted of common English words and phrases, including those that he would regularly use to communicate with his caregivers — before the transplant he communicated using an assistive computer-based typing interface, controlled by his head movement. The study was partly funded by Facebook’s Sponsored Academic Research Agreement, and on Wednesday, the social media platform celebrated the achievement with a blog post. “Today we’re excited to celebrate the next chapter of this work and a new milestone that the UCSF team has achieved and published in The New England Journal of Medicine: the first time someone with severe speech loss has been able to type out what they wanted to say almost instantly, simply by attempting speech. In other words, UCSF has restored a person’s ability to communicate by decoding brain signals sent from the motor cortex to the muscles that control the vocal tract — a milestone in neuroscience,” the blog explained. 
The researchers began collecting data in April 2019, when they used a digital-signal processing system to acquire signals from the implanted device and transmit them to a computer with a custom-built software for real-time analysis. Using this process, brain signals were translated into words that could be read on a screen. In each of the trial tasks, words were shown to the man, which he had to then attempt to speak. The researchers created speech detection and word classification models and used deep learning techniques to make predictions from his neural activity. Deep-learning technique is machine-learning, based on artificial neural networks. “The speech-detection model processed each time point of neural activity during a task and detected onsets and offsets of word-production attempts in real-time,” the study explained. The researchers successfully decoded sentences made by the participant at the rate of 15. 2 words per minute, with a median error rate of 26. 6 per cent. Ninety-eight per cent of the participant’s attempts to form individual words were successfully detected and researchers decoded them with a 47. 1 per cent accuracy. (Edited by Poulomi Banerjee)
english
<reponame>ChibahAbdelwahab/QeNoBi<gh_stars>0 { "_from": "inherits-js@^0.1.1", "_id": "inherits-js@0.1.1", "_inBundle": false, "_integrity": "sha1-jNUIZ6nbHHBdB7zmNfCi0HFKpXA=", "_location": "/inherits-js", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, "raw": "inherits-js@^0.1.1", "name": "inherits-js", "escapedName": "inherits-js", "rawSpec": "^0.1.1", "saveSpec": null, "fetchSpec": "^0.1.1" }, "_requiredBy": [ "/hurl", "/influent" ], "_resolved": "https://registry.npmjs.org/inherits-js/-/inherits-js-0.1.1.tgz", "_shasum": "8cd50867a9db1c705d07bce635f0a2d0714aa570", "_spec": "inherits-js@^0.1.1", "_where": "/Users/miro/Projects/qetch-pub/Server/node_modules/influent", "author": { "name": "<NAME>", "email": "<EMAIL>" }, "bugs": { "url": "https://github.com/gobwas/inherits.js/issues" }, "bundleDependencies": false, "deprecated": false, "description": "Backbone inspired standalone inheritance function.", "devDependencies": { "browserify": "*", "chai": "*", "dateformat": "*", "gulp": "*", "gulp-clean": "*", "gulp-rename": "*", "gulp-uglify": "*", "gulp-wrap": "*", "lodash": "*", "mocha": "*", "run-sequence": "*", "vinyl-buffer": "*", "vinyl-source-stream": "*" }, "homepage": "https://github.com/gobwas/inherits.js", "keywords": [ "inherit", "inheritance", "oop", "class", "prototype", "backbone", "extend" ], "license": "MIT", "main": "./src/inherits.js", "name": "inherits-js", "repository": { "type": "git", "url": "git+https://github.com/gobwas/inherits.js.git" }, "scripts": { "test": "mocha test" }, "version": "0.1.1" }
json
// Code generated by mockery v1.0.0 package store import api "github.com/slok/ragnarok/api" import mock "github.com/stretchr/testify/mock" // Store is an autogenerated mock type for the Store type type Store struct { mock.Mock } // Add provides a mock function with given fields: obj func (_m *Store) Add(obj api.Object) error { ret := _m.Called(obj) var r0 error if rf, ok := ret.Get(0).(func(api.Object) error); ok { r0 = rf(obj) } else { r0 = ret.Error(0) } return r0 } // Delete provides a mock function with given fields: obj func (_m *Store) Delete(obj api.Object) error { ret := _m.Called(obj) var r0 error if rf, ok := ret.Get(0).(func(api.Object) error); ok { r0 = rf(obj) } else { r0 = ret.Error(0) } return r0 } // Get provides a mock function with given fields: obj func (_m *Store) Get(obj api.Object) (api.Object, bool, error) { ret := _m.Called(obj) var r0 api.Object if rf, ok := ret.Get(0).(func(api.Object) api.Object); ok { r0 = rf(obj) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(api.Object) } } var r1 bool if rf, ok := ret.Get(1).(func(api.Object) bool); ok { r1 = rf(obj) } else { r1 = ret.Get(1).(bool) } var r2 error if rf, ok := ret.Get(2).(func(api.Object) error); ok { r2 = rf(obj) } else { r2 = ret.Error(2) } return r0, r1, r2 } // GetByKey provides a mock function with given fields: key func (_m *Store) GetByKey(key string) (api.Object, bool, error) { ret := _m.Called(key) var r0 api.Object if rf, ok := ret.Get(0).(func(string) api.Object); ok { r0 = rf(key) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(api.Object) } } var r1 bool if rf, ok := ret.Get(1).(func(string) bool); ok { r1 = rf(key) } else { r1 = ret.Get(1).(bool) } var r2 error if rf, ok := ret.Get(2).(func(string) error); ok { r2 = rf(key) } else { r2 = ret.Error(2) } return r0, r1, r2 } // Update provides a mock function with given fields: obj func (_m *Store) Update(obj api.Object) error { ret := _m.Called(obj) var r0 error if rf, ok := ret.Get(0).(func(api.Object) error); ok { r0 = 
rf(obj) } else { r0 = ret.Error(0) } return r0 }
go
When is the best time to visit Hampi? The best time to visit Hampi is from October to February. This world heritage site is cool though not cold nor sweaty. Dry weather prevails all through the year in Hampi. Summers are scorching while rains are heavy. Winter of course! From October to February you can see pleasant weather. Try to visit either end of the year or the initial month of the year. ( Oct to Feb are more favourable time to visit). The best time would be during winters that is from October to February. Hampi gets very hot during summers so I would suggest to plan it in winters only. Earn credits when your answers are upvoted by others. when is the best time to visit Angkor wat? when is the best time to visit Manali? when is the best time to visit Tamil Nadu? When is the best time to visit Dalhousie? When is the best time to visit Kerala?
english
Rishab Shetty can be called the hero and director of the year 2022 for scoring blockbuster hit Kantara. Hombale banner made the film at the cost of Rs 16 Cr and ended up making a gross collection of more than Rs 400 Cr from the theatres itself. Many wonder to know the total remuneration Rishab Shetty received for this film. Before getting into the details of his payment, we have to discuss the payments received by other key actors in this film. Kishore, who played the Forest Officer and the heroine Saptami Gowda were paid Rs 1 Cr each. The main antagonist Achyuth Kumar who played the king's descendant was paid Rs 40 lakh. There is another actor Pramod Shetty who played the role of Sudharaka. He took home Rs 60 lakh. Now coming to Rishab Shetty, he was paid Rs 4 Cr for his work as the protagonist and director of the film. That's the remuneration he received from a film that minted Rs 406 Cr at the box office. There is no news if the production banner offered anything decent for him after lifting such a fortune.
english
Prime Minister Narendra Modi on Friday offered rudrabhishek (first prayers) at Uttarakhand’s Kedarnath shrine and laid the foundation stone for a ropeway to be built at a cost of ₹1267 crore for the pilgrims. The 9. 7 km ropeway to Kedarnath will cut the time pilgrims take to reach the shrine from Gaurikund. Dressed in traditional Uttarakhandi clothes, Modi offered prayers at the Samadhi Sthal of Adi Shankaracharya. Governor Gurmit Singh and chief minister Pushkar Singh Dhami accompanied him. Modi took stock of the reconstruction and development works in Kedarnath. Dhami briefed Modi about the progress of the work. Modi also interacted with the workers engaged in the work before leaving for Badrinath to offer prayers there. He will take stock of the works being undertaken under the Badrinath Master Plan, address a public meeting at Mana, around 5 km away, and lay the foundation stone for a ropeway in Hemkund. Officials said the reconstruction works under the first phase in Kedarnath have been completed. In the second phase, 21 works worth ₹188 crores are underway and will be completed by December 2023. Last month, Modi reviewed the reconstruction and development works in Badrinath and Kedarnath virtually from New Delhi. He stressed on development of the surrounding areas of the Himalayan shrines. He said the number of devotees in Kedarnath and Badrinath would increase rapidly as nearby places will also be developed to promote spiritual tourism. In October 2017, Modi laid the foundation stones of five reconstruction projects at Kedarnath, including that of the Samadhi Sthal of Adi Shankaracharya, an early eighth-century seer and the founder of the Vedanta school of philosophy. Modi also visited the Rudra meditation cave in Kedarnath in May 2019. Tens of thousands of pilgrims annually visit the Kedarnath, Badrinath, Yamunotri, and Gangotri shrines collectively called Char Dham. A record 4. 2 million devotees visited the shrines this year. 
The Char Dham pilgrimage begins from Yamunotri in the west. It then proceeds to Gangotri and finally to Kedarnath and Badrinath. Nearly 3,500 metres above the sea level, Kedarnath is located near the Mandakini river in the Rudraprayag district. The Pandavas are believed to have built the shrine and Adi Shankaracharya revived it. Kedarnath suffered damages when flash floods hit in 2013.
english
<gh_stars>0 #!python3 __version__ = '0.0.12'
python
require("dotenv").config(); const regex = /^.+?\s+(\d+(?=\s))?\s*(.+?)(?=\s+(--.+)|\s*$)/g; const argregex = /--(\S)\s+(.+?)(?=\s+--|\s*$)/g; const fetch = require("node-fetch"); const api = require('../service/api') const state = require('../service/state'); const getToken = require('../service/authtoken'); const args = { 'r': null }; function CampaignNotSetError() { this.message = 'Campaign not set; use $campaign to set'; } function CharacterNotSetError() { this.message = 'Character not set; use $character to set'; } function ParseError() { this.message = "Usage: $additem <item name> [--r <remark>]"; } function ItemNotFoundError(itemName) { this.message = `Item containing '${itemName}' not found`; } function MultipleMatchError(itemName, data) { this.message = `Multiple items containing '${itemName}' found:`; data.forEach(item => this.message = this.message + `\nID: ${item.id} Name: ${item.name}`); } function ItemIdNotFoundError(itemId) { this.message = `Item with ID of ${itemId} not found`; } module.exports = (message) => { const tokenPromise = getToken(); const itemPromise = module.exports.parseMessage(message) .then(matches => module.exports.parseCommand(matches)) .then(searchParam => { return (typeof searchParam === 'number') ? 
module.exports.findItemById(searchParam) : module.exports.findItemByName(searchParam); }) .then(result => result.json()) .catch(error => { message.channel.send(error.message); throw error; }); const argsPromise = module.exports.parseMessage(message) .then(matches => module.exports.parseArguments(matches)) .catch(error => { message.channel.send(error.message); throw error; }); return Promise.all([tokenPromise, itemPromise, argsPromise]) .then(values => { const item = values[1]; const arguments = values[2]; const token = values[0]; return module.exports.postTransaction(message, item, arguments, token); }) .catch(error => error); }; module.exports.parseMessage = (message, error) => { return new Promise(resolve => { if (state.getCampaignId() === null) { throw new CampaignNotSetError(); } if (state.getCharacterId() === null) { throw new CharacterNotSetError(); } const matches = [...message.content.matchAll(regex)]; if (matches[0]) { resolve(matches[0]); } throw error ? error : new ParseError(); }); }; module.exports.parseCommand = (matches) => { return new Promise(resolve => { const searchParam = Number(matches[2]).valueOf(); resolve(isNaN(searchParam) ? matches[2] : searchParam); }) }; module.exports.parseArguments = (matches) => { for (const key of Object.keys(args)) { args[key] = null; } const parsed = { quantity: matches[1] ? 
matches[1] : 1, arguments: matches[3] } return new Promise(resolve => { if (parsed.arguments == null) { parsed.arguments = args; resolve(parsed); } const matches = [...parsed.arguments.matchAll(argregex)]; matches.forEach(match => { if (match[1] in args) { args[match[1]] = match[2]; } }); parsed.arguments = args; resolve(parsed); }); }; module.exports.getItems = () => { return api.get(process.env.API_URL) .then(response => response.json()) .then(apiindex => api.get(apiindex._links['loot-api:lootitems'].href)); } module.exports.findItemByName = (itemName) => { return module.exports.getItems() .then(response => response.json()) .then(items => { const embeddedItem = items._embedded['loot-api:lootitem'].filter(item => item.name.toLowerCase().includes(itemName.toLowerCase())); if (embeddedItem.length === 1) { return api.get(embeddedItem.pop()._links.self.href); } else if (embeddedItem.length < 1) { throw new ItemNotFoundError(itemName); } throw new MultipleMatchError(itemName, embeddedItem); }); }; module.exports.findItemById = (itemId) => { return module.exports.getItems() .then(response => response.json()) .then(items => { const embeddedItem = items._embedded['loot-api:lootitem'].filter(embeddedItem => embeddedItem.id === itemId); if (embeddedItem.length === 1) { return api.get(embeddedItem.pop()._links.self.href); } throw new ItemIdNotFoundError(itemId); }); }; module.exports.postTransaction = (message, item, arguments, token) => { const newTransaction = { item: item.id, quantity: arguments.quantity, remark: arguments.arguments.r }; const authHeader = `bearer ${token.access_token}`; return new Promise(resolve => { fetch(`${process.env.API_URL}campaigns/${state.getCampaignId()}/characters/${state.getCharacterId()}/itemtransactions/`, { method: "post", body: JSON.stringify(newTransaction), headers: { "Content-Type": "application/hal+json", "Authorization": authHeader, } }) .then(response => response.json()) .then(character => { updatedItem = 
character.inventory.filter(inventoryItem => inventoryItem.name === item.name); if (updatedItem[0]) { arguments.quantity > 0 ? message.channel.send(`Added ${arguments.quantity} ${item.name} to ${character.name}\nNow has ${updatedItem[0].quantity} ${updatedItem[0].name}`) : message.channel.send(`Dropped ${-arguments.quantity} ${item.name} from ${character.name}\nNow has ${updatedItem[0].quantity} ${updatedItem[0].name}`); } else { message.channel.send(`Dropped ${-arguments.quantity} ${item.name} from ${character.name}\nNow has 0 ${item.name}`); } resolve(character); }); }); };
javascript
<reponame>rolocampusano/nber { "id": 13964, "cites": 31, "cited_by": 31, "reference": [ "[2] Baliga, Sandeep and Tomas Sj\u00c3\u0083\u00c2\u00a1str\u00c3\u0083\u00c2\u00a1m. 2004. Arms Races and Negotiations. Review of Ecomomic Stndies 71(2): 351-369.", "[3] <NAME> and <NAME>. 1993. Global Games and Equilibrium Selection. Ecomometrica 61(5): 989-1018.", "[4] <NAME>. 2007. Fear of Miscoordination and the Sustainability of Cooperation in Dynamic Global Games with Exit. Working Paper.", "[5] <NAME>. 2008. Uniform Selection in Global Games. Jonrmal of Ecomomic Theory 139(1): 222-241.", "[6] <NAME>, and <NAME>\u00c3\u0083\u00c2\u00b3 i Miquel. 2007. Mutual Fears and Civil War. Working Paper.", "[7] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2003. Brealcimg the Comflict Trap: Civil War amd Developmemt Policy. Oxford and Washington DC: Oxford University Press and The World Bank.", "[8] Doyle, Michael and <NAME>. 2006. Malcimg War ~ Bnildimg Peace: Umited Natioms Peace Operatioms. Princeton, NJ: Princeton University Press.", "[9] <NAME>. 1995. Rationalist Explanations for War. Imtermatiomal Orgamizatiom 49(3): 379-414.", "[10] <NAME> and <NAME>. 2005. Darfnr: A Short History of a Lomg War. London and New York: Zed Books.", "[11] <NAME>. 1990. Arming as a Strategic Investment in a Cooperative Equilibrium Americam Ecomomic Review 80(4): 50-68.", "[12] <NAME>. 1991. A General Equilibrium Model of Insurrections. Americam Ecomomic Review 81(4): 912-921. 34", "[13] <NAME> and <NAME>. 1988. A Gemeral Theory of Eqnilibrinm Selectiom im Games, MIT Press.", "[14] <NAME>. 1995. Theorizing about Conflict. in Hartley, Keith and Sandler, Todd eds. Hamdboolc of Defemse Ecomomics. Amsterdam: Elsevier Science.", "[15] <NAME>. and <NAME>. 2007. Political Bias and War. Americam Ecomomic Review 97(4): 1353-1373.", "[16] <NAME>. 1976. Perceptiom amd Misperceptiom im Imtermatiomal Politics. New Jersey: Princeton University Press.", "[17] <NAME>. 1978. 
Cooperation under the Security Dilemma. World Politics 30(1): 167-2 14.", "[18] <NAME>. 1979. Deterrence Theory Revisited. World Politics 31(1): 289-324.", "[19] <NAME>. 1989. The Meamimg of the Nnclear Revolntiom. Ithaca, N.Y: Cornell University Press.", "[20] <NAME> and <NAME>. 1999. Civil War and the Security Dilemma. in Snyder, Jack and <NAME>, eds. Civil War, Imsecnrity, amd Imtervemtiom. New York: Columbia University Press.", "[21] <NAME>, <NAME> and <NAME>. 1993. The Macroeconomics of Dr. Strangelove. Amen cam Ecomomic Review 83(1): 43-62.", "[22] <NAME>. 1997. Game Theory and the Spiral Model. World Politics 49(3): 371-400.", "[23] <NAME> and <NAME>. 2001. Global Games: Theory and Applications. Cowles Foundation Discussion Paper No. 1275R. http : //ssrn. com/abstract~2848 13.", "[24] <NAME> and <NAME>. 1990. Rationalizability, Learning, and Equilibrium in Games with Strategic Complementarities. Ecomometrica 58(6): 1255-1277.", "[25] Oxfam, IANSA and Safeworld. 2007. Africas Missing Billions: International Arms Flows and the Cost of Conflict. Briefimg Papers 107.", "[26] <NAME>. 1993. The Security Dilemma and Ethnic Conflict. Snrvival 35(1): 27-47. 35", "[27] <NAME>. 1990. Nnclear Deterremce Theory: The Search for Credibility. New York: Cambridge University Press.", "[28] <NAME>. 2004. The Inefficient Use of Power: Costly Conflict with Complete Information. Amen cam Political Sciemce Review 98(3): 231-241.", "[29] <NAME>. 1960. The Strategy of Comflict. Cambridge, MA: Harvard University Press.", "[30] <NAME>. 1966. Arms amd Imflnemce. New Haven, Conn.: Yale University Press.", "[31] <NAME>. 1992. Cooperation, Conflict, and Power in the Absence of Property Rights. Amen cam Ecomomic Review 82(4): 720-739.", "[32] <NAME>. 1990. Nash equilibrium with strategic complementarities . Jonrmal of Mathematical Ecomomics 19(3): 305-321. 36" ] }
json
<filename>Blob_Lib/glfw-3.3.7/glfw/docs/html/search/variables_7.js version https://git-lfs.github.com/spec/v1 oid sha256:dfe8067e32d70806198b8fd156a8a23b580f1d93a7c23cba20c905c3a0b9bfea size 219
javascript
Organisers usually offer ‘chai’ to the journalists during the events. So when the BJP organised its ‘Chai Pe Charcha’ (interaction on tea) with its Prime Ministerial candidate Narendra Modi across the country last week, journalists in the city were sure to relish a cup of tea at the venue. But the event managers were in a tight spot, as the arrangements to make tea for a big audience got delayed, and soon the organisers were seen scurrying to a nearby tea joint for the tea. “I didn’t have tea for the last two hours. Please get a few cups for me and reporters from anywhere,” an organiser was heard telling his colleague. Before there were any red faces , hot steaming cups of ‘Adrak ki chai’ came in and served to jounalists. Obfuscation is taken to a different level by the staff at CPDCL’s emergency control room, often ready with simple answers for major problems. The instant answer with them for the consumers, who call the ECR when power is out in their locality, is ‘tree cutting’. Recently, major repairs at the 132kv substation in Bandlaguda necessitated power cuts from 8 a. m. to 2 p. m. The same was also publicised through media. However, some anxious consumers called the ECR when they didn’t get the power back even after 4 p. m. Pat came the reply ‘tree cutting’! It must have been pretty bewildering for the consumers to imagine CPDCL staff toiling for eight hours just cutting tree branches! The forthcoming elections have already been throwing a few surprises to voters. While many were expecting a tough competition between Hyderabad MP Asaduddin Owaisi and TDP politburo member Zahed Ali Khan, the latter’s decision to retire from active politics had snuffed out the interest. The curiosity has taken a further beating with the Majlis rival - Majlis Bachao Tehreek (MBT) - offering to extend full support to the MIM and let either of the Owaisi brothers to contest from the Amberpet Assembly constituency, if it accepts the challenge of BJP State president and MLA G. 
Kishan Reddy. As the election scene is all set to generate more heat in the coming days, many more surprises are in store for everyone. An explosion in front of the stage from which TRS president K. Chandrasekhar Rao addressed at a meeting of Telangana journalists at Exhibition grounds on Sunday invited a spontaneous response from him. The organisers of the function burst shreds of paper with a loud explosion and accompanying fire to welcome Mr. Rao. His immediate reaction: “It was paper because they are our (Telangana) journalists. If they were Andhra journalists, something else would have been used”. He is suave, smooth and deft at handling tricky situations and questions. Mayor, Mohammed Majid Hussain during his more than two years tenure handled the GHMC Council with 150 corporators, officials and media in a rather disarming fashion. So when he had announced the decision to quit the Mayor post, he was asked about his next move for the coming elections, want to be MLA or MP? “I loved being the corporator,” he quipped, and then came the next question, will he miss the status of being the First Citizen of the city? Quick was the response, “I always strive to be the first servant of the city”. Show stopper or traffic stopper? Up coming aviation show in Hyderabad surely looks like a show stopper and a traffic stopper too! Commuters had a taste of what’s in store for them in the coming few days on Monday. In the afternoon, just after the Begumpet old airport flyover, motorists suddenly stopped their vehicles by the wayside and started gawking at the skies. This was enough to create a mini traffic jam, as one after the other, inquisitive motorists started stopping their vehicles and gazed upwards. Some fished out their mobiles to take videos. The object of everybody’s attention was the acrobatic display of two light aircrafts near the Begumpet airport, where the aviation show will be held. 
As the two planes engaged in a phoney duel and hard to believe somersaults, commuters were awestruck and needless to say, traffic went haywire. Within minutes, the two planes disappeared, leaving everyone craving for more. Looks like, all roads will lead to the Begumpet airport, when the exhibition will be thrown open to the general public. Nemmani Sreedhar, Swathi V. Asif Yar Khan, N. Rahul, T. Lalith Singh,
english
<reponame>Safal08/Hacktoberfest-1 { "word": "Priority", "definitions": [ "The fact or condition of being regarded or treated as more important than others.", "A thing that is regarded as more important than others.", "The right to proceed before other traffic." ], "parts-of-speech": "Noun" }
json
This season’s jeans are Wide Leg and/or slouchy. At first glance alike, details tell. Look for preferred length, stretch, color and closure. Some are wide and straight-legged, others boot cut. One pulls on. There may be a designer that fits you better. Light to dark: Featured items are selected by our editors simply because we like them and think you will too. Woman Around Town was not paid for featuring any item in this post. The post Wide Leg Jeans appeared first on Woman Around Town.
english
#include <iostream> #include <vector> #include "binary_tree.h" using namespace std; class Solution { public: TreeNode* bstFromPreorder(vector<int>& preorder) { if (preorder.size() == 0) return nullptr; TreeNode* root = new TreeNode {preorder[0]}; for (int i = 1; i < preorder.size(); i++) { TreeNode *parent = nullptr; TreeNode *child = root; // TreeNode *child = root; int val = preorder[i]; while (child) { parent = child; if (child->val < val) { child = child->right; } else { child = child->left; } } if (parent->val < val) { parent->right = new TreeNode {val}; } else { parent->left = new TreeNode {val}; } } return root; } }; int main() { vector<int> v {8, 5, 1, 7, 10, 12}; TreeNode* root = Solution().bstFromPreorder(v); prettyPrintTree(root); }
cpp
Every consumer would agree that the hidden costs of printing in cartridges, toner ink, and other consumables far outnumber the actual printer costs. enough to interest analysts. 1,940 crore in FY 08, recording an annual growth of 68% from the previous year. From an installed base perspective, inkjet cartridges enjoyed 46% market share, dot matrix and line printers, and drum printing. printer consumables declined, as they catered only to a niche market. for photo printing. laser printers marked the burgeoning laser catridges market. And, therefore, HP, gearing up to cater the growing laser cartridges market. The consumables industry has always followed the razor-and-blade model, cost by correcting their pricing strategies and introducing new technologies. their business and maintain lead. crore, while compatibles (includes refillers, remanufactures, and compatibles) had a 52% market share at Rs 1,009 crore. Within the compatibles segment, 97 crore. 10%. Other established vendors like Xerox, Lexmark, Minolta, Kores, Samsung, Brother, and Kodak together made up the remainder. cartridges at a cheaper cost, as the organized parts suppliers such as Static, Uninet, and Future Systems, based out of Europe, Japan, and the US, at lower costs. cartridges. While quality remains inferior to the European part suppliers, resulting in its declining market share. remanufactures, they are not a big cause of worry. Last year saw the formation of Cartridge Recyclers and Traders Association (CRTAI) quality refilling without tampering with cartridges. remanufacturing is an established industry with players like Xerox, IBM, services to seventy-four cities across India. costs around Rs 900. prevent them from using refills or compatible cartridges. or toner. all its partners and moved from a regional to a national distribution strategy. certain standards. vendors and serve as a secret lifeline.
english
Yesterday (April 24) was the polling day in all 39 Lok Sabha constituencies of Tamilnadu The state politics and Tamil cinema always have a close connection and yesterday the spotlight was on the Tinsel town stars who casted their votes in various polling stations across the state. Superstar Rajinikanth was the first to vote among tinsel town celebrities. If the polling started at 7 am, the star reached the polling booth at Stella Maris College at 6:45 am and waited for the polling process to get ready. Since the electoral staff was in a hurry to start the polling in order to reduce the waiting time of the Superstar, they forgot to ink the index finger of the actor before he voted. However the alerted polling staff called back Rajini who was about to leave and inked his finger. RajinikanthÂs good friend Kamal Haasan reached the polling booth at a corporation school in Teynampet. The Alwarpet resident was in the polling station with actor Gauthami as early as 7:45 am since he had to catch the flight to Australia where the song shoot of his upcoming film ÂUttama Villain is scheduled to happen. ÂThala Ajith a resident of Thiruvanmiyur, is reported to have reached the polling station along with his parents and wife Shalini at around 6:30 am. Though he was offered the luxury of entering the voting room directly, the ÂVeeram actor refused it and waited in queue for almost 40 minutes and cast his vote at 7:20 am. Onlookers revealed that he did not even wave his hand because if he does that the fans will roar and whistle in cheer leading to the calmness of the venue getting affected. Sources say that equal credit should go to the public who had gathered to vote, for not disturbing AjithÂs privacy. Ajith was also spotted talking with the person standing behind him in the queue. His wife Shalini was standing next to him. Actor and the leader of DMDK political party Vijayakanth voted in school in Virugambakkam. 
Actor Vijay reached the Corporation school near Adyar Telephone exchange to cast his vote around 9:45 am. Suriya, his brother Karthi and father Sivakumar exercised their franchise in Hindi Prachar Sabha located at T. nagar at around 12 noon. The actor had flew down from Goa, where he was busy shooting the climax of his upcoming film ÂAnjaan directed by Lingusamy. The actor was noticed with a leg injury that he sustained a few days back while shooting a risky stunt shot. Younger actors, Jiiva, Vishal, Sivakarthikeyan, Jeyam Ravi and many others turned up to the polling booths for voting. Actresses who turned up to perform their democratic duty were no less in number. DMK party member Kushbu cast her vote along with hubby director Sundar. C in Santhome at around 8 am in the morning. Trisha voted in Stella Maris College while Sneha, Radhika and many other actors and actresses voted in their respective constituencies. Follow us on Google News and stay updated with the latest!
english
<filename>vrmslearn/ModelGenerator.py<gh_stars>1-10 #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Class to generate seismic models and labels for training. """ import numpy as np import copy from scipy.signal import gaussian from scipy.interpolate import interp1d import argparse from vrmslearn.ModelParameters import ModelParameters class ModelGenerator(object): """ Generate a seismic model with the generate_model method and output the labels, with generate_labels. As of now, this class generates a 1D layered model, and the labels correspond to the rms velocity. """ def __init__(self, model_parameters=ModelParameters()): """ This is the constructor for the class. @params: model_parameters (ModelParameters) : A ModelParameter object @returns: """ self.pars = model_parameters self.vp =None def generate_model(self): """ Output the media parameters required for seismic modelling, in this case vp, vs and rho. To create 1D model, set pars.flat to True. For 2D dipping layer models, set it to False. @params: @returns: vp (numpy.ndarray) : numpy array (self.pars.NZ, self.pars.NX) for vp. vs (numpy.ndarray) : numpy array (self.pars.NZ, self.pars.NX) for vs. rho (numpy.ndarray) : numpy array (self.pars.NZ, self.pars.NX) for rho values. """ if self.pars.flat: vp, vs, rho = generate_random_1Dlayered(self.pars) else: vp, vs, rho, _, _, _ = generate_random_2Dlayered(self.pars) self.vp = copy.copy(vp) return vp, vs, rho def generate_labels(self): """ Output the labels attached to modelling of a particular dataset. In this case, we want to predict vrms from a cmp gather. @params: @returns: vrms (numpy.ndarray) : numpy array of shape (self.pars.NT, ) with vrms values in meters/sec. 
valid (numpy.ndarray) : numpy array of shape (self.pars.NT, ) with 1 before the last reflection, 0 afterwards refs (numpy.ndarray) : Two way travel-times of the reflections """ vp = self.vp[:, 0] vrms = calculate_vrms(vp, self.pars.dh, self.pars.Npad, self.pars.NT, self.pars.dt, self.pars.tdelay, self.pars.source_depth) refs = generate_reflections_ttime(vp, self.pars) # Normalize so the labels are between 0 and 1 vrms = (vrms - self.pars.vp_min) / (self.pars.vp_max - self.pars.vp_min) indt = np.argwhere(refs > 0.1).flatten()[-1] valid = np.ones(len(vrms)) valid[indt:] = 0 return vrms, valid, refs def calculate_vrms(vp, dh, Npad, NT, dt, tdelay, source_depth): """ This method inputs vp and outputs the vrms. The global parameters in common.py are used for defining the depth spacing, source and receiver depth etc. This method assumes that source and receiver depths are same. The convention used is that the velocity denoted by the interval (i, i+1) grid points is given by the constant vp[i+1]. @params: vp (numpy.ndarray) : 1D vp values in meters/sec. dh (float) : the spatial grid size Npad (int) : Number of absorbing padding grid points over the source NT (int) : Number of time steps of output dt (float) : Time step of the output tdelay (float): Time before source peak source_depth (float) The source depth in meters @returns: vrms (numpy.ndarray) : numpy array of shape (NT, ) with vrms values in meters/sec. 
""" NZ = vp.shape[0] # Create a numpy array of depths corresponding to the vp grid locations depth = np.zeros((NZ,)) for i in range(NZ): depth[i] = i * dh # Create a list of tuples of (relative depths, velocity) of the layers # following the depth of the source / receiver depths, till the last layer # before the padding zone at the bottom last_depth = dh * (NZ - Npad - 1) rdepth_vel_pairs = [(d - source_depth, vp[i]) for i, d in enumerate(depth) if d > source_depth and d <= last_depth] first_layer_vel = rdepth_vel_pairs[0][1] rdepth_vel_pairs.insert(0, (0.0, first_layer_vel)) # Calculate a list of two-way travel times t = [2.0 * (rdepth_vel_pairs[index][0] - rdepth_vel_pairs[index - 1][ 0]) / vel for index, (_, vel) in enumerate(rdepth_vel_pairs) if index > 0] t.insert(0, 0.0) total_time = 0.0 for i, time in enumerate(t): total_time += time t[i] = total_time # The last time must be 'dt' * 'NT', so adjust the lists 'rdepth_vel_pairs' # and 't' by cropping and adjusting the last sample accordingly rdepth_vel_pairs = [(rdepth_vel_pairs[i][0], rdepth_vel_pairs[i][1]) for i, time in enumerate(t) if time <= NT * dt] t = [time for time in t if time <= NT * dt] last_index = len(t) - 1 extra_distance = (NT * dt - t[last_index]) * rdepth_vel_pairs[last_index][ 1] / 2.0 rdepth_vel_pairs[last_index] = ( extra_distance + rdepth_vel_pairs[last_index][0], rdepth_vel_pairs[last_index][1]) t[last_index] = NT * dt # Compute vrms at the times in t vrms = [first_layer_vel] sum_numerator = 0.0 for i in range(1, len(t)): sum_numerator += (t[i] - t[i - 1]) * rdepth_vel_pairs[i][1] * \ rdepth_vel_pairs[i][1] vrms.append((sum_numerator / t[i]) ** 0.5) # Interpolate vrms to uniform time grid tgrid = np.asarray(range(0, NT)) * dt vrms = np.interp(tgrid, t, vrms) vrms = np.reshape(vrms, [-1]) # Adjust for time delay t0 = int(tdelay / dt) vrms[t0:] = vrms[:-t0] vrms[:t0] = vrms[t0] # Return vrms return vrms def generate_random_1Dlayered(pars, seed=None): if seed is not None: 
np.random.seed(seed) if pars.num_layers == 0: nmin = pars.layer_dh_min nmax = int(pars.NZ / pars.layer_num_min) n_layers = np.random.choice(range(pars.layer_num_min, int(pars.NZ/nmin))) else: nmin = pars.layer_dh_min nmax = int(pars.NZ / pars.layer_num_min) n_layers = int(np.clip(pars.num_layers, nmin, nmax)) NZ = pars.NZ NX = pars.NX dh = pars.dh top_min = int(pars.source_depth / dh + 2 * pars.layer_dh_min) layers = (nmin + np.random.rand(n_layers) * (nmax - nmin)).astype(np.int) tops = np.cumsum(layers) ntos = np.sum(layers[tops <= top_min]) if ntos > 0: layers = np.concatenate([[ntos], layers[tops > top_min]]) vels = (pars.vp_min + np.random.rand() * (pars.vp_max - pars.vp_min - pars.dvmax) + np.random.rand(len(layers)) * pars.dvmax) ramp = np.abs(np.max(vels) - pars.vp_max) * np.random.rand() + 0.1 vels = vels + np.linspace(0, ramp, vels.shape[0]) vels[vels > pars.vp_max] = pars.vp_max vels[vels < pars.vp_min] = pars.vp_min if pars.marine: vels[0] = pars.velwater + (np.random.rand() - 0.5) * 2 * pars.d_velwater layers[0] = int(pars.water_depth / pars.dh + ( np.random.rand() - 0.5) * 2 * pars.dwater_depth / pars.dh) vel1d = np.concatenate([np.ones(layers[n]) * vels[n] for n in range(len(layers))]) if len(vel1d) < NZ: vel1d = np.concatenate([vel1d, np.ones(NZ - len(vel1d)) * vel1d[-1]]) elif len(vel1d) > NZ: vel1d = vel1d[:NZ] if pars.rho_var: rhos = (pars.rho_min + np.random.rand() * ( pars.rho_max - pars.rho_min - pars.drhomax) + np.random.rand(len(layers)) * pars.drhomax) ramp = np.abs(np.max(rhos) - pars.rho_max) * np.random.rand() + 0.1 rhos = rhos + np.linspace(0, ramp, rhos.shape[0]) rhos[rhos > pars.rho_max] = pars.rho_max rhos[rhos < pars.rho_min] = pars.rho_min rho1d = np.concatenate([np.ones(layers[n]) * rhos[n] for n in range(len(layers))]) if len(rho1d) < NZ: rho1d = np.concatenate( [rho1d, np.ones(NZ - len(rho1d)) * rho1d[-1]]) elif len(rho1d) > NZ: rho1d = rho1d[:NZ] else: rho1d = vel1d * 0 + pars.rho_default vp = np.transpose(np.tile(vel1d, [NX, 
1])) vs = vp * 0 rho = np.transpose(np.tile(rho1d, [NX, 1])) return vp, vs, rho def texture_1lay(NZ, NX, lz=2, lx=2): """ Created a random model with bandwidth limited noise. @params: NZ (int): Number of cells in Z NX (int): Number of cells in X lz (int): High frequency cut-off size in z lx (int): High frequency cut-off size in x @returns: """ noise = np.fft.fft2(np.random.random([NZ, NX])) noise[0, :] = 0 noise[:, 0] = 0 noise[-1, :] = 0 noise[:, -1] = 0 iz = lz ix = lx maskz = gaussian(NZ, iz) maskz = np.roll(maskz, [int(NZ / 2), 0]) maskx = gaussian(NX, ix) maskx = np.roll(maskx, [int(NX / 2), 0]) noise = noise * np.reshape(maskz, [-1, 1]) noise *= maskx noise = np.real(np.fft.ifft2(noise)) noise = noise / np.max(noise) return noise def generate_reflections_ttime(vp, pars, tol=0.015, window_width=0.45): """ Output the reflection travel time at the minimum offset of a CMP gather @params: vp (numpy.ndarray) : A 1D array containing the Vp profile in depth pars (ModelParameter): Parameters used to generate the model tol (float): The minimum relative velocity change to consider a reflection window_width (float): time window width in percentage of pars.peak_freq @returns: tabel (numpy.ndarray) : A 2D array with pars.NT elements with 1 at reflecion times +- window_width/pars.peak_freq, 0 elsewhere """ vp = vp[int(pars.source_depth / pars.dh):] vlast = vp[0] ind = [] for ii, v in enumerate(vp): if np.abs((v - vlast) / vlast) > tol: ind.append(ii - 1) vlast = v if pars.minoffset != 0: dt = 2.0 * pars.dh / vp t0 = np.cumsum(dt) vrms = np.sqrt(t0 * np.cumsum(vp ** 2 * dt)) tref = np.sqrt( t0[ind] ** 2 + pars.minoffset ** 2 / vrms[ind] ** 2) + pars.tdelay else: ttime = 2 * np.cumsum(pars.dh / vp) + pars.tdelay tref = ttime[ind] if pars.identify_direct: dt = 0 if pars.minoffset != 0: dt = pars.minoffset / vp[0] tref = np.insert(tref, 0, pars.tdelay + dt) tlabel = np.zeros(pars.NT) for t in tref: imin = int(t / pars.dt - window_width / pars.peak_freq / pars.dt) imax = int(t / 
pars.dt + window_width / pars.peak_freq / pars.dt) if imin <= pars.NT and imax <= pars.NT: tlabel[imin:imax] = 1 return tlabel def two_way_travel_time(vp, pars): """ Output the two-way travel-time for each cell in vp @params: vp (numpy.ndarray) : A 1D array containing the Vp profile in depth pars (ModelParameter): Parameters used to generate the model @returns: vp (numpy.ndarray) : A 1D array containing the Vp profile in depth, cut to have the same size of t t (numpy.ndarray) : The two-way travel time of each cell """ vpt = vp[int(pars.source_depth / pars.dh):] t = 2 * np.cumsum(pars.dh / vpt) + pars.tdelay t = t[t < pars.NT * pars.dt] vpt = vpt[:len(t)] return vpt, t def interval_velocity_time(vp, pars): """ Output the interval velocity in time @params: vp (numpy.ndarray) : A 1D array containing the Vp profile in depth pars (ModelParameter): Parameters used to generate the model @returns: vint (numpy.ndarray) : The interval velocity in time """ vpt, t = two_way_travel_time(vp, pars) interpolator = interp1d(t, vpt, bounds_error=False, fill_value="extrapolate", kind="nearest") vint = interpolator(np.arange(0, pars.NT, 1) * pars.dt) return vint def generate_random_2Dlayered(pars, seed=None): """ This method generates a random 2D model, with parameters given in pars. Important parameters are: Model size: -pars.NX : Number of grid cells in X -pars.NZ : Number of grid cells in Z -pars.dh : Cell size in meters Number of layers: -pars.num_layers : Minimum number of layers contained in the model -pars.layer_dh_min : Minimum thickness of a layer (in grid cell) -pars.source_depth: Depth in meters of the source. Velocity above the source is kept constant. 
Layers dip -pars.angle_max: Maximum dip of a layer in degrees -pars.dangle_max: Maximum dip difference between adjacent layers Model velocity -pars.vp_max: Maximum Vp velocity -pars.vp_min: Minimum Vp velocity -pars.dvmax: Maximum velocity difference of two adajcent layers Marine survey parameters -pars.marine: If True, first layer is water -pars.velwater: water velocity -pars.d_velwater: variance of water velocity -pars.water_depth: Mean water depth -pars.dwater_depth: variance of water depth Non planar layers pars.max_osci_freq: Maximum spatial frequency (1/m) of a layer interface pars.min_osci_freq: Minimum spatial frequency (1/m) of a layer interface pars.amp_max: Minimum amplitude of the ondulation of the layer interface pars.max_osci_nfreq: Maximum number of frequencies of the interface Add texture in layers pars.texture_zrange pars.texture_xrange pars.max_texture @params: pars (str) : A ModelParameters class containing parameters for model creation. seed (str) : The seed for the random number generator @returns: vp, vs, rho, vels, layers, angles vp (numpy.ndarray) : An array containing the vp model vs (numpy.ndarray) : An array containing the vs model (0 for the moment) rho (numpy.ndarray) : An array containing the density model (2000 for the moment) vels (numpy.ndarray) : 1D array containing the mean velocity of each layer layers (numpy.ndarray) : 1D array containing the mean thickness of each layer, at the center of the model angles (numpy.ndarray) : 1D array containing slope of each layer """ if seed is not None: np.random.seed(seed) # Determine the minimum and maximum number of layers if pars.num_layers == 0: nmin = pars.layer_dh_min nmax = int(pars.NZ / pars.layer_num_min) if nmin < nmax: n_layers = np.random.choice(range(nmin, nmax)) else: n_layers = nmin else: nmin = pars.layer_dh_min nmax = int(pars.NZ / pars.layer_num_min) n_layers = int(np.clip(pars.num_layers, nmin, nmax)) # Generate a random number of layers with random thicknesses NZ = pars.NZ 
NX = pars.NX dh = pars.dh top_min = int(pars.source_depth / dh + 2 * pars.layer_dh_min) layers = (nmin + np.random.rand(n_layers) * (nmax - nmin)).astype(np.int) tops = np.cumsum(layers) ntos = np.sum(layers[tops <= top_min]) if ntos > 0: layers = np.concatenate([[ntos], layers[tops > top_min]]) # Generate random angles for each layer n_angles = len(layers) angles = np.zeros(layers.shape) angles[1] = -pars.angle_max + np.random.rand() * 2 * pars.angle_max for ii in range(2, n_angles): angles[ii] = angles[ii - 1] + ( 2.0 * np.random.rand() - 1.0) * pars.dangle_max if np.abs(angles[ii]) > pars.angle_max: angles[ii] = np.sign(angles[ii]) * pars.angle_max # Generate a random velocity for each layer. Velocities are somewhat biased # to increase in depth vels = (pars.vp_min + np.random.rand() * (pars.vp_max - pars.vp_min - pars.dvmax) + np.random.rand(len(layers)) * pars.dvmax) ramp = np.abs(np.max(vels) - pars.vp_max) * np.random.rand() + 0.1 vels = vels + np.linspace(0, ramp, vels.shape[0]) vels[vels > pars.vp_max] = pars.vp_max vels[vels < pars.vp_min] = pars.vp_min if pars.marine: vels[0] = pars.velwater + (np.random.rand() - 0.5) * 2 * pars.d_velwater layers[0] = int(pars.water_depth / pars.dh + ( np.random.rand() - 0.5) * 2 * pars.dwater_depth / pars.dh) # Generate the 2D model, from top layers to bottom vel2d = np.zeros([NZ, NX]) + vels[0] tops = np.cumsum(layers) osci = create_oscillation(pars.max_osci_freq, pars.min_osci_freq, pars.amp_max, pars.max_osci_nfreq, NX) texture = texture_1lay(2 * NZ, NX, lz=pars.texture_zrange, lx=pars.texture_xrange) for ii in range(0, len(layers) - 1): if np.random.rand() < pars.prob_osci_change: osci += create_oscillation(pars.max_osci_freq, pars.min_osci_freq, pars.amp_max, pars.max_osci_nfreq, NX) texture = texture / np.max(texture) * ( np.random.rand() + 0.001) * pars.max_texture * vels[ii + 1] for jj in range(0, NX): # depth of the layer at location x dz = int((np.tan(angles[ii + 1] / 360 * 2 * np.pi) * ( jj - NX / 2) * dh) / 
dh) # add oscillation component if pars.amp_max > 0: dz = int(dz + osci[jj]) # Check if the interface is inside the model if 0 < tops[ii] + dz < NZ: vel2d[tops[ii] + dz:, jj] = vels[ii + 1] if not (pars.marine and ii == 0) and pars.max_texture > 0: vel2d[tops[ii] + dz:, jj] += texture[tops[ii]:NZ - dz, jj] elif tops[ii] + dz <= 0: vel2d[:, jj] = vels[ii + 1] if not (pars.marine and ii == 0) and pars.max_texture > 0: vel2d[:, jj] += texture[:, jj] # Output the 2D model vel2d[vel2d > pars.vp_max] = pars.vp_max vel2d[vel2d < pars.vp_min] = pars.vp_min vp = vel2d vs = vp * 0 rho = vp * 0 + 2000 return vp, vs, rho, vels, layers, angles def create_oscillation(max_osci_freq, min_osci_freq, amp_max, max_osci_nfreq, Nmax): nfreqs = np.random.randint(max_osci_nfreq) freqs = np.random.rand(nfreqs) * ( max_osci_freq - min_osci_freq) + min_osci_freq phases = np.random.rand(nfreqs) * np.pi * 2 amps = np.random.rand(nfreqs) x = np.arange(0, Nmax) osci = np.zeros(Nmax) for ii in range(nfreqs): osci += amps[ii] * np.sin(freqs[ii] * x + phases[ii]) dosci = np.max(osci) if dosci > 0: osci = osci / dosci * amp_max * np.random.rand() return osci if __name__ == "__main__": import matplotlib.pyplot as plt # Initialize argument parser parser = argparse.ArgumentParser() parser.add_argument( "--ND", type=int, default=1, help="Dimension of the model to display" ) # Parse the input for training parameters args, unparsed = parser.parse_known_args() pars = ModelParameters() pars.layer_dh_min = 20 pars.num_layers = 0 if args.ND == 1: vp, vs, rho = generate_random_1Dlayered(pars) vp = vp[:, 0] vint = interval_velocity_time(vp, pars) vrms = calculate_vrms(vp, pars.dh, pars.Npad, pars.NT, pars.dt, pars.tdelay, pars.source_depth) plt.plot(vint) plt.plot(vrms) plt.show() else: vp, vs, rho = generate_random_2Dlayered(pars) plt.imshow(vp) plt.show()
python
Defamation charges framed against Priya Ramani IANS New Delhi Last Updated : 10 Apr 2019 11:58:19 AM IST Defamation charges framed against Priya Ramani (File photo) A Delhi court on Wednesday framed charges against journalist Priya Ramani in a defamation case filed by former Union Minister MJ Akbar. Additional Chief Metropolitan Magistrate Samar Vishal framed the charges. As Ramani pleaded not guilty and claimed trial, the court listed the matter for May 4 for recording of evidences in the case. "I plead truth as my defence, made in good faith in public interest and for the public good. I will prove my defence during trial. I plead not guilty," Ramani told the court. Ramani was the first in a long list of female journalists to accuse the former Minister of State for External Affairs and journalist of sexual harassment. Akbar has denied the allegations made against him saying they were imaginary. The statements of seven witnesses, including that of Akbar, who is now with the Bharatiya Janata Party, have been recorded in the defamation case.
english
Baripada: Even as tiger population in the country is on the decline, ‘India For Tiger – A rally on Wheel’ promoted by the Union Ministry of Forest, Environment and Climate Change as part of celebrating Azadi Ki Amrut Mahotsav, concluded at a ceremony at Baripada Friday. The rally on wheels was aimed at promoting tiger conservation and biodiversity across the country through public awareness. The rally on wheels, which was kicked off in Telangana, passed through various parts of the country before reaching Kuliana Friday night where the forest department received the rally participants. A bicycle rally was also flagged off at Bhanjapur in Baripada town to mark Gandhi Jayanti Saturday. The rally was concluded at Murgabadi. A ceremony was organized at Prayas Hall here. Both the rallies assumed importance for public awareness amid sporadic reports of poaching and wildlife smuggling in Similipal and other areas. Field Director of Similipal Tiger Project, M Jagjayanand presided over the meeting while former member of NTCA Debabrata Swain was present. Speaking on the occasion, Swain said Similipal sanctuary can be well protected if forest officials come forward and relieve the burden of tribals and people living around the sanctuary. “If siali lata (a kind of creeper) and bamboo are regenerated aplenty inside the forests, wildlife can stay safe inside and would not come out into towns and villages,” he noted. Dwelling upon the subject of protection of environment and wildlife, Zilla Parishad chairperson Sujata Murmu said, “There is a need to focus on 3T – tribals, trees and tigers. Focusing on these three factors will ensure welfare of tribals and tigers, she added. Udala MLA Prakash Madhei suggested steps to organise Similipal Mahotsav to create public awareness on protection of the sanctuary. 
Morada MLA Rajkishore Das, deputy director of STP (South) Samrat Gouda, deputy director of STP (North) Sai Kiran, deputy director of NSTR (Andhra Pradesh), C Chaitanya, Baripada DFO Santosh Joshi and member of Save Elephant Foundation Rajanikant Jena underscored the need for various protective measures for Similipal and wildlife. On the occasion, four sub-divisional level forest officials were awarded Biju Pantaik Puraskar for their remarkable contributions towards protection of Similipal sanctuary. Two frontline officers engaged in protection of elephants and 14 officials of STP and forest ranges of Similipal were also awarded.
english
Love's Journey is a story about the journey of Anni and Ethan, two college students who meet and initially clash. Despite their differences, they fall in love and embark on a journey together. Along the way, they face challenges in their careers and personal lives, including business struggles, family responsibilities, and the temptation of attraction to others. However, they always manage to come back to each other and continue building their relationship. The story also features a cast of supporting characters, including friends, family, and caretakers, who play important roles in Anni and Ethan's lives. Through it all, their love prevails and they go on to have many adventures together. Currently there are no reviews available for this book. Be the first one to write a review for the book Love's Journey.
english
Bengaluru: Karnataka police Friday detained Congress state president DK Shivakumar and other party leaders who were protesting against the Surat Court verdict against Rahul Gandhi. A number of protest has erupted by the grand-old party after a court in Gujarat convicted Congress MP Rahul Gandhi in a 2019 defamation case. The case pertained to a remark of Rahul Gandhi at a Lok Sabha polls rally in Karnataka on April 13, 2019. “Why all the thieves, be it Nirav Modi, Lalit Modi or Narendra Modi, have Modi in their names,” Gandhi allegedly said during the rally. On Thursday, the court awarded imprisonment of 2 years to Congress MP Rahul Gandhi. Later, the court granted bail to Gandhi. Meanwhile, the order has been suspended for next 30 days so that the Congress MP can appeal in the High Court. Meanwhile, the Congress has decided to hold a march against the court’s verdict on Friday at Vijay Chowk in Delhi. As per media reports, AICC general secretary Jairam Ramesh informed that the march will begin from Parliament till Vijay Chowk. “We will hold protests across Delhi and in other states on Monday on this issue. The matter is just not a legal issue, but also a serious political issue linked with the future of democracy in the country,” PTI quoted Jairam Ramesh.
english
Shamshera director Karan Malhotra has finally reacted to the film's comparison with Yash-starrer KGF and Game of Thrones. The film stars Ranbir Kapoor, Vaani Kapoor and Sanjay Dutt. By Anindita Mukherjee: Three more days for Shamshera’s release and we can’t keep calm! The movie marks Ranbir Kapoor’s three-year hiatus from films after the 2018 blockbuster, Sanju. Directed by Karan Malhotra, the film also stars Vaani Kapoor and Sanjay Dutt in key roles. Ever since Shamshera’s trailer release, the film has been compared to Yash’s KGF and Game of Thrones. Director Karan shared his take on the comparisons the film’s been receiving at a press conference in Delhi. Shamshera releases on July 22. The film stars Ranbir Kapoor, Vaani Kapoor and Sanjay Dutt in lead roles. Ranbir Kapoor-starrer Shamshera is produced by Yash Raj Films. It is reportedly set in the fictitious city of Kaza, where a warrior tribe is imprisoned, enslaved and tortured by a ruthless authoritarian general Shudh Singh. In Shamshera, Ranbir Kapoor is playing a larger-than-life quintessential Hindi film hero for the first time in his career.
english
module.exports = function(controller) { controller.middleware.send.use((bot, message, next) => { if (Array.isArray(message.text)) { message.text = message.text[Math.floor(Math.random() * message.text.length)]; } next(); }); controller.hears('random','message', async(bot, message) => { bot.reply(message,{ text: [ 'Random response 1', 'Random response 2', 'Random response 3' ] }); }); }
javascript
<reponame>braposo/braid-design-system<gh_stars>1-10 import isEqual from 'lodash/isEqual'; import { Tokens } from '../../themes/theme'; interface Params { tokens: Tokens; selector: string; mobileRules: object; desktopRules: object; } export default ({ tokens, selector, mobileRules, desktopRules }: Params) => { const css = { [selector]: mobileRules, }; if (!isEqual(mobileRules, desktopRules)) { css[`@media screen and (min-width: ${tokens.responsiveBreakpoint}px)`] = { [selector]: desktopRules, }; } return css; };
typescript
<reponame>frouioui/tagenal package server import ( "context" "fmt" "log" "os" "time" "github.com/frouioui/tagenal/api/articles/db" "github.com/opentracing/opentracing-go" "github.com/opentracing/opentracing-go/ext" otlog "github.com/opentracing/opentracing-go/log" "github.com/go-redis/redis/extra/redisotel" "github.com/go-redis/redis/v8" ) var rdc *redis.ClusterClient const ( defMasterHostname = "redis-cluster.redis" defMasterPort = "6379" ) type redisServiceConfig struct { hostname string port string } type redisClusterConfig struct { master redisServiceConfig } func newRedisServiceConfig(hostname, port, defHostname, defPort string) redisServiceConfig { if hostname == "" { hostname = defHostname } if port == "" { port = defPort } return redisServiceConfig{ hostname: hostname, port: port, } } func (rdcc *redisClusterConfig) getAddrsArray() []string { return []string{ fmt.Sprintf("%s:%s", rdcc.master.hostname, rdcc.master.port), } } func initRedisClusterClient() error { rdcc := redisClusterConfig{ master: newRedisServiceConfig( os.Getenv("REDIS_MASTER_HOSTNAME"), os.Getenv("REDIS_MASTER_PORT"), defMasterHostname, defMasterPort, ), } rdc = redis.NewClusterClient(&redis.ClusterOptions{ Addrs: rdcc.getAddrsArray(), NewClient: func(opt *redis.Options) *redis.Client { node := redis.NewClient(opt) node.AddHook(redisotel.TracingHook{}) return node }, }) rdc.AddHook(redisotel.TracingHook{}) return nil } func wrapperTracing(ctx context.Context, action string) (opentracing.Span, context.Context) { parentSpan := opentracing.SpanFromContext(ctx) if parentSpan == nil { return nil, ctx } span := opentracing.StartSpan("redis "+action, opentracing.ChildOf(parentSpan.Context())) return span, opentracing.ContextWithSpan(context.Background(), span) } func setCacheArticles(ctx context.Context, query string, data db.ArticleArray) error { span, sctx := wrapperTracing(ctx, "set cache") defer span.Finish() var err error err = rdc.Set(sctx, query, &data, time.Minute).Err() if err != nil { 
if err != redis.Nil { ext.Error.Set(span, true) span.LogFields(otlog.String("error", err.Error())) } return err } return nil } func setCacheArticle(ctx context.Context, query string, data db.Article) error { span, sctx := wrapperTracing(ctx, "set cache") defer span.Finish() var err error err = rdc.Set(sctx, query, &data, time.Minute).Err() if err != nil { if err != redis.Nil { ext.Error.Set(span, true) span.LogFields(otlog.String("error", err.Error())) } return err } return nil } func getCacheArticles(ctx context.Context, query string, data db.ArticleArray) (db.ArticleArray, error) { span, sctx := wrapperTracing(ctx, "get cache") defer span.Finish() var err error str := rdc.Get(sctx, query) if err = str.Err(); err != nil { if err != redis.Nil { ext.Error.Set(span, true) span.LogFields(otlog.String("error", err.Error())) log.Println(err) } return nil, err } err = str.Scan(&data) if err != nil { ext.Error.Set(span, true) span.LogFields(otlog.String("error", err.Error())) log.Println(err) return nil, err } return data, nil } func getCacheArticle(ctx context.Context, query string, data db.Article) (db.Article, error) { span, sctx := wrapperTracing(ctx, "get cache") defer span.Finish() var err error str := rdc.Get(sctx, query) if err = str.Err(); err != nil { if err != redis.Nil { ext.Error.Set(span, true) span.LogFields(otlog.String("error", err.Error())) log.Println(err) } return db.Article{}, err } err = str.Scan(&data) if err != nil { ext.Error.Set(span, true) span.LogFields(otlog.String("error", err.Error())) log.Println(err) return db.Article{}, err } return data, nil }
go
--- layout: post title: "GPSP" subtitle: "Supplemental pdf for GPSP" date: 2018-01-04 9:00:00 author: "wenyu" --- <script> window.location.replace("../../../../pdf/GPSP.pdf"); </script>
markdown
West Indies captain Dwayne Bravo had an embargo regarding any questions on their ongoing dispute with the WICB, but the all-rounder said that the team stuck together in difficult times and maintained a “must-win attitude”. “The good thing is that the team is sticking together and supporting each other well. We have a must-win attitude in the team, which is great to see,” Bravo said when asked how tough it is to lead the side despite off-field controversies. “Curtly adds a different dimension to our team. The players really respect him and look up to him. I have grown up watching these legends and it is good to have them around where players can go to them and get some advice,” Bravo told reporters ahead of the second ODI between India and West Indies here. The West Indies Media manager, Philip Spooner had requested the reporters to strictly restrict their questions to only “cricket and tomorrow’s match”. Bravo complimented his players for excelling in all departments of the game in the first ODI at Kochi and take a 1-0 lead in the five-match series. “That was our best all-round game we have played in a long time. As a captain and as a group, you always ask your players to take up responsibility. You want one of the top four to try and bat deep into the innings and get a hundred. Samuels coming back into the team and scoring a hundred was great. Denesh Ramdin played his part as well and Dwayne Smith got a quick 46, which helped us put a good total on the board. We backed ourselves to defend it and it was an all-round performance at the end of the day. But we can’t stop there...need to repeat it,” insisted the skipper. Asked if West Indies, as team, are getting into the groove with the World Cup being just four months away, Bravo said he did not want to look too far ahead. “World Cup is still a long way to go. Playing against one of the better teams in the world gives us an opportunity to see where we are as a team. 
At the moment, we are here in India and the focus is on this tour. We want to see how we play in the next four games and how we can do as an ODI team. We can’t think about what happens four months hence. Before that we have a tour of South Africa coming up, and that will be tough as well. So we need to just focus on the present.” he was quoted saying. This website uses cookies so that we can provide you with the best user experience possible. Cookie information is stored in your browser and performs functions such as recognising you when you return to our website and helping our team to understand which sections of the website you find most interesting and useful. Strictly Necessary Cookie should be enabled at all times so that we can save your preferences for cookie settings. If you disable this cookie, we will not be able to save your preferences. This means that every time you visit this website you will need to enable or disable cookies again.
english
<reponame>baltpeter/skr-json { "id": "5046e18297d949cf481f7e19f071b8a3", "name": "Abgaben f\u00fcr Altersversorgung", "type": "Aufwand", "code": "6275", "sort-code": 6275, "description": "Abgaben f\u00fcr Altersversorgung", "categories": [ "6250 Soziale Abgaben", "6200-6279 Personalaufwand", "6170-6499 AUSGABEN / AUFWENDUNGEN", "6000-6499 SONSTIGE ZWECKBETRIEBE 1", "6000-6989 SONSTIGE ZWECKBETRIEBE", "6 Erfolgskonten f\u00fcr andere ertragssteuerfreie Zweckbetriebe" ], "leaf": true, "hierarchicalCategories.lvl5": "6 Erfolgskonten f\u00fcr andere ertragssteuerfreie Zweckbetriebe > 6000-6989 SONSTIGE ZWECKBETRIEBE > 6000-6499 SONSTIGE ZWECKBETRIEBE 1 > 6170-6499 AUSGABEN / AUFWENDUNGEN > 6200-6279 Personalaufwand > 6250 Soziale Abgaben", "hierarchicalCategories.lvl4": "6 Erfolgskonten f\u00fcr andere ertragssteuerfreie Zweckbetriebe > 6000-6989 SONSTIGE ZWECKBETRIEBE > 6000-6499 SONSTIGE ZWECKBETRIEBE 1 > 6170-6499 AUSGABEN / AUFWENDUNGEN > 6200-6279 Personalaufwand", "hierarchicalCategories.lvl3": "6 Erfolgskonten f\u00fcr andere ertragssteuerfreie Zweckbetriebe > 6000-6989 SONSTIGE ZWECKBETRIEBE > 6000-6499 SONSTIGE ZWECKBETRIEBE 1 > 6170-6499 AUSGABEN / AUFWENDUNGEN", "hierarchicalCategories.lvl2": "6 Erfolgskonten f\u00fcr andere ertragssteuerfreie Zweckbetriebe > 6000-6989 SONSTIGE ZWECKBETRIEBE > 6000-6499 SONSTIGE ZWECKBETRIEBE 1", "hierarchicalCategories.lvl1": "6 Erfolgskonten f\u00fcr andere ertragssteuerfreie Zweckbetriebe > 6000-6989 SONSTIGE ZWECKBETRIEBE", "hierarchicalCategories.lvl0": "6 Erfolgskonten f\u00fcr andere ertragssteuerfreie Zweckbetriebe", "parent": "e8c81fbbe4bbb8273a1726d112deaf4d" }
json
<gh_stars>0 # vim:fileencoding=utf-8:noet from __future__ import absolute_import import os from powerline.bindings.vim import getbufvar def help(matcher_info): return str(getbufvar(matcher_info['bufnr'], '&buftype')) == 'help' def cmdwin(matcher_info): name = matcher_info['buffer'].name return name and os.path.basename(name) == '[Command Line]' def quickfix(matcher_info): return str(getbufvar(matcher_info['bufnr'], '&buftype')) == 'quickfix'
python
{"url": "https://www.marieclaire.com/celebrity/a20875044/emilia-clarke-star-wars-screening-royal-wedding/", "text": "<NAME> was in England over the weekend, but sadly, she didn't snag an invite to royal wedding, unlike some celebrities. The Game of Thrones actress actually had a conflict that day\u2014a screening of her new movie Star Wars: A Solo Story. But, like billions of others across the globe, Clarke just couldn't resist the pull of those lovebirds and ended up ditching her own screening to watch <NAME> and <NAME> tie the knot instead.\n\n\"I had a screening of the Star Wars movie that I'm in for my friends and family on the day and I didn't get my dates right, so I didn't realize [they were on the same day],\" Clarke told Tonight Show host <NAME>. \"So I sort of thought no one would turn up, but then they did, so then they watched the movie and I watched the wedding.\"\n\nPriorities, am I right?\n\n\n\nFallon admitted to have only watched the highlights, so Clarke told him exactly how perfect the day went down. 
\"I think she kind of smashed it,\" Clarke said of Meghan's wedding gown, which Fallon jokingly clarified is the British way of saying \"killed it.\"\n\nWatch the whole clip for yourself, below.", "images": ["https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/oprah-winfrey-royal-weddinggettyimages-960025996-1526721100.jpg?crop=1.00xw:0.335xh;0,0.0403xh&resize=300:*", "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/got-1513354622.jpg?crop=1.00xw:1.00xh;0,0&resize=300:*", "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/gettyimages-958744556-1527015775.jpg?crop=1xw:0.6813880126182965xh;center,top&resize=1200:*", "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"], "top_img": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/gettyimages-958744556-1527015775.jpg?crop=1xw:0.6813880126182965xh;center,top&resize=1200:*", "keywords": [], "authors": ["<NAME>"], "canonical_link": "https://www.marieclaire.com/celebrity/a20875044/emilia-clarke-star-wars-screening-royal-wedding/", "title": "<NAME>, Star of 'Star Wars,' Skipped Her Own Screening to Watch the Royal Wedding", "meta_data": {"x-ua-compatible": "IE=edge,chrome=1", "viewport": "width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no", "msapplication-tap-highlight": "no", "theme-color": "#EC008C", "og": {"type": "article", "site_name": "<NAME>", "title": "<NAME>, Star of 'Star Wars,' Skipped Her Own Screening to Watch the Royal Wedding", "description": "Priorities.", "url": "https://www.marieclaire.com/celebrity/a20875044/emilia-clarke-star-wars-screening-royal-wedding/", "image": {"identifier": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/gettyimages-958744556-1527015775.jpg?crop=1xw:0.6813880126182965xh;center,top&resize=1200:*", "width": 1200, "height": 600}}, "fb": {"app_id": 128598260542816, "pages": 10799255126}, "article": {"publisher": "https://www.facebook.com/MarieClaire", "modified_time": "2018-05-22 
07:16:50", "published_time": "2018-05-22 07:16:00", "section": "Celebrity"}, "twitter": {"site": "@marieclaire", "card": "summary_large_image", "image": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/gettyimages-958744556-1527015775.jpg?crop=1xw:0.6813880126182965xh;center,top&resize=640:*"}, "google-site-verification": "<KEY>", "title": "Emilia Clarke Skipped Her Star Wars Screening to Watch the Royal Wedding - Celebrities and Royal Wedding", "description": "<NAME> told Jimmy Fallon she skipped a screen of her new 'Star Wars' movie to watch the royal wedding instead.", "keywords": "emilia clarke stars wars, emilia clarke royal wedding, the royal wedding, <NAME>, <NAME> skipped star wars showing, <NAME>, celebrities royal wedding, meghan markle wedding gown, <NAME>ke meghan markle", "auto-publish": "timely", "thumbnail": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/gettyimages-958744556-1527015775.jpg?crop=0.7337962962962963xw:1xh;center,top&resize=320:*", "sailthru.contenttype": "standard-article", "sailthru.tags": "Celebrity,celebrity", "sailthru.date": "2018-05-22T19:16:00Z", "sailthru.image.full": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/gettyimages-958744556-1527015775.jpg?crop=1xw:0.6813880126182965xh;center,top", "sailthru.image.thumb": "https://hips.hearstapps.com/hmg-prod.s3.amazonaws.com/images/gettyimages-958744556-1527015775.jpg?crop=0.7337962962962963xw:1xh;center,top", "sailthru.socialtitle": "<NAME>, Star of 'Star Wars,' Skipped Her Own Screening to Watch the Royal Wedding", "m1": ".content-hed", "m2": ".content-dek p"}, "movies": [], "publish_date": 1526987760.0, "source": "https://www.marieclaire.<EMAIL>", "summary": ""}
json
<filename>examples/worldFlights/data/places/Saginaw^United_States.json [["1881", "continental airlines", "saginaw", "united states", "chicago", "united states", "0"], ["1881", "continental airlines", "chicago", "united states", "saginaw", "united states", "0"], ["2009", "delta air lines", "detroit", "united states", "saginaw", "united states", "0"], ["2009", "delta air lines", "saginaw", "united states", "detroit", "united states", "0"], ["2009", "delta air lines", "saginaw", "united states", "minneapolis", "united states", "0"], ["2009", "delta air lines", "minneapolis", "united states", "saginaw", "united states", "0"], ["5209", "united airlines", "saginaw", "united states", "chicago", "united states", "0"], ["5209", "united airlines", "chicago", "united states", "saginaw", "united states", "0"]]
json
{"npo.js":"<KEY>,"npo.src.js":"<KEY>}
json
<reponame>walnutdust/specjs /* eslint-disable no-console, mocha/no-setup-in-describe */ const originalLogFunction = console.log; let output; export function suspendConsole() { output = ''; console.log = (msg) => { output += msg + '\n'; }; } export function restoreConsole() { console.log = originalLogFunction; // undo dummy log function if (this.currentTest.state === 'failed') { console.log(output); } } /* eslint-enable no-console, mocha/no-setup-in-describe */
javascript
{"other_ids": [], "mRNA_sequence": [{"index": 0, "type": "five_prime_UTR", "sequence": "ACGCGTCGTAACACATTTTAATATTACGCGTAACTTTAAGATGTGTTGTTTGTGTACGTGCATGTCAACAATGGTTGTTTACTTTCAAAGTTATCCTTTGCGCACAACACATTGCTATAGAAAAGCCCTTAAAAGCTGAAAAGAATAGTAGTGTTATTTAATTCTCTACAAAAGGAAATAAATCCTTTGTGATTTTCCTTTTGTTGGAAGAGTTGAACTACATTATCATCTGCATATTATTTATTATTTAGGAAAACCGGAGATATCAAAGTGAAAACGGGATTTTAACAGCTAATAAGCAAAAAAATATAACTTTATTTACATTCAGGTTTCAGTTAAAG"}, {"index": 1, "type": "CDS", "sequence": "ATGGATTCATCAACCACCATCCCAATTACTCCAACGCGAACACCATGTTTTTTTAATATATCCTCTTCGTTTAATGAGCATTCTCCATTAAATTTCTACGATGAACCCATTTACAATTTTTCGTCTGGCCATGAAGAAAATCAGAGCCATAAGTCATCAAAACTGACATTCTTTAAACCTTCCAATACAAAAAGAAGTCCACATACCCCTATGCAAAATAATGCAAAAGCCATCAGATTATCCACTACAGTTCGCCACGGAATTTTTAAGAATTCAGATCTCGATGGGTGTAGCAAACCATTTGCATTTTCGAGCGGCTTGAAGTTATCCAAAAAAATTGTGGATGCTTCAACGCCAATTGATTTAAAACGAAAAAGGGCGGTGACTTCTTTAAGCACAGGACTGTTATCAAAACGTGAAAAGTGGTCATTATGGGAAGGGAATTTGACTAATCCAAGAAGTGAACAACCTCATACACCTTGTAAAAAGGGAACGAAAATAAAATTAAAACCACCTCAATCTCCGCTGTCACCCACTACCTCTCTTTTAGCTCGCAAATGTAAACATATCGACCTAGATACTTTTAGTCGGTTGGATCATCCAAACTCTGACAGCTCGGATGAAACGTTTGAAATGGAAGAACTTCCTTCTTTGTCTTATGGTTCAGAGGACTTGTTGGAATTTTGCGAAACGCCTTGCAAGAGTCAGCCTATTTTCCTTTCAAGTTCCCACGTTAATAACTGGGATGAGAAAGACGTACCTTCTTCATTAAGTTGGACACCTACCAGTCCAATTTTTTTAAACATCAACAGCGCAGATGATTACGAAGAAGAGGAAGATTGGACTAGTGATCTTCGAATTCGATTCCAACAAGTTAAGCCGATACACGAGAGTGATTTTTCATTTGTATATCATGTGTCTTCGATCAACCCACCTACTGAAACGGTTTACGTTGTAAAAATGTTAAAAAAAAATGCAGCCAAGTTTACAGGAAAAGAGCGTCACTTACAGGAAGTATCTATTTTGCAACGCTTGCAAGCATGTCCATTTGTAGTAAATCTAGTGAACGTTTGGAGCTACAATGATAATATATTTTTGCAATTAGATTATTGTGAAAACGGAGACCTAAGTTTATTTTTAAGCGAATTGGGATTATTGCAGGTCATGGATCCTTTTCGCGTGTGGAAAATGTTGTTTCAACTAACTCAA"}, {"index": 2, "type": "intron", "sequence": "GTAAGTTATTACATATTTAAAAGTCGCGTTCCACAGACACGTCATTTGAATTCACTAACCGAACATAG"}, {"index": 3, "type": "CDS", "sequence": 
"GCATTAAATTTCATTCATCTTTTAGAATTTGTTCATTTAGACGTTAAGCCTTCCAACGTTCTTATAACTCGTGATGGTAACTTAAAGTTGGGAGACTTCGGACTAGCAACTAGTTTACCGGTCTCCTCGATGGTTGATCTCGAAGGCGACAGAGTCTATATAGCTCCCGAAATATTAGCTTCGCATAATTATGGGAAACCAGCCGATGTATATTCACTAGGCCTCAGTATGATTGAAGCCGCAACAAATGTAGTCCTGCCAGAAAATGGTGTTGAATGGCAACGACTAAGGTCTGGAGATTACTCAAATTTGCCAAATCTTAAAGACCTCCTTTTGTCGAAAGAAAAGGTTCAAATAAACAAGGTGAGGTGTGCTGAATCATTACAGTGTTTGCTTCAGCGTATGACGCATCCGTACGTGGATTGTCGACCCACAACACAAGACTTGTTAGCAATGCCGGAAATGATATTCATCTCTGAGCATTCTCAAAAAGCTGCAATTATCTACGAAGACCATAACAGTTGGTTAGAAACTTGA"}, {"index": 4, "type": "three_prime_UTR", "sequence": "GGGATTCTGTGTGCGAATATTTTAGCCCCACAATGAGGGAATGAATTTTGCGTTCTTCCATGGTTCATTAATTTGTATTTTATACCAATATTTTTATATTTTTAAATTTTAATGTTTCTTTTGTTCGTCTGTTCGAGTGACCTTGTGTTTACCCCAATTCAATTGGTGCAAAAAATTACCCCGGGTCGATCTTTTTTTAATACCCCCTGGCAATCCATCTTTTTGGTCATTTTTAAAAGAACATCTTTTGACGGCTAAACGTCAATCGTCTAAGTCTTTTGATCAACTGCTGGGTATTTTACGTTTAAAAATCCACCTAATGTCGCATTTTGTGTTTTAGTGAAAAAACTTGAGCGTTTTTTTTATGCAGGGAATTGCTGATATTGTGATGGATAATTATTTTAAGCTTCCTAGTCTTTCGCAGAAGTGCTGTTTTAAGGAACTCAAAATATGGAAGTGCCTTGTCTTTGTATACGTATTCCGCCTGATCTATCGTTACAATTGCTGAATTATGAAGCCTTCCGGTATAGCTGTAAGATTATATAATTATGTAAAAATTTTCATATTTAGTTATTGAGTTACAAGCAATAATATCAACAAATCAT"}], "other_names": [], "id": "SPBC660.14", "name": "mik1"}
json
Reliance Industries Limited has announced the Jio Glass - a mixed reality headset. The announcement was made at the first-ever virtual AGM event for Reliance. The latest product will enhance the virtual space and will make it more interactive by utilising 3D avatars, holographic content and also video conferencing features. The Jio Glass carries a weight of around 75 grams and features personalised audio. For users who wish to access content on the device, Jio will be shipping a cable which can be attached to the smartphone and will allow users to access the content. The Jio Glass can also be used to make 3D avatars which are aimed at making interactions better virtually. The company is also bringing the feature in discussions using 3D avatars. According to Jio, the Jio Glass will support 25 apps as of now and the Glass can also be utilised for educational purposes by leveraging holographic content. Reliance also announced a new stakeholder in their Jio Platforms. The company announced at the online AGM that Google will be their new strategic partner and stated that the search engine giant will be investing Rs 33,737 crore - holding a stake of 7.7 per cent.
english
India's first chess International Master Manuel Aaron, and 1975 hockey World Cup winner V. J. Philips, were honoured with Lifetime Achievement awards at the Arise Steel-TNSJA (Tamil Nadu Sports Journalists’ Association) awards function, here on Sunday. "I can proudly say that TNSJA is the only organisation to have properly recognised me for my achievement, that too, after 42 years," said Philips in his acceptance speech. Tamil Nadu cricketer Dinesh Karthik and athlete G. Lakshmanan were named the ‘Players of the Year’. "This is a big motivation. I believe I'm a hard-worker. I still have to improve a lot. My aim is to compete in the Olympics," said Lakshmanan. The Lifetime Achievement award for Journalists was awarded posthumously to Nirmal Shekar, former sports editor of 'The Hindu', and to R. Mohan, resident editor of 'Deccan Chronicle'. S. Loganathan, who coached Lakshmanan to the 5000m gold in the Asian athletic championships in July, was named the ‘Coach of the Year’. The Tamil Nadu limited overs cricket team which won the Vijay Hazare trophy was the ‘Team of the Year’. S. Nandakumar (football) and S. Selena Deepthi (table tennis) were named ‘Young Achievers’. BCCI chairman of selectors M.S.K. Prasad graced the occasion, and the function was hosted by T.V. anchor Sumanth C. Raman. Arise Steel-TNSJA annual award winners: Players of the year: Dinesh Karthik (cricket); G. Lakshmanan (athletics); Award of excellence: P. Anitha (basketball); S. Arokia Rajiv (athletics); K. Jennitha Anto (chess); Hall of fame: V.J. Philips (hockey); Manuel Aaron (chess); R. Mohan (journalism); Nirmal Shekar (journalism); Young players of the year: S. Nanda Kumar (football); S. Selena Deepthi (table tennis); Team of the year: Tamil Nadu limited overs cricket team; Young team of the year: Tamil Nadu U-14 rugby team; Coach of the year: S. Loganathan (athletics). Arise Steel-TNSJA scholarship winners (Rs. 30,000 each) : R. Mohan Kumar (athletics); Sankar Muthusamy (badminton); P. 
Baladhaneswar (basketball); P. Jeevanantham (basketball); P. Iniyan (chess); K. Priyanka (chess); R. Adithi (football); B. Jothika (rowing); S. Shrikrishna (snooker); S. Yashini (table tennis); Preyesh Suresh Raj (table tennis); Dhakshineswar Suresh (tennis).
english
<reponame>frflefevre/defiiv {"py/object": "rasa_core.featurizers.MaxHistoryTrackerFeaturizer", "state_featurizer": {"py/object": "rasa_core.featurizers.BinarySingleStateFeaturizer", "user_feature_len": 19, "slot_feature_len": 0, "num_features": 54, "input_state_map": {"intent_about_costa_rica": 0, "intent_activities_offered": 1, "intent_add_to_mytrip": 2, "intent_contact_us": 3, "intent_greet": 4, "intent_main_menu": 5, "intent_more_info_natural_exploration": 6, "intent_more_info_subwing": 7, "intent_more_info_zipline_tour": 8, "intent_other_activities": 9, "intent_plan_my_trip": 10, "intent_testimonials": 11, "intent_tips_1": 12, "intent_tips_2": 13, "intent_tips_3": 14, "intent_tips_and_tricks": 15, "intent_tour_details_natural_exploration": 16, "intent_tour_details_subwing": 17, "intent_tour_details_zipline": 18, "prev_action_listen": 19, "prev_action_restart": 20, "prev_action_default_fallback": 21, "prev_action_deactivate_form": 22, "prev_action_revert_fallback_events": 23, "prev_action_default_ask_affirmation": 24, "prev_action_default_ask_rephrase": 25, "prev_action_back": 26, "prev_utter_greet": 27, "prev_utter_first_message": 28, "prev_utter_welcome_message": 29, "prev_utter_plan_trip_one": 30, "prev_utter_activities": 31, "prev_action_activities_offerd": 32, "prev_utter_zipline_tour": 33, "prev_utter_natural_exploration": 34, "prev_utter_subwing": 35, "prev_utter_zipline_tour_details": 36, "prev_utter_natural_tour_details": 37, "prev_utter_subwing_tour_details": 38, "prev_utter_add_trip": 39, "prev_utter_contact": 40, "prev_utter_testimonials": 41, "prev_action_testimonials": 42, "prev_utter_about": 43, "prev_action_about_costa_rica": 44, "prev_utter_costa_rica": 45, "prev_utter_tips_tricks": 46, "prev_utter_tip1": 47, "prev_utter_tips2": 48, "prev_utter_tips3": 49, "prev_action_otherActivities_costa_rica": 50, "prev_utter_default": 51, "prev_trip_plan_form": 52, "active_form_trip_plan_form": 53}}, "use_intent_probabilities": false, "max_history": 3, 
"remove_duplicates": true}
json
{"id": 2288, "title": "Ticket #2288: INVALID_LOG_FILES config variable default might be broken", "description": "<blockquote>\nLIGO was surprised by core files being removed by preen. This is what they stated:\n\n<p></p><div class=\"verbatim\">\n<pre>Unfortunately, I can't find one. It looks like the default behavior of\nINVALID_LOG_FILES may have changed for 7.6?\n\nI thought in the past it was sufficient to comment out the following in the\nconfiguration file,\n\n## What files should condor_preen remove from the log directory?\n#INVALID_LOG_FILES = core\n\nhowever, with this commented out I see both,\n\n[root@ldas-pcdev1 condor]# condor_config_val INVALID_LOG_FILES\ncore\n[root@ldas-pcdev1 condor]# condor_config_val INVALID_LOG_FILES -schedd\ncore\n\nIs now necessary to set this to a null string instead of commenting it out (in\norder to have PREEN not remove core files)?\n\nIndependent of this, does it make sense that condor_config_val without the\n-schedd option would report any value at all when there is no uncommented line\nin the configuration file?\n\n[root@ldas-pcdev1 ~]# grep INVALID_LOG_FILES /etc/condor/condor_config.local\n[root@ldas-pcdev1 ~]# grep INVALID_LOG_FILES /etc/condor/condor_config\n#INVALID_LOG_FILES = core\n</pre></div>\n</blockquote>", "remarks": "<blockquote>\n<em>2011-Jul-08 14:48:35 by psilord:</em> <br/>\n\nI changed the default in the param info table and adesmet reviewed it.</blockquote>", "derived_tickets": "", "attachments": "<html><head></head><body></body></html>", "check_ins": "<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\">\n<tbody><tr><td align=\"right\" valign=\"top\" width=\"160\">2011-Jul-08 14:32</td>\n<td align=\"center\" valign=\"top\" width=\"30\">\n<span class=\"icon dot\">\u00a0</span></td>\n<td align=\"left\" valign=\"top\"> \nCheck-in <span class=\"chng\"><a href=\"https://github.com/htcondor/htcondor/commit/59cea157f018a25344461f36a9706400e7fd8ddd\">[22387]</a></span>: ===GT=== <span class=\"ticket\"><a 
class=\"defer\" href=\"/wiki-archive/tickets/?ticket=2288\" onclick=\"get_ticket_and_populate_wrapper('2288'); return false;\" title=\"INVALID_LOG_FILES config variable default might be broken\">#2288</a></span> ===GT:Fixed=== <span class=\"ticket\"><a class=\"defer\" href=\"/wiki-archive/tickets/?ticket=2288\" onclick=\"get_ticket_and_populate_wrapper('2288'); return false;\" title=\"INVALID_LOG_FILES config variable default might be broken\">#2288</a></span> When the param table went in, the default value for INVALID_LOG_FILES was mistakenly (though not unreasonably) taken from condor_config.generic instead of from the code base itself. This resulted in condor daemon core files being removed by preen unexpectedly. That\u00a0[...]\n (By <NAME> )</td></tr>\n</tbody></table>", "type": "defect", "last_change": "2011-Jul-12 13:12", "status": "defer", "created": "2011-Jul-08 11:20", "fixed_version": "2011-Jul-08 11:20", "broken_version": "v070601", "priority": "5", "subsystem": "", "assigned_to": "psilord", "derived_from": "", "creator": "psilord", "rust": "", "customer_group": "ligo", "visibility": "public", "notify": "<EMAIL>, <EMAIL>,<EMAIL>", "due_date": "20110708"}
json
While there are hundreds of coronaviruses that cause diseases in animals such as pigs, camels, bats and cats, till date seven different types of coronaviruses have been identified that infect humans. What are they and what kind of symptoms do they cause? Coronaviruses are a large family of single-stranded RNA viruses that cause diseases in animals and humans. In humans, the viruses usually cause mild to moderate upper-respiratory tract illnesses such as the common cold. In the last two decades, more aggressive coronaviruses have emerged that are capable of causing serious illness and even death in humans. These include SARS-CoV, MERS and now SARS-CoV-2. Human coronaviruses were first characterised in the mid-1960s and they are mostly considered to be responsible for causing upper respiratory tract infections in children. In 1965, scientists DJ Tyrrell and ML Bynoe were the first ones to identify a human coronavirus, which they isolated from the nasal washing of a male child who had symptoms of common cold. They termed the strain B814 and later in 1968 the term “coronavirus” was accepted. These viruses are named so because of spikes found on their surface that give them the appearance of a crown when looked through an electron microscope. In animals, coronaviruses can cause diarrhea in cows and pigs and upper respiratory tract disease in chickens. The first coronavirus was isolated in 1937 and it was the infectious bronchitis virus (IBV) that caused respiratory disease in chickens. Broadly, coronaviruses (CoV) are the largest group of viruses that belong to the Nidovirales order, which includes Coronaviridae among three others. Coronavirinae are one of the two subfamilies of Coronaviridea, with the other being Torovirinae. Coronavirinae can be further subdivided into alpha, beta, gamma and delta coronaviruses. 
The Coronavirus Study Group of the International Committee for Taxonomy of Viruses is responsible for classifying them and roughly seven years ago they classified them into the aforementioned divisions instead of the serological groups of three. According to a paper published in the Journal of Virology, while coronaviruses from all four categories can be found in mammals, bat coronaviruses are the likely gene source of alpha and beta coronaviruses, while avian coronaviruses are the probable gene sources of gamma and delta coronaviruses. While there are hundreds of coronaviruses, there are seven that we know can infect humans. Out of the seven, two are alpha coronaviruses (229E and NL63) and four are beta coronaviruses (OC43, HKU1, MERS and SARS-CoV). The classification of the viruses is based on their phylogeny, which is to say it reflects how these virus strains evolved from their common ancestors. Essentially, this means whenever a virus newly emerges, its classification depends on how it relates to other known viruses and if it is distinct enough to be called a new species or if it belongs to an existing species. For instance, SARS-CoV and SARS-CoV-2 are genetically linked. Alternatively, coronaviruses may be classified based on serology (monitoring the immune system’s antibody response to viral exposure) as per which they can be divided into three groups from I to III. Groups I and II refer to mammalian coronaviruses and Group III includes avian coronaviruses. 229E is included in Group I, which largely includes animal pathogens. Group II largely consists of pathogens of veterinary relevance and includes OC43, HKU1 and NL63. SARS coronaviruses are classified in Group II as well. Around the world, people commonly get infected by 229E, HKU1, NL63 and OC43. Sometimes, coronaviruses that infect animals can evolve and become a human coronavirus, which include MERS, SARS-CoV-1 and SARS-CoV-2. 
229E: One of the first coronaviruses strains to be described in the mid-60s, possibly by D Hamre and JJ Procknow in their 1966 paper titled, “A new virus isolated from the human respiratory tract”, published in Experimental Biology and Medicine. OC43: Discovered in 1967 according to the Journal of Virology. However, a paper in Virology Journal has described it as the first human coronavirus to be discovered in 1965, citing a 1966 paper written by Tyrrell and Bynoe who worked with the nasal swab titled B814. NL63 and HKU1: First identified in the Netherlands in 2004, probably after it was isolated from a seven-month-old infant showing respiratory symptoms. During this time, there was an increase in research on human coronaviruses, which led to the discovery of NL63 and HKU1 in Hong Kong in early 2005. SARS-CoV: 2003 in China (animal source not yet known, bats thought to have given it to other animals, probably civet cats) MERS: 2012 in Saudi Arabia (transmitted by dromedary camels) SARS-CoV-2: 2019 in Wuhan (source not yet known, possibly bats) Before SARS-CoV-2 and MERS, SARS-CoV was the first example of a human coronavirus that could cause serious illness in humans in the form of severe acute respiratory syndrome. Other human coronaviruses such as OC43 and 229E are known to cause the common cold, whereas NL63 is associated with serious respiratory symptoms such as upper respiratory tract infection and pneumonia. According to a paper published in Microbiology and Molecular Biology Reviews in 2005, while NL63 is primarily associated with infections among children, it has also been detected in immuno-compromised adults with respiratory tract infections. OC43 can also cause gastroenteritis. SARS-CoV on the other hand, was identified after the 2003 outbreaks in China. It is thought to have come from an as yet unknown animal source, probably bats. Symptoms of SARS include cough, shortness of breath, diarrhea. 
In severe cases, the symptoms can progress to respiratory distress, which may require intensive care. MERS is another viral respiratory disease caused by a human coronavirus, which was first identified in Saudi Arabia in 2012. Typical symptoms include fever, cough and shortness of breath. Further, while SARS-CoV-2 is considered to be milder than SARS-CoV and MERS, it is especially difficult to control its outbreak, given its high infectiousness. Don’t miss these articles on Coronavirus from the Explained section: ‣ Besides a face cover, should I wear gloves when I go outdoors?
english
import sys

sys.setrecursionlimit(10**6)


def main():
    """Read N and N heights from stdin; return 'Yes' if each square can be
    left as-is or lowered by exactly 1 so that heights are non-decreasing
    left to right, else 'No'."""
    read = sys.stdin.readline
    read()  # N: consumed but not needed beyond advancing the stream
    heights = [int(token) for token in read().split()]
    floor = 0  # minimum height the next square must reach (after -1 option)
    for height in heights:
        if height < floor:
            return 'No'
        if height - 1 > floor:
            floor = height - 1
    return 'Yes'


if __name__ == '__main__':
    print(main())
python
package io.mycat.backend.nio;

import io.mycat.backend.MySQLDataSource;
import io.mycat.net.NetSystem;
import io.mycat.server.config.node.DBHostConfig;
import io.mycat.server.executors.ResponseHandler;

import java.io.IOException;
import java.nio.channels.SocketChannel;

/**
 * Factory for non-blocking MySQL backend connections. Each call opens a new
 * NIO socket channel, wires it to the shared protocol handler and the given
 * response handler, and hands the connection to the connector so the actual
 * connect completes asynchronously.
 */
public class MySQLBackendConnectionFactory {

	/** Shared NIO handler that drives the MySQL protocol for all connections. */
	private final MySQLBackendConnectionHandler nioHandler = new MySQLBackendConnectionHandler();

	/**
	 * Creates and registers a new backend connection to the data source's
	 * configured host.
	 *
	 * @param pool    data source supplying host configuration and pooling
	 * @param handler callback notified of responses on this connection
	 * @param schema  schema (database) the connection should use
	 * @return the new connection; connect completes asynchronously after
	 *         it is posted to the connector
	 * @throws IOException if the socket channel cannot be opened/configured
	 */
	public MySQLBackendConnection make(MySQLDataSource pool,
			ResponseHandler handler, String schema) throws IOException {
		DBHostConfig dsc = pool.getConfig();
		SocketChannel channel = SocketChannel.open();
		channel.configureBlocking(false); // non-blocking: managed by the NIO reactor

		MySQLBackendConnection c = new MySQLBackendConnection(channel,
				pool.isReadNode());
		NetSystem.getInstance().setSocketParams(c, false);
		// Install the NIO handler (translated from original comment).
		c.setHandler(nioHandler);
		c.setHost(dsc.getIp());
		c.setPort(dsc.getPort());
		c.setUser(dsc.getUser());
		// Restored: the source contained an anonymization placeholder
		// ("<PASSWORD>()") that did not compile; the credential comes from
		// the host config, mirroring getUser() above.
		c.setPassword(dsc.getPassword());
		c.setSchema(schema);
		c.setPool(pool);
		c.setResponseHandler(handler);
		c.setIdleTimeout(pool.getConfig().getIdleTimeout());
		// Hand off to the connector; the TCP connect finishes asynchronously.
		NetSystem.getInstance().getConnector().postConnect(c);
		return c;
	}
}
java
version https://git-lfs.github.com/spec/v1 oid sha256:b15d168384341f17e4d5bddb6ff60a75d45d761b94387f0b67ea833fb833cb69 size 171
json
import React, { useRef } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { Path, Raphael, Set, Text } from 'react-raphael';
import Checkbox from 'components/common/checkbox';
import { FADES_MS, OPTIONS, PENALTY_MS } from 'components/constants';
import { actions as optionsActions, selectors as optionsSelectors } from 'redux/options';
import './styles.less';

/**
 * Options panel rendered with react-raphael: a set of labelled checkboxes
 * (timer, highlighting, auto-remove notes, number-first input, instant
 * feedback, penalty) plus a "Save Options" back control. Each control reads
 * its current value from the redux options store and toggles it on click.
 * The whole panel fades in/out based on the `visible` option.
 * NOTE(review): `props` is accepted but never read — presumably kept for the
 * route/component signature; confirm before removing.
 */
function Options(props) {
  const dispatch = useDispatch();
  // Current value of each persisted option, read from the redux store.
  const feedback = useSelector(optionsSelectors.isFeedback);
  const highlighting = useSelector(optionsSelectors.isHighlighting);
  const numberFirst = useSelector(optionsSelectors.isNumberFirst);
  const optionsVisible = useSelector(optionsSelectors.isVisible);
  const penalty = useSelector(optionsSelectors.isPenalty);
  const removeNotes = useSelector(optionsSelectors.isRemoveNotes);
  const timer = useSelector(optionsSelectors.isTimer);
  // Persists a single option value to the store.
  const setOption = (option, value) => dispatch(optionsActions.SET_OPTION({ option, value }))
  const hide = !optionsVisible;
  // Raphael z-order helper: raise elements while the panel is visible,
  // sink them behind everything else otherwise.
  const elToFront = el => optionsVisible ? el.toFront() : el.toBack();
  let isLoaded = useRef(false);
  // First render starts fully transparent (no animation duration);
  // subsequent renders fade toward the target opacity.
  let animation = isLoaded.current ? optionsVisible ? Raphael.animation({ opacity: 1 }, FADES_MS.FAST) : Raphael.animation({ opacity: 0 }, FADES_MS.FAST) : Raphael.animation({ opacity: 0 })
  isLoaded.current = true;
  return (
    <Set>
      {/* Back Button */}
      <Set>
        <Text text={'Save Options'} x={70} y={25} styleName={'save text'} animate={animation} click={() => setOption(OPTIONS.VISIBLE, false)} update={elToFront} />
        <Path d={'M15,15 L15,35 L5,25 Z'} styleName={'save path'} animate={animation} click={() => setOption(OPTIONS.VISIBLE, false)} />
      </Set>
      {/* Timer */}
      <Set>
        <Text text={'Enable Timer'} x={270} y={175} styleName={'option'} animate={animation} click={() => setOption(OPTIONS.TIMER, !timer)} update={elToFront} />
        <Checkbox x={350} y={160} click={() => setOption(OPTIONS.TIMER, !timer)} hide={hide} animate={animation} value={timer} />
      </Set>
      {/* Highlighting */}
      <Set>
        <Text text={'Number highlighting'} x={232} y={225} styleName={`option`} animate={animation} click={() => setOption(OPTIONS.HIGHLIGHTING, !highlighting)} update={elToFront} />
        <Checkbox x={350} y={210} click={() => setOption(OPTIONS.HIGHLIGHTING, !highlighting)} hide={hide} animate={animation} value={highlighting} />
      </Set>
      {/* Notes */}
      <Set>
        <Text text={'Auto Remove Notes'} x={244} y={275} styleName={`option`} animate={animation} click={() => setOption(OPTIONS.REMOVE_NOTES, !removeNotes)} update={elToFront} />
        <Checkbox x={350} y={260} click={() => setOption(OPTIONS.REMOVE_NOTES, !removeNotes)} hide={hide} animate={animation} value={removeNotes} />
      </Set>
      {/* Number First */}
      <Set>
        <Text text={'Select Number First'} x={233} y={325} styleName={`option`} animate={animation} click={() => setOption(OPTIONS.NUMBER_FIRST, !numberFirst)} update={elToFront} />
        <Text text={'(instead of selecting cell and then number)'} x={180} y={343} styleName={`option description`} animate={animation} click={() => setOption(OPTIONS.NUMBER_FIRST, !numberFirst)} update={elToFront} />
        <Checkbox x={350} y={310} click={() => setOption(OPTIONS.NUMBER_FIRST, !numberFirst)} hide={hide} animate={animation} value={numberFirst} />
      </Set>
      {/* Instant Feedback */}
      <Set>
        <Text text={'Instant Feedback'} x={248} y={378} styleName={`option`} animate={animation} click={() => setOption(OPTIONS.FEEDBACK, !feedback)} update={elToFront} />
        <Checkbox x={350} y={360} click={() => setOption(OPTIONS.FEEDBACK, !feedback)} hide={hide} animate={animation} value={feedback} />
      </Set>
      {/* Penalty */}
      <Set>
        <Text text={'Penalty for wrong number'} x={206} y={425} styleName={`option`} animate={animation} click={() => setOption(OPTIONS.PENALTY, !penalty)} update={elToFront} />
        <Text text={`(+${PENALTY_MS / 1000} seconds, when instant feedback enabled)`} x={180} y={440} styleName={`option description`} animate={animation} click={() => setOption(OPTIONS.PENALTY, !penalty)} update={elToFront} />
        <Checkbox x={350} y={410} click={() => setOption(OPTIONS.PENALTY, !penalty)} hide={hide} animate={animation} value={penalty} />
      </Set>
    </Set>
  )
}

export default Options;
javascript
Volume 45 All articles Published: February 2020 Article ID 0049 Original Article (Mechanical Sciences) Stirrups of reinforced concrete members are very prone to corrosion compared with longitudinal reinforcements, resulting from their small concrete covers, which lead to concrete cracking and spalling. Due to the adverse effects of corrosion, this article aims to investigate the amount of reduction in the capacity of reinforced concrete specimens in different corrosion degrees. For this purpose, an experimental investigation is carried out on 22 reinforced and non-reinforced rectangular prism specimens, of which 12 reinforced specimens are corroded. The test variables contain the corrosion percentage, and the stirrup diameter and spacing. Eventually, all specimens are tested for compressive strength for 90 days. The experimental results show that the reduction of compressive strength depends on the corrosion percentage and stirrup diameter. According to this conclusion, a new formulation is proposed to express the relationship between compressive strength reduction and its effects. The present study investigated the durability indices of slag-modified concrete during the self-healing process. For this purpose, slag powder was substituted by 0, 15, 30, and 45% by weight of cement. Then, 28-day moist cured cylindrical specimens were prepared to investigate their autogenous self-healing ability. Two gaps with 100 and 200 microns were created in the specimens, and for re-curing, cracked specimens were exposed to tap water for another 28 days. RCPT, water passing, and electrical resistance tests were employed to evaluate the self-healing progress in the gaps and their durability indices. Moreover, XRD and SEM were utilized to achieve further information about the process of concrete self-healing. Results showed that the slag-modified concretes can heal the wide gaps, and the self-healing ability improves by increasing slag substitution.
Also, it is proposed that the durability of the self-healed slag-modified concretes can be assumed more than that of the sound control concretes under chloride attack. Moreover, XRD and SEM observations revealed that a portion of portlandite was leaked into the curing water during the self-healing process.
english
package org.educama.shipment.boundary;

import org.educama.shipment.model.Shipment;

import java.util.Collection;

/**
 * Boundary service exposing the shipment use cases.
 */
public interface ShipmentBoundaryService {

    /**
     * Creates a shipment.
     *
     * @param shipment which is to be created as Case
     * @return the created shipment
     */
    Shipment createShipment(Shipment shipment);

    /**
     * Retrieves all shipments.
     *
     * @return a collection of all shipments
     */
    Collection<Shipment> findAll();
}
java
West Indies captain Darren Sammy said his side expect to win their upcoming one-day series with England after finishing the Test campaign with a flourish. West Indies captain Darren Sammy said his side expect to win their upcoming one-day series with England after finishing the Test campaign with a flourish. Although rain washed out three of the five days in the third Test here at Edgbaston, the West Indies’ total of 426 featured an unbeaten century from wicket-keeper Denesh Ramdin, a stylish 76 by Marlon Samuels and Tino Best’s 95 — the highest score by a Test match No 11. The three-match series may have already been lost after defeats at Lord’s and Trent Bridge but, with Best topping the 90mph mark against opponents he could meet again when a trio of one-dayers begins at Southampton on Saturday, it was a timely reminder to England of the perils of taking the West Indies lightly. West Indies’ recent results suggest they are far more of a force in one-day cricket and with Chris Gayle back on board after more than a year out following a dispute with Caribbean cricket chiefs, they have one of the world’s leading limited overs batsmen in their line-up. Kieron Pollard, Andre Russell and Dwayne and Darren Bravo could also pose problems to an England side captained by Alastair Cook in one-dayers now his Test skipper and fellow opener Andrew Strauss has quit the white-ball game. West Indies pushed Australia all the way in a five-match one-day series in March that ended all square at 2-2 after the third ODI in St Vincent was tied. Sammy said West Indies coach Ottis Gibson, formerly England’s bowling coach, had identified one-day cricket as an area where the side, who’ve now won just two of their last 33 Tests, could make big strides. 
“When Ottis came on board we set goals and as a one-day team, looking to win the 2015 World Cup (in Australia and New Zealand), that’s possible,” said Sammy, who added success at this year’s World Twenty20 in Sri Lanka in September and October was also a realistic target. “In Test cricket, where you need a lot of concentration, it might take more time for us to get back and win games consistently because we’ve been losing at that level for a long time,” said Sammy, whose maiden Test century came during a nine-wicket second Test defeat at Trent Bridge. But against England the tourists showed enough glimpses of talent to suggest that, if they get their strongest side on the field, they could yet make headway in Tests, even if there is no immediate prospect of a return to the heights scaled by the celebrated West Indies teams of the 1970s and 1980s. “We competed but we didn’t compete for long enough over an extended period of time and hence we lost 2-0,” said medium-pacer Sammy. Samuels — the West Indies’ man of the series after making 386 runs at an average of 96. 50 — proved a thorn in England’s side throughout while Ramdin and Best stunned the hosts with a last-wicket stand of 143 at Edgbaston. “We said we would come here with a never-say-die attitude and that last-wicket partnership was just a perfect example,” said Samuels. “It was great to watch and on that same flat wicket we got five wickets. (AFP)
english
Mysuru: The Department of Electronics and Communication Engineering, ATME College of Engineering, Mysuru, will be conducting ‘URJA 2k19,’ an annual National-level Technical Festival with multiple events in association with the Institution of Electronics and Telecommunication Engineering (IETE), India, on Mar. 21 and 22. The motive of this event is to develop various skills in students in co-curricular activities and to expose them to the current trends in the Technical and Professional fields. URJA 2k19, consists of several innovative events such as Prezento – National-Level Technical Paper and Poster Presentation contest; AeroDRONE – Drone design contest, where the drones will have to be designed to be driven through a track provided by us to complete in shortest time; Robo-Vertigo – All Terrain Vehicle to be driven in an ATV styled track and must complete the track in shortest time to win; ProjectXpo – Project exhibition for all electronics based streams; Arduino-IOT-Spire – 24 hours Hackathon involving five on-spot problems that need to be solved using only Arduino; Tech-Jumanji – Technical treasure hunt; SimZone – Simulation contest involving a problem statement that will have to be simulated in stipulated amount of time in provided software; Tech-Rig – Circuit debugging contest using analog and digital hardware components only; Fast-track – Non-technical contest with time constraints; ExhiTrash – Non-technical event to create an innovative model from the scratch; QUIZitive – Technical quiz with four levels; PixStream – Non-Technical Video making contest based on given theme with video preparation duration six hours for making a video not exceeding a run time of twelve minutes; ACE – If a single college wins more than eight contests and participated in all the events then they will be rewarded the ACE.
english
package org.javacord.api.interaction;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Pairs a server application command ID with the list of permission
 * overrides that should be applied to it.
 */
public class ServerApplicationCommandPermissionsBuilder {

    // ID of the command whose permissions are being updated.
    private final long commandId;
    // Defensive copy of the permissions supplied at construction time.
    private final List<ApplicationCommandPermissions> permissions;

    /**
     * Creates an instance of this server application command permissions builder.
     *
     * @param commandId The command ID which should be updated.
     * @param permissions The permissions for the command which should be updated.
     */
    public ServerApplicationCommandPermissionsBuilder(long commandId,
                                                      List<ApplicationCommandPermissions> permissions) {
        this.commandId = commandId;
        // Copy so later mutation of the caller's list cannot affect this builder.
        this.permissions = new ArrayList<>(permissions);
    }

    /**
     * Gets the command ID.
     *
     * @return The command ID.
     */
    public long getCommandId() {
        return commandId;
    }

    /**
     * Gets a list of the application command permissions.
     *
     * @return A list containing the application command permissions
     *         (an unmodifiable view of the internal copy).
     */
    public List<ApplicationCommandPermissions> getPermissions() {
        return Collections.unmodifiableList(permissions);
    }
}
java
[{"namaKab":"KOTA BAU BAU","originalFilename":"IMG-20180717-WA0003.jpg","namaPartai":"Persatuan Indonesia","id":218179,"noUrut":1,"nama":"<NAME>","stringJenisKelamin":"Perempuan"},{"namaKab":"KOTA BAU BAU","originalFilename":"49.jpg","namaPartai":"Persatuan Indonesia","id":246848,"noUrut":2,"nama":"<NAME>","stringJenisKelamin":"Laki-Laki"},{"namaKab":"KOTA BAU BAU","originalFilename":"DSC_4171.JPG","namaPartai":"Persatuan Indonesia","id":230502,"noUrut":3,"nama":"<NAME>","stringJenisKelamin":"Laki-Laki"},{"namaKab":"KOTA BAU BAU","originalFilename":"BJBCG.jpg","namaPartai":"Persatuan Indonesia","id":246073,"noUrut":4,"nama":"<NAME>","stringJenisKelamin":"Perempuan"},{"namaKab":"KOTA BAU BAU","originalFilename":"DSC_2546.JPG","namaPartai":"Persatuan Indonesia","id":301967,"noUrut":5,"nama":"<NAME>","stringJenisKelamin":"Laki-Laki"},{"namaKab":"KOTA BAU BAU","originalFilename":"DSC_0118.JPG","namaPartai":"Persatuan Indonesia","id":247707,"noUrut":6,"nama":"THAMRIN","stringJenisKelamin":"Laki-Laki"},{"namaKab":"KOTA BAU BAU","originalFilename":"4x6.jpg","namaPartai":"Persatuan Indonesia","id":248476,"noUrut":7,"nama":"<NAME>, SKM","stringJenisKelamin":"Perempuan"}]
json
A girl was allegedly beaten to death by her parents at Timmampeta, a suburb of Jafarghadh mandal of Warangal district. According to reports, the parents had finalized a marriage alliance, but she refused to marry the boy. Angered by her decision, the parents battered her badly, resulting in her death. The police are yet to ascertain the facts of the incident.
english
import os

# Flag the process as running under unit tests so application code can
# adjust its behaviour accordingly.
os.environ.update({"CSW_CRITERIA_UNIT_TESTING": "1"})
python
{"meta": {"code": 200, "requestId": "595b1838351e3d64aea6e845"}, "notifications": [{"item": {"unreadCount": 0}, "type": "notificationTray"}], "response": {"photos": {"items": [{"height": 537, "source": {"url": "https://foursquare.com/download/#/iphone", "name": "Foursquare for iOS"}, "width": 720, "prefix": "https://igx.4sqi.net/img/general/", "createdAt": 1332766665, "id": "4f7067c9e4b07e41f0f88386", "venue": {"url": "http://www.mcdonalds.com.sg", "like": false, "location": {"formattedAddress": ["#01-18/18A, IMM Building (2 Jurong East St 21)", "609601", "Singapore"], "city": "Singapore", "lng": 103.745887, "cc": "SG", "country": "Singapore", "crossStreet": "2 Jurong East St 21", "lat": 1.334872, "address": "#01-18/18A, IMM Building", "labeledLatLngs": [{"label": "display", "lat": 1.334872, "lng": 103.745887}], "postalCode": "609601"}, "categories": [{"icon": {"prefix": "https://ss3.4sqi.net/img/categories_v2/food/fastfood_", "suffix": ".png"}, "name": "Fast Food Restaurant", "id": "4bf58dd8d48988d16e941735", "pluralName": "Fast Food Restaurants", "primary": true, "shortName": "Fast Food"}], "verified": false, "contact": {"formattedPhone": "+65 6899 2698", "phone": "+6568992698"}, "allowMenuUrlEdit": true, "stats": {"tipCount": 8, "usersCount": 2021, "checkinsCount": 3836}, "id": "4b769b63f964a52029542ee3", "name": "McDonald's", "beenHere": {"lastCheckinExpiredAt": 0}}, "suffix": "/P3AOlFVXC44NdknksP57RVZmfncZ9fInmdIR5mvCwUs.jpg", "visibility": "public"}, {"height": 720, "source": {"url": "https://foursquare.com/download/#/iphone", "name": "Foursquare for iOS"}, "width": 537, "prefix": "https://igx.4sqi.net/img/general/", "createdAt": 1332759757, "id": "4f704ccde4b09e735722488a", "venue": {"location": {"formattedAddress": ["28B Harding Rd (Dempsey Hill)", "249549", "Singapore"], "city": "Singapore", "lng": 103.81111216329016, "cc": "SG", "country": "Singapore", "crossStreet": "Dempsey Hill", "lat": 1.3045829146652086, "address": "28B Harding Rd", "labeledLatLngs": 
[{"label": "display", "lat": 1.3045829146652086, "lng": 103.81111216329016}], "postalCode": "249549"}, "contact": {"twitter": "psharding", "facebookName": "PS.Cafe", "formattedPhone": "+65 9070 8782", "phone": "+6590708782", "facebookUsername": "PS.Cafe", "facebook": "165349006863217"}, "verified": true, "allowMenuUrlEdit": true, "stats": {"tipCount": 188, "usersCount": 6797, "checkinsCount": 10285}, "id": "4b08ac7df964a520951023e3", "storeId": "", "url": "http://www.pscafe.com", "like": false, "categories": [{"icon": {"prefix": "https://ss3.4sqi.net/img/categories_v2/food/cafe_", "suffix": ".png"}, "name": "Caf\u00e9", "id": "4bf58dd8d48988d16d941735", "pluralName": "Caf\u00e9s", "primary": true, "shortName": "Caf\u00e9"}], "beenHere": {"lastCheckinExpiredAt": 0}, "name": "PS.Cafe", "venuePage": {"id": "54923688"}}, "suffix": "/4FrQmqh80DgxW0QrKEGH9YVvYVw-nair14737duPWas.jpg", "visibility": "public"}, {"height": 720, "source": {"url": "https://foursquare.com/download/#/iphone", "name": "Foursquare for iOS"}, "width": 537, "prefix": "https://igx.4sqi.net/img/general/", "createdAt": 1295109713, "id": "4d31ce51d24e28fd57a3fb59", "venue": {"venueRatingBlacklisted": true, "like": false, "location": {"city": "Singapore", "isFuzzed": true, "cc": "SG", "country": "Singapore", "lat": 1.3412246561059844, "lng": 103.7347091418552, "formattedAddress": ["600230", "Singapore"]}, "categories": [{"icon": {"prefix": "https://ss3.4sqi.net/img/categories_v2/building/home_", "suffix": ".png"}, "name": "Home (private)", "id": "4bf58dd8d48988d103941735", "pluralName": "Homes (private)", "primary": true, "shortName": "Home"}], "verified": false, "contact": {}, "beenHere": {"lastCheckinExpiredAt": 0}, "stats": {"tipCount": 0, "usersCount": 3, "checkinsCount": 431}, "id": "4cf6572fe308236ac18ee5ac", "name": "Chris & <NAME>"}, "suffix": "/IPVDLBKDN5VIPCDCZTHXRRRZOFJDUX3PDNLLN3LLM1KPAAOH.jpg", "visibility": "public"}], "count": 3}, "totalCount": 3}}
json
<filename>accounting_app/accounting_app/doctype/purchase_invoice/purchase_invoice.json<gh_stars>0 { "actions": [], "autoname": "PINV.#####", "creation": "2021-05-09 15:34:51.359222", "doctype": "DocType", "editable_grid": 1, "engine": "InnoDB", "field_order": [ "company", "posting_date", "credit_to", "column_break_3", "supplier", "payment_due_date", "expense_account", "section_break_8", "item", "section_break_10", "total_qty", "column_break_12", "total_amount" ], "fields": [ { "fieldname": "supplier", "fieldtype": "Link", "label": "Supplier", "options": "Party" }, { "fieldname": "posting_date", "fieldtype": "Date", "label": "Posting Date" }, { "fieldname": "payment_due_date", "fieldtype": "Date", "label": "Payment Due Date" }, { "fieldname": "total_qty", "fieldtype": "Int", "label": "Total Qty" }, { "fieldname": "total_amount", "fieldtype": "Int", "label": "Total Amount" }, { "fieldname": "credit_to", "fieldtype": "Link", "label": "Credit To", "options": "Account" }, { "fieldname": "expense_account", "fieldtype": "Link", "label": "Expense Account", "options": "Account" }, { "fieldname": "item", "fieldtype": "Table", "label": "Item", "options": "Purchase_Invoice_Item" }, { "fieldname": "company", "fieldtype": "Link", "label": "Company", "options": "Company" }, { "fieldname": "column_break_3", "fieldtype": "Column Break" }, { "fieldname": "section_break_8", "fieldtype": "Section Break" }, { "fieldname": "section_break_10", "fieldtype": "Section Break" }, { "fieldname": "column_break_12", "fieldtype": "Column Break" } ], "index_web_pages_for_search": 1, "links": [], "modified": "2021-06-01 13:35:51.603876", "modified_by": "Administrator", "module": "Accounting App", "name": "Purchase Invoice", "owner": "Administrator", "permissions": [ { "create": 1, "delete": 1, "email": 1, "export": 1, "print": 1, "read": 1, "report": 1, "role": "System Manager", "share": 1, "write": 1 } ], "quick_entry": 1, "sort_field": "modified", "sort_order": "DESC", "title_field": "company", 
"track_changes": 1 }
json
<HTML> <HEAD> <META NAME="Generator" CONTENT="Corel WordPerfect 8"> <TITLE></TITLE> </HEAD> <BODY TEXT="#000000" LINK="#0000ff" VLINK="#551a8b" ALINK="#ff0000" BGCOLOR="white"> <P ALIGN="CENTER">NATIONAL BUREAU OF ECONOMIC RESEARCH, INC.</P> <BR WP="BR1"><BR WP="BR2"> <P ALIGN="CENTER">SUMMER INSTITUTE 2000</P> <BR WP="BR1"><BR WP="BR2"> <P ALIGN="CENTER">Law and Economics</P> <P ALIGN="CENTER"><NAME> and <NAME>, Organizers</P> <BR WP="BR1"><BR WP="BR2"> <P ALIGN="CENTER">Royal Sonesta Hotel</P> <P ALIGN="CENTER">5 Cambridge Parkway</P> <P ALIGN="CENTER">Cambridge, Massachusetts</P> <BR WP="BR1"><BR WP="BR2"> <P ALIGN="CENTER">August 4 and 5, 2000</P> <BR WP="BR1"><BR WP="BR2"> <P ALIGN="CENTER"><STRONG><U>PRELIMINARY PROGRAM</U></STRONG> <FONT FACE="Times New Roman"><U></U></FONT></P> <P><FONT FACE="Univers"><STRONG><U>FRIDAY, AUGUST 4</U></STRONG></FONT><FONT FACE="Univers"><U></U></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers">12:00 N Lunch</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 1:00 PM CHRISTINE JOLLS, Harvard University and NBER</FONT></P> <P><FONT FACE="Univers"> <EM>Employment Mandates and Antidiscrimination Law</EM> </FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> Discussant: <NAME>, Princeton University and NBER</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 1:45 PM Break</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 2:00 PM <NAME>, University of Pennsylvania</FONT></P> <P><FONT FACE="Univers"> <NAME>, University of Pennsylvania and NBER</FONT></P> <P><FONT FACE="Univers"> <EM>Does Repeat Play Elicit Cooperation? 
Evidence from Federal Civil Litigation</EM></FONT></P> <P><FONT FACE="Univers"> </FONT></P> <P><FONT FACE="Univers"> Discussant: <NAME>, University of Southern California</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 2:45 PM <NAME>, UC, Berkeley and NBER</FONT></P> <P><FONT FACE="Univers"> <NAME>, UC, Berkeley</FONT></P> <P><FONT FACE="Univers"> <EM>The Accident Externalities from Driving</EM></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> Discussant: <NAME>, Yale University</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 3:30 PM Break</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 3:45 PM KIP VISCUSI, Harvard University and NBER</FONT></P> <P><FONT FACE="Univers"> <EM><a href="http://www.nber.org/~confer/2000/si2000/viscusi.pdf">Jurors, Judges, and Mistreatment of Risk by the Courts</a></EM></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> Discussant: <NAME>, Carnegie-Mellon University</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 4:30 PM <NAME> and <NAME>, University of Michigan</FONT></P> <P><FONT FACE="Univers"> <EM>Personal Bankruptcy and the Level of Entrepreneurial Activity</EM></FONT></P> <P><FONT FACE="Univers"><EM> </EM></FONT></P> <P><FONT FACE="Univers"> Discussant: <NAME>, Yale University</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 5:15 PM Adjourn</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 6:30 PM Group Dinner</FONT></P> <P><FONT FACE="Univers"> Legal Sea Foods</FONT></P> <P><FONT FACE="Univers"> 5 Cambridge Center</FONT></P> <P><FONT FACE="Univers"> Cambridge, MA</FONT></P> <P><FONT FACE="Univers"> </FONT><FONT FACE="Univers"></FONT></P> <P><FONT FACE="Univers"><STRONG>SI00 LE Program</STRONG></FONT></P> <P><FONT FACE="Univers"><STRONG>Page two</STRONG></FONT><FONT FACE="Univers"></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"><STRONG><U>SATURDAY, AUGUST 5:</U></STRONG></FONT><FONT 
FACE="Univers"><U></U></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 8:30 AM Coffee and Doughnuts</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 8:45 AM <NAME>, University of Pennsylvania and NBER</FONT></P> <P><FONT FACE="Univers"> <EM><a href="http://www.nber.org/~confer/2000/si2000/chang.pdf">A Liberal Theory of Social Welfare: Fairness, Utility, and the Pareto Principle</a></EM></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"><EM> </EM><NAME> and <NAME>, Harvard University and NBER</FONT></P> <P><FONT FACE="Univers"> <EM><a href="http://www.nber.org/~confer/2000/si2000/kaplow.pdf">Any NonWelfarist Method of Policy Assessment Violates the Pareto Principal</a></EM></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> (No discussant this session)</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> 9:45 AM Break</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers">10:00 AM <NAME> and <NAME>, Harvard University and NBER</FONT></P> <P><FONT FACE="Univers"> <EM>Incentives for Enforcement</EM></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> Discussant: <NAME>, Harvard University</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers">10:45 AM <NAME>, Stanford University and NBER</FONT></P> <P><FONT FACE="Univers"> <NAME>, Harvard University</FONT></P> <P><FONT FACE="Univers"> <EM>Corruption and Optimal Law Enforcement</EM></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> Discussant: <NAME>, University of Illinois</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers">11:30 AM Break</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers">11:45 AM <NAME>, Northwestern University and NBER</FONT></P> <P><FONT FACE="Univers"> <EM>Settlement with Multiple Plaintiffs: The Role of Insolvency</EM></FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers"> Discussant: <NAME>, Vanderbilt University</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT 
FACE="Univers">12:30 PM Adjourn</FONT></P> <BR WP="BR1"><BR WP="BR2"> <P><FONT FACE="Univers">4/19/00</FONT></P> </BODY> </HTML>
html
/* Page background. */
body { background-color: lightgray; }

/* Collapse table borders into single black lines. */
table, th, td { border: 1px solid black; border-collapse: collapse; }
th, td { padding: 15px; }
/* NOTE(review): border-spacing has no effect while border-collapse is
   `collapse` (set above) — confirm whether collapse or spacing is intended. */
table { border-spacing: 5px; margin:20px }

form { text-align: center; padding: 5px; }

/* NOTE(review): `border: solid 5` is missing a unit (e.g. 5px), so browsers
   drop the declaration; it is also overridden by the later `input` rule
   below — confirm which border is intended. */
input { border: solid 5; color: black; padding: 8px 16px; margin: 2px; cursor: pointer; font-size: 20px; border-radius: 5px; }
input, select, textarea { color: black; }

/* Page banner. */
header { text-align: center; font-size: 50px; background-color: darkblue; color: white; }
header > p { text-align: center; margin: 5px; font-size: 20px; background-color: darkblue; color: white; border-bottom: solid red 5px; padding-bottom: 40px; }

/* Duplicate selector: this later rule wins the cascade for `border`. */
input { border: solid 1px gray; }

button { display: inline; border: 0px; background-color: blue; padding: 2px; color: whitesmoke; border-radius: 5px; outline: none; cursor: pointer; margin-left: 5px; }
css
One month after a deadly exchange of artillery fire, the two Koreas ramped up their rhetoric, with South Korea’s president pledging unsparing retaliation if attacked again and a top North Korean official threatening a “sacred” nuclear war if provoked. South Korean troops, tanks and fighter jets put on a thundering display of force Thursday as President Lee Myung-bak visited with soldiers at a base near the border, while North Korea’s elite marked a key military anniversary by lashing out at the South for encouraging war. For both countries, the rallying cries and military maneuvers mainly seemed designed to build support at home. But they raised fears anew of all-out war on a peninsula that New Mexico Gov. Bill Richardson called a “tinderbox” after returning from a visit to the North Korean capital this week. The two Koreas and their allies called a truce in 1953 to end three years of devastating war, but violence has flared up from time to time, most recently in the disputed waters off their west coast. North Korea does not recognize the maritime line drawn by U. N. forces, and the territorial dispute in the Yellow Sea has erupted into deadly naval skirmishes. In March, a South Korean warship went down in the western waters, killing 46 sailors. And a month ago, South Korean live-fire drills in nearby waters triggered a North Korean artillery shower on Yeonpyeong Island that killed four South Koreans, the first attack on a civilian area since the Korean War. Caught by surprise, Seoul since has beefed up its rules of engagement and has staged military drills, including joint exercises with U. S. troops, meant to remind the North of its superior firepower. The South even carried out provocative artillery drills from Yeonpyeong Island on Monday in a bold dare to the North to retaliate. The drills continued Thursday, with tanks firing artillery and fighter jets dropping bombs at training grounds in Pocheon, some 20 miles (30 kilometres) from the North. 
The boom of cannons echoed throughout the valley and the hills erupted in smoke during the brief but dramatic exercise. There was a theatrical quality to the exercises: Dozens of excited schoolchildren in bright yellow jackets were shuttled to the site to watch from bleachers as military music blared from loudspeakers. President Lee met with troops manning a front-line army base in the east on the type of morale-boosting visit more commonly seen in the North. He vowed to retaliate if attacked again. “I had thought that we could safeguard peace if we had patience, but that wasn’t the case,” he told the troops, according to his office. Any surprise attack will be met with an “unsparing” response, he warned. After days of showing restraint, North Korea condemned the drills as a “grave military provocation. ” Defense chief Kim Yong Chun said North Korea was prepared to launch a “sacred war” and poised to use its nuclear capabilities to defend itself. Kim said in Pyongyang that the military would deal “more devastating physical blows” if its rivals violate North Korean territory by even a millimetre. He also threatened to “wipe out” South Korea and the U. S. if they start a war, the official Korean Central News Agency reported. North Korea is believed to have enough weaponised plutonium for at least a half-dozen atomic bombs, and also has revealed a uranium enrichment program that would give it a second way to make nuclear weapons. After negotiating for years with its neighbours and the U. S. on dismantling its nuclear program in exchange for aid and concessions, Pyongyang walked away from the talks in 2009. China, North Korea’s only major ally and the impoverished nation’s main benefactor, has pushed for a resumption of the disarmament talks as a vehicle for dialogue. 
Richardson warned in an interview with The Associated Press on Wednesday that violence could flare anew if the South continues its drills and the North abandons its stated intention of refraining from retaliation. “The situation is still a tinderbox. There’s still enormous tension, enormous mistrust and I believe diplomacy is what is needed to get us out of this tinderbox,” he said in New Mexico after returning from a private trip to Pyongyang. But he said the North Koreans “realize they went too far,” and appear willing to reach out and change. North Korea is driven not just by military zeal but also broader diplomatic, political and economic issues, according to a senior South Korean government official who spoke on condition of anonymity, citing government policy. He said the succession movement in North Korea has added an extra layer of unpredictability to its behavior, with the leadership using provocations to build solidarity and loyalty at home. Leader Kim Jong Il is believed to be grooming his young son Kim Jong Un to succeed him. The heir apparent made his formal political debut at a ruling Workers’ Party convention in September. In recent days, North Korea has turned its attention to glorifying Kim Jong Il and his “songun,” or “military-first,” policy. A rare statement sent to the AP, apparently by the North Korean Embassy in Beijing, praised Kim’s role as commander of the Korean People’s Army. “The traditional single-hearted unity of the Korean society in which all the people are united around their leader in one ideology, mind and purpose has entered a new, higher stage of development under Kim Jong Il’s songun—based leadership,” it said. North Koreans will be marking Kim’s appointment to supreme commander on Friday, a day that is also celebrated by many Christians in South Korea as Christmas Eve.
english
{ "directions": [ "Combine black beans, corn, green chilies, tomatoes, green onions, shrimp, and cilantro in a large bowl; stir. Combine remaining ingredients and stir well.", "Stir in olive oil, cumin, garlic powder, lime juice, and hot sauce. Best refrigerated 2 hours before serving." ], "ingredients": [ "1 (15 ounce) can black beans, rinsed and drained", "1 cup frozen corn, thawed", "1 (4 ounce) can chopped green chilies", "1 (14.5 ounce) can Hunt's\u00ae Diced Tomatoes, drained", "6 green onions, chopped", "1 (12 ounce) package frozen small cooked shrimp, thawed", "2 tablespoons chopped fresh cilantro", "1/2 teaspoon cumin", "1/8 teaspoon garlic powder", "1 lime, juiced", "1 tablespoon olive oil", "Hot sauce to taste", "Salt and pepper to taste (optional)" ], "language": "en-US", "source": "allrecipes.com", "tags": [], "title": "Black Bean, Corn, Tomato, and Shrimp Salad", "url": "http://allrecipes.com/recipe/256312/black-bean-corn-tomato-and-shrimp/" }
json
/* Global reset: border-box sizing everywhere, base typography on <body>. */
html { box-sizing: border-box; } *, *:before, *:after { box-sizing: inherit; } body { margin: 0; font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif; font-size: 1rem; font-weight: 400; line-height: 1.5; color: #212529; background-color: #fff; }
/* Sign-in / sign-up forms: narrow centered column. */
.form-signin, .form-signup { width: 100%; max-width: 350px; padding: 15px; margin: auto; }
/* Star-rating widget: radio inputs are moved off-screen; labels render '★'
   glyphs and float right, so the :hover / :checked sibling selectors below
   colour the stars from the left up to the hovered/selected one. */
.rate { float: left; height: 46px; padding: 0 10px; } .rate:not(:checked)>input { position: absolute; top: -9999px; } .rate:not(:checked)>label { float: right; width: 1em; overflow: hidden; white-space: nowrap; cursor: pointer; font-size: 30px; color: #ccc; } .rate:not(:checked)>label:before { content: '★ '; } .rate>input:checked~label { color: #ffc700; } .rate:not(:checked)>label:hover, .rate:not(:checked)>label:hover~label { color: #deb217; } .rate>input:checked+label:hover, .rate>input:checked+label:hover~label, .rate>input:checked~label:hover, .rate>input:checked~label:hover~label, .rate>label:hover~input:checked~label { color: #c59b08; }
/* Hero banner with a full-bleed background photo; caption gets a translucent white backdrop. */
.pets { background-image: url(https://cdn.ttgtmedia.com/visuals/ComputerWeekly/Hero%20Images/pets-cats-dogs-adobe-hero.jpg); background-repeat: no-repeat; background-position: center; background-size: cover; display: flex; justify-content: center; align-items: center; padding: 50px 0; width: 100%; margin: 0 auto; } .pets figcaption { padding: 5px; background-color: #ffffffc0; }
/* Page container plus site header / navbar layout. */
.container { width: 80vw; margin: 0 auto; } header { text-align: center; padding: .5rem 0; color: black; font-size: 1.2rem; } header>.container { display: flex; align-items: center; justify-content: space-between; } .navbar-brand { padding-top: .3125rem; padding-bottom: .3125rem; margin-right: 1rem; font-size: 1.3rem; text-decoration: none; white-space: nowrap; color: black; } .navbar-nav { margin: 0; display: flex; align-items: center; flex-direction: row; padding-left: 0; margin-bottom: 0; list-style: none; } .nav-item { padding: 10px 10px; } .nav-link { color: black; text-decoration: none; }
/* Card grid ("album") and individual cards, including a horizontal variant. */
.album { display: flex; flex-wrap: wrap; justify-content: center; } .album>.card, .list>.card { width: 350px; margin: 10px; } .card>.list { margin: 10px; display: flex; justify-content: center; flex-wrap: wrap; } .card { border: 1px solid rgba(0, 0, 0, .125); border-radius: .25rem; } .card.card-horizontal { display: flex; flex-direction: row; } .card>img { border-top-left-radius: .25rem; border-top-right-radius: .25rem; object-fit: cover; width: 100%; height: 225px; } .card.card-horizontal>.card-image img { width: 300px; height: auto; } .card>form { padding: 10px; } .card-header { padding: .5rem 1rem; margin-bottom: 0; background-color: rgba(0, 0, 0, .03); border-bottom: 1px solid rgba(0, 0, 0, .125); display: flex; align-items: center; justify-content: space-between; } .card-footer { padding: .5rem 1rem; background-color: rgba(0, 0, 0, .03); border-top: 1px solid rgba(0, 0, 0, .125); } .card-body { padding: 1rem; } form.filter { display: flex; }
/* Buttons: Bootstrap-like .btn base plus colour variants. */
.btn { display: inline-block; font-weight: 400; line-height: 1.5; color: #212529; text-align: center; text-decoration: none; vertical-align: middle; cursor: pointer; user-select: none; background-color: transparent; border: 1px solid transparent; padding: .375rem .75rem; font-size: 1rem; border-radius: 25px; transition: color .15s ease-in-out, background-color .15s ease-in-out, border-color .15s ease-in-out, box-shadow .15s ease-in-out; } .btn-primary { color: #fff; background-color: #0d6efd; border-color: #0d6efd; } .btn-primary:hover { color: #fff; background-color: #0b5ed7; border-color: #0a58ca; } .btn-danger { color: #fff; background-color: #dc3545; border-color: #dc3545; } .btn-danger:hover { color: #fff; background-color: #bb2d3b; border-color: #b02a37; } .btn-outline-secondary { color: #6c757d; border-color: #6c757d; } .btn-success { color: #fff; background-color: #198754; border-color: #198754; } .btn-success:hover { color: #fff; background-color: #157347; border-color: #146c43; }
/* Full-height login/register pages and form controls. */
.login-page, .register-page { display: flex; align-items: center; height: 100vh; } .form-control { display: block; width: 100%; padding: .375rem .75rem; font-size: 1rem; font-weight: 400; color: #212529; background-color: #fff; background-clip: padding-box; border: 1px solid #ced4da; border-radius: .25rem; transition: border-color .15s ease-in-out, box-shadow .15s ease-in-out; } .form-signin>div, .form-signup>div { margin-bottom: 10px; } .text-center { text-align: center; } .form-signin .btn[type="submit"], .form-signup .btn[type="submit"] { width: 100%; }
/* Alerts and spacing utilities. */
.alert-danger { color: #842029; background-color: #f8d7da; border-color: #f5c2c7; } .alert { position: relative; padding: 1rem 1rem; margin-bottom: 1rem; border: 1px solid transparent; border-radius: .25rem; } .mb-3 { margin-bottom: 10px; } .mb-5 { margin-bottom: 20px; }
/* List groups (vertical and horizontal), comment header layout, pagination. */
.list-group { display: flex; flex-direction: column; padding-left: 0; margin-bottom: 0; border-radius: .25rem; } .list-group-horizontal { flex-direction: row; overflow-x: scroll; } .list-group-item { position: relative; display: block; padding: .5rem 1rem; color: #212529; text-decoration: none; background-color: #fff; border: 1px solid rgba(0, 0, 0, .125); } .comment-header { display: flex; align-items: center; /* justify-content: space-between; */ } .comment-header-data { display: flex; flex-direction: column; margin-right: auto; } ul.pagination { list-style: none; display: flex; } ul>li { padding: 10px; }
css
<reponame>maddrag0n/uidesign-toolkit { "name": "ntcjs", "version": "1.1.2", "description": "A Node CommonJS compatible wrapper for the Name That Color library (ntc js) - http://chir.ag/projects/ntc/", "main": "./src/index.js", "scripts": { "test": "tape ./test/*.js" }, "repository": { "type": "git", "url": "git+https://github.com/pbojinov/name-that-color.git" }, "keywords": ["name", "that", "color", "hex", "rgb", "hue", "ntc", "ntc.js"], "author": "<NAME> (<EMAIL>)", "license": "MIT", "bugs": { "url": "https://github.com/pbojinov/name-that-color/issues" }, "homepage": "https://github.com/pbojinov/name-that-color#readme", "devDependencies": { "tap": "^10.7.1", "tap-spec": "^4.1.1", "tape": "^4.8.0" } }
json
To: Members of the Council, Dear Member, We are pleased to inform you that SRTEPC will be having a booth at “Technotex 2023” scheduled to be held at the Bombay Exhibition Centre, Mumbai from February 22 to 24,2023 . “Technotex 2023”is an exhibition for Technical Textiles Products being organized by the Ministry of Textiles in association with FICCI. Over 150 foreign buyers from about 30 countries including the US , Germany , Japan, South Korea etc are expected to visit “Technotex 2023” . Considering this , SRTEPC will provide an opportunity for its members to display samples of Technical Textiles products at its booth , where foreign buyers will be visiting in large numbers . Further , since SRTEPC have been assigned the role of export promotion of Technical Textiles , many Associations / Trade Bodies / buying agents located overseas are expected to visit SRTEPC booth , in addition to the buyers. In view of the above , we would like to invite you to display samples of Technical Textiles products of your Company at SRTEPC booth . Your products may attract the attention of the foreign buyers / buying agents which may translate into export orders. You may also send us your Brochures / Visiting Cards with QR Codes for display at SRTEPC booth. Any enquiry received for your products will be communicated to you so that you can take it forward. The charges for Display of Samples - ( maximum 4 ) is Rs. 25000 /- and Rs. 10000/- for Display of Brochures and Visiting cards . For payment of fees , cheque should be made in favour of “The Synthetic & Rayon Textiles Export Promotion Council” . For online payments , bank details are as follows : Please Note that the Samples / Brochures should reach the Head office of SRTEPC located in Mumbai positively on or before February 21, 2023 at 5.00 p.m. In case you need any further details or clarification please get in touch with Mr. Aishwary Jaiswal, Deputy Director (Cell : +91 91180 23531)
english
;(function ($, window, document, undefined) {
    /**
     * jQuery plugin: scans the matched elements' HTML for postcodes and wraps
     * each match in a Google Maps link.
     *
     * Options:
     *   mapurl    - base URL the matched postcode is appended to.
     *   tooltip   - suffix appended to the link's title attribute.
     *   newWindow - when true, links open in a new tab (target="_blank").
     *   country   - 'uk', 'us', 'canada', or anything else to use `exp`.
     *   exp       - custom regex source string used when `country` is unknown.
     *
     * Returns the jQuery set for chaining.
     */
    $.fn.mapit = function (options) {
        var exp;
        var settings = $.extend({
            mapurl: 'https://maps.google.co.uk/maps?q=',
            tooltip: ' - Google maps',
            newWindow: true,
            country: 'uk',
            exp: ''
        }, options);

        return this.each(function () {
            switch (settings.country) {
                case 'uk':
                    exp = '(([gG][iI][rR] {0,}0[aA]{2})|((([a-pr-uwyzA-PR-UWYZ][a-hk-yA-HK-Y]?[0-9][0-9]?)|(([a-pr-uwyzA-PR-UWYZ][0-9][a-hjkstuwA-HJKSTUW])|([a-pr-uwyzA-PR-UWYZ][a-hk-yA-HK-Y][0-9][abehmnprv-yABEHMNPRV-Y]))) {0,}[0-9][abd-hjlnp-uw-zABD-HJLNP-UW-Z]{2}))';
                    break;
                case 'us':
                    exp = '(\\d{5}?)';
                    break;
                case 'canada':
                    // BUG FIX: the original used single-backslash "\d" inside the
                    // string literal; JavaScript collapses the unrecognized escape
                    // to a plain "d", so the Canadian pattern could never match a
                    // real postal code (e.g. "A1A 1A1"). Use "\\d" throughout.
                    exp = '([ABCEGHJKLMNPRSTVXY]{1}\\d{1}[A-Z]{1} *\\d{1}[A-Z]{1}\\d{1})';
                    break;
                default:
                    // Caller-supplied pattern for unsupported countries.
                    exp = settings.exp;
            }
            // Replace every match in-place with an anchor; "$1" re-inserts the
            // matched postcode into href, title and link text.
            $(this).html($(this).html().replace(
                new RegExp(exp, 'gi'),
                '<a href="' + settings.mapurl + '$1" ' +
                (settings.newWindow ? ' target="_blank"' : '') +
                ' class="mapit" title="$1' + settings.tooltip + '">$1</a>'
            ));
        });
    };
}(jQuery));
javascript
In this Radio Jockey film, Arjun Nandakumar and Nimisha Suresh played the primary leads. Radio Jockey was released in theaters on 20 Sep 2013. Movies like Jeenthoal, Madhura Manohara Moham, Anveshippin Kandethum and others in a similar vein had the same genre but quite different stories. The soundtracks and background music for Radio Jockey were composed by Jayachandran. The movie Radio Jockey belonged to the Drama genre.
english
package com.facepp.demo.decoder; import android.media.MediaCodec; import android.media.MediaExtractor; import android.media.MediaFormat; import android.os.Environment; import android.util.Log; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.ByteBuffer; /** * see : https://bigflake.com/mediacodec/ExtractMpegFramesTest_egl14.java.txt * see : https://stackoverflow.com/questions/5960247/convert-bitmap-array-to-yuv-ycbcr-nv21 * see : https://www.jianshu.com/p/4a23f48facfb * * Created by mrsimple on 13/3/2019 */ public class TestDecoder { private static final String TAG = "TestDecoder"; private static final boolean VERBOSE = true; // lots of logging // where to find files (note: requires WRITE_EXTERNAL_STORAGE permission) private static final File FILES_DIR = Environment.getExternalStorageDirectory(); private static final int MAX_FRAMES = 50; // stop extracting after this many /** * Tests extraction from an MP4 to a series of PNG files. * <p> * We scale the video to 640x480 for the PNG just to demonstrate that we can scale the * video with the GPU. If the input video has a different aspect ratio, we could preserve * it by adjusting the GL viewport to get letterboxing or pillarboxing, but generally if * you're extracting frames you don't want black bars. */ public void extractVideoFrames(File inputFile) throws IOException { MediaCodec decoder = null; CodecOutputSurface outputSurface = null; MediaExtractor extractor = null; int saveWidth = 640; int saveHeight = 480; try { // The MediaExtractor error messages aren't very useful. Check to see if the input // file exists so we can throw a better one if it's not there. 
if (!inputFile.canRead()) { throw new FileNotFoundException("Unable to read " + inputFile); } extractor = new MediaExtractor(); extractor.setDataSource(inputFile.toString()); int trackIndex = selectTrack(extractor); if (trackIndex < 0) { throw new RuntimeException("No video track found in " + inputFile); } extractor.selectTrack(trackIndex); MediaFormat format = extractor.getTrackFormat(trackIndex); saveWidth = format.getInteger(MediaFormat.KEY_WIDTH) ; saveHeight = format.getInteger(MediaFormat.KEY_HEIGHT) ; if (VERBOSE) { Log.d(TAG, "Video size is " + format.getInteger(MediaFormat.KEY_WIDTH) + "x" + format.getInteger(MediaFormat.KEY_HEIGHT)); } // Could use width/height from the MediaFormat to get full-size frames. outputSurface = new CodecOutputSurface(saveWidth, saveHeight); // Create a MediaCodec decoder, and configure it with the MediaFormat from the // extractor. It's very important to use the format from the extractor because // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks. String mime = format.getString(MediaFormat.KEY_MIME); decoder = MediaCodec.createDecoderByType(mime); decoder.configure(format, outputSurface.getSurface(), null, 0); decoder.start(); doExtract(extractor, trackIndex, decoder, outputSurface); } finally { // release everything we grabbed if (outputSurface != null) { outputSurface.release(); outputSurface = null; } if (decoder != null) { decoder.stop(); decoder.release(); decoder = null; } if (extractor != null) { extractor.release(); extractor = null; } } } /** * Selects the video track, if any. * * @return the track index, or -1 if no video track is found. */ private int selectTrack(MediaExtractor extractor) { // Select the first video track we find, ignore the rest. 
int numTracks = extractor.getTrackCount(); for (int i = 0; i < numTracks; i++) { MediaFormat format = extractor.getTrackFormat(i); String mime = format.getString(MediaFormat.KEY_MIME); if (mime.startsWith("video/")) { if (VERBOSE) { Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format); } return i; } } return -1; } /** * Work loop. */ static void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder, CodecOutputSurface outputSurface) throws IOException { final int TIMEOUT_USEC = 10000; ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers(); MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); int inputChunk = 0; int decodeCount = 0; long frameSaveTime = 0; boolean outputDone = false; boolean inputDone = false; while (!outputDone) { if (VERBOSE) Log.d(TAG, "loop"); // Feed more data to the decoder. if (!inputDone) { int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC); if (inputBufIndex >= 0) { ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex]; // Read the sample data into the ByteBuffer. This neither respects nor // updates inputBuf's position, limit, etc. int chunkSize = extractor.readSampleData(inputBuf, 0); if (chunkSize < 0) { // End of stream -- send empty frame with EOS flag set. 
decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM); inputDone = true; if (VERBOSE) Log.d(TAG, "sent input EOS"); } else { if (extractor.getSampleTrackIndex() != trackIndex) { Log.w(TAG, "WEIRD: got sample from track " + extractor.getSampleTrackIndex() + ", expected " + trackIndex); } long presentationTimeUs = extractor.getSampleTime(); decoder.queueInputBuffer(inputBufIndex, 0, chunkSize, presentationTimeUs, 0 /*flags*/); if (VERBOSE) { Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" + chunkSize); } inputChunk++; extractor.advance(); } } else { if (VERBOSE) Log.d(TAG, "input buffer not available"); } } if (!outputDone) { int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC); if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { // no output available yet if (VERBOSE) Log.d(TAG, "no output from decoder available"); } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { // not important for us, since we're using Surface if (VERBOSE) Log.d(TAG, "decoder output buffers changed"); } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { MediaFormat newFormat = decoder.getOutputFormat(); if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat); } else if (decoderStatus < 0) { Log.e("", "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus); } else { // decoderStatus >= 0 if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.size + ")"); if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { if (VERBOSE) Log.d(TAG, "output EOS"); outputDone = true; } boolean doRender = (info.size != 0); // As soon as we call releaseOutputBuffer, the buffer will be forwarded // to SurfaceTexture to convert to a texture. The API doesn't guarantee // that the texture will be available before the call returns, so we // need to wait for the onFrameAvailable callback to fire. 
decoder.releaseOutputBuffer(decoderStatus, doRender); if (doRender) { if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount); outputSurface.awaitNewImage(); outputSurface.drawImage(true); if (decodeCount < MAX_FRAMES) { File outputFile = new File(FILES_DIR, String.format("frame-%02d.png", decodeCount)); long startWhen = System.nanoTime(); // save frame to file outputSurface.saveFrame(outputFile.toString()); Log.e("", "### save frame " + outputFile.getAbsolutePath()) ; frameSaveTime += System.nanoTime() - startWhen; } decodeCount++; } } } } int numSaved = (MAX_FRAMES < decodeCount) ? MAX_FRAMES : decodeCount; Log.d(TAG, "### Saving " + numSaved + " frames took " + (frameSaveTime / numSaved / 1000) + " us per frame"); } }
java
{# Legend for the doughnut chart: renders one list item per tracker category,
   delegating each entry's markup to the shared category component. #}
<ul class="doughnut-legend"> {% for c in tracker_categories %} <li> {% include 'components/trackers/category.html' %} </li> {% endfor %} </ul>
html
Spoilers for ‘Game of Thrones’ season 8 episode 4. “We don’t have time for all this.” So said Bran Stark (Isaac Hempstead Wright) in the first episode of the eighth and final season of Game of Thrones. And it is as Bran decreed, with plot developments coming in thick and fast in the fourth round that was aired in India on Hotstar on Monday. Is Cersei Lannister (Lena Headey) a bigger threat than the Night King? The HBO series, which is in firm death throes mode, gave its reply in The Last of the Starks, written by showrunners DB Weiss and David Benioff and directed by David Nutter. Some scenes were leaked in advance on the internet, indicating the hunger for updates and the need to find out how it all ends and move on to the next pop culture juggernaut. The Last of the Starks explores the aftermath of the Battle of Winterfell, in which Arya Stark (Maisie Williams) slays the Night King and vanquishes the White Walkers. Sombre farewells are bid to the departed, including Theon Grejoy, Dolorous Edd, Jorah Mormont and Lyannna Mormont. The nagging question of the true occupant of the Iron Throne returns to the table as the survivors celebrate their victory and drink themselves silly. “We may have defeated them, but we still have us to contend with,” Tyrion Lannister (Peter Dinklage) wryly tells Davos Seaworth (Liam Cunningham). Back at the Westeros capital King’s Landing, Cersei Lannister (Lena Headey) lies in wait with her enforcer Qyburn (Anton Lesser) and ambitious pirate Euron Greyjoy (Pilou Asbaek), secure in the knowledge that she faces a depleted and exhausted army. Despite positioning the White Walkers as the biggest threat to humanity right from the inaugural episode in 2011, DB Weiss and David Benioff have recently shifted back the focus to the power struggle for the Iron Throne, currently occupied by Cersei. 
The act of tying together complicated and interlocking strands has nudged the series based on George RR Martin’s A Song of Ice and Fire novels towards a much-needed conclusion. The end had to come, valar morghulis and all, but the speed with which major plot turns are being lobbed has led to clumsy contrivances and short-cuts, such as the overnight destruction of the White Walkers, the tensions between twins-and-lovers Cersei and Jaime (Nikolaj Coster-Waldau) – even though Jaime pledged his fealty to Cersei not too long ago – and the awkward coupling of Jon Snow (Kit Harington) and Daenerys Targaryen (Emilia Clarke). The elephant in the room about Jon’s secret – that he is actually half Targaryen and Stark, Daenerys’s nephew and the “true heir” to the Iron Throne – finally got an inelegant walkaround. Did it work? There are only two more episodes to go, so it will have to do. Much of The Last of the Starks was about treading water, with the better scenes serving as reminders of the reasons Game of Thrones became a global phenomenon in the first place: the complex character studies, crisply written conversations, and the ability to up-end expectations. Among the better moments was the evolving relationship between former adversaries Arya and Sandor Clegane (Rory McCann) and the suggestion that the ruthless former bodyguard is among the wisest and most humane characters in the series. One of the most powerful scenes came in late, and took us back to Daenerys’s early campaigns to marshall an army for her conquest. Apart from walking through fire and nurturing dragons, this involved a mastery over languages and the ability to reveal a secret at just the right moment (unlike some others in The Last of the Starks, who behaved as though they were at a school reunion). The endgame is clearly in sight: another big, and final, battle, which will consume King’s Landing and dispense with, in short order, the Jon-Daenery question and the fate of the Lannisters. 
Generations will remember the ones who laid down their lives in the Battle of Winterfell, Jon Snow declared during the mass funerals in The Last of the Starks. As Game of Thrones winds down, its fans too will not be able to forget just how messy endings can get. Also read:
english
The Central council of the All India Chess Federation decided to review the Rules governing the selection of Indian teams for the Olympiad / World / Asian team championships as the Rules in existence were made eight years ago. A sub committee was formed under the Chairmanship of the President, AICF which met on 02.10.2017 at Chennai and the following decisions were taken. It has been decided to select Indian Men and Women team for Olympiad/World/Asian Team Championships on the basis of ELO rating. However the National Champion will get an added a bonus of 50 rating points and Women Champion 25 rating points. Consequentially, the existing Rule of awarding 75 ELO points for players who participate in the National Challengers/Premier and National Women Challengers/Premier stands withdrawn with immediate effect and as such no ELO points will be added. For the purpose of calculating the rating, three months average rating of the player prior to the selection month will be taken in to account. The teams will be selected 3 months prior to the scheduled month of event. For example, team for 2018 World Chess Olympiad which is scheduled in the month of September 2018 at Georgia will be selected in the month of June 2018 considering the rating average of FRLs of March 2018, April 2018 and May 2018. The same criteria will be applied for World and Asian Team championships. However, if the team earned the right to play World Team Championship based on the performance in World Chess Olympiad, then the same members of the team who participated in the Olympiad will be selected for World Team Championship. In case player from the Olympiad team is not available to play the World Championship due to any reason, the player(s) with highest rating(s) will be offered the empty slot by using the rating criteria. While determining the rating, the three months average rating of the player will be taken into consideration.
english
Bharti Airtel on Saturday said it has received financing commitments of up to USD 2.5 billion from China Development Bank and Industrial and Commercial Bank of China, with an average maturity of about nine years. The company signed the term sheets for the financing options in the presence of Prime Minister Narendra Modi, who is currently on an official visit to China. Airtel said the funds will help the company further diversify its global financing pool and invest in growth of data networks. “China Development Bank has committed financing of up to USD 2 billion with an average maturity of about nine years,” Airtel said. This is the “single largest bilateral commitment by China Development Bank to any telecom operator globally and the largest bilateral commitment to a private Indian company”. “Industrial and Commercial Bank of China has committed USD 500 million with an average life of about nine years,” Airtel added. Subject to final agreements and requisite approvals, Bharti Airtel can draw these funds over a long availability period, depending on its financing requirements. In March 2015, Bharti Airtel announced a strategic collaboration with China Mobile, under which the two companies will work towards growth of the Long-Term Evolution (LTE) ecosystem and evolving mobile technology standards. Bharti Airtel also has network equipment sourcing agreements with ZTE and Huawei.
english
package util_test import ( "fmt" "os" "os/user" "path/filepath" . "github.com/onsi/ginkgo/extensions/table" . "github.com/onsi/gomega" "github.com/werf/werf/pkg/util" ) type entry struct { path string expectedPathFormat string } var _ = DescribeTable("expand path", func(e entry) { usr, err := user.Current() Ω(err).ShouldNot(HaveOccurred()) wd, err := os.Getwd() Ω(err).ShouldNot(HaveOccurred()) expectedPath := fmt.Sprintf(e.expectedPathFormat, usr.HomeDir, wd) Ω(util.ExpandPath(filepath.FromSlash(e.path))).Should(Equal(filepath.FromSlash(expectedPath))) }, Entry("~", entry{ path: "~", expectedPathFormat: "%[1]s", }), Entry("~/", entry{ path: "~/", expectedPathFormat: "%[1]s", }), Entry("~/path", entry{ path: "~/path", expectedPathFormat: "%[1]s/path", }), Entry("path", entry{ path: "path", expectedPathFormat: "%[2]s/path", }), Entry("path1/../path2", entry{ path: "path1/../path2", expectedPathFormat: "%[2]s/path2", }), )
go
{ "description": "Cookies are files stored locally in your web browser containing identifiable information.", "id": "cookies", "meta": { "license-for-this-file": "This work is licensed under the Creative Commons Attribution-ShareAlike 3.0 Unported License. To view a copy of this license, visit http://creativecommons.org/licenses/by-sa/3.0/ or send a letter to Creative Commons, 444 Castro Street, Suite 900, Mountain View, California, 94041, USA.", "spec-version": "1.1" }, "name": "Cookies", "parent": "track", "subtitle": "and related technologies", "title": "Cookies", "type": "topic" }
json
import numpy as np import torch import anndata from celligner2.othermodels.trvae.trvae import trVAE from celligner2.trainers.trvae.unsupervised import trVAETrainer def trvae_operate( network: trVAE, data: anndata, condition_key: str = None, size_factor_key: str = None, n_epochs: int = 20, freeze: bool = True, freeze_expression: bool = True, remove_dropout: bool = True, ) -> [trVAE, trVAETrainer]: """Transfer Learning function for new data. Uses old trained Network and expands it for new conditions. Parameters ---------- network: trVAE A scNet model object. data: Anndata Query anndata object. condition_key: String Key where the conditions in the data can be found. size_factor_key: String Key where the size_factors in the data can be found. n_epochs: Integer Number of epochs for training the network on query data. freeze: Boolean If 'True' freezes every part of the network except the first layers of encoder/decoder. freeze_expression: Boolean If 'True' freeze every weight in first layers except the condition weights. remove_dropout: Boolean If 'True' remove Dropout for Transfer Learning. Returns ------- new_network: trVAE Newly network that got trained on query data. new_trainer: trVAETrainer Trainer for the newly network. 
""" conditions = network.conditions new_conditions = [] data_conditions = data.obs[condition_key].unique().tolist() # Check if new conditions are already known for item in data_conditions: if item not in conditions: new_conditions.append(item) n_new_conditions = len(new_conditions) # Add new conditions to overall conditions for condition in new_conditions: conditions.append(condition) # Update DR Rate new_dr = network.dr_rate if remove_dropout: new_dr = 0.0 print("Surgery to get new Network...") new_network = trVAE( network.input_dim, conditions=conditions, hidden_layer_sizes=network.hidden_layer_sizes, latent_dim=network.latent_dim, dr_rate=new_dr, use_mmd=network.use_mmd, mmd_boundary=network.mmd_boundary, recon_loss=network.recon_loss, ) # Expand First Layer weights of encoder/decoder of old network by new conditions encoder_input_weights = network.encoder.FC.L0.cond_L.weight to_be_added_encoder_input_weights = np.random.randn(encoder_input_weights.size()[0], n_new_conditions) * np.sqrt( 2 / (encoder_input_weights.size()[0] + 1 + encoder_input_weights.size()[1])) to_be_added_encoder_input_weights = torch.from_numpy(to_be_added_encoder_input_weights).float().to(network.device) network.encoder.FC.L0.cond_L.weight.data = torch.cat((encoder_input_weights, to_be_added_encoder_input_weights), 1) decoder_input_weights = network.decoder.FirstL.L0.cond_L.weight to_be_added_decoder_input_weights = np.random.randn(decoder_input_weights.size()[0], n_new_conditions) * np.sqrt( 2 / (decoder_input_weights.size()[0] + 1 + decoder_input_weights.size()[1])) to_be_added_decoder_input_weights = torch.from_numpy(to_be_added_decoder_input_weights).float().to(network.device) network.decoder.FirstL.L0.cond_L.weight.data = torch.cat((decoder_input_weights, to_be_added_decoder_input_weights), 1) # Set the weights of new network to old network weights new_network.load_state_dict(network.state_dict()) # Freeze parts of the network if freeze: new_network.freeze = True for name, p in 
new_network.named_parameters(): p.requires_grad = False if freeze_expression: if 'cond_L.weight' in name: p.requires_grad = True else: if "L0" in name or "B0" in name: p.requires_grad = True new_trainer = trVAETrainer( new_network, data, condition_key=condition_key, size_factor_key=size_factor_key, batch_size=1024, n_samples=4096 ) new_trainer.train( n_epochs=n_epochs, lr=0.001 ) return new_network, new_trainer
python
Microsoft is doing a lot of things right with Bing, the new search engine it debuted in late May. For one, the engine works quite well, including a number of subtle features like video previews that Google doesn’t have (yes, some of the improvements were introduced in Live Search, but that doesn’t mean they aren’t cool). Perhaps more important, the company is doing a good job with its marketing efforts, shedding its corporate monolith culture in favor of something that’s decidedly more.. Googleish. Today Microsoft is launching a fun little contest for Bing that invites you to sing a a jingle about Bing (no more than 5 minutes long) and upload it on Bing’s YouTube Account (another good move here Microsoft — no need to make people upload through a proprietary site). You have up until July 31 to submit videos, and voting will commence today and continue through August 5. Be sure to watch the video above of some Bing interns introducing the concept (I’m currently harassing the TechCrunch interns to make one of our own). This kind of contest isn’t particularly novel — it just isn’t something you’d typically associate with Microsoft. Last month Google launched a contest inviting users to make a video about Google’s Chrome icon, which most people still don’t recognize. It’s no wonder why they’re keen to increase Chrome’s brand awareness: it’s going to be a full-fledged operating system soon. You can find full details on the Bing contest here.
english
Popular TV actor Arjun Bijlani will soon be seen hosting the talent-based reality show 'India's Got Talent'. He is taking it as a huge responsibility and considers his primary role to motivate the contestants and introduce them to the judges. Arjun says: "I have a huge responsibility on my shoulders when it comes to maintaining the standard of the show. My primary role will be to introduce the aspiring contestants to the judges and in my own way, keep their spirits and motivation levels high. To put it simply, I will be a moderator of moods and confidence and I am glad to be doing this. "He explains a few points that makes this reality show different from others and says: "It's not restricted to a certain set of talent only - therefore it offers a lot of variety and there is always an element of surprise. You don't know what the next contestant is going to come up with. "Arjun is not only an actor but also hosted several reality shows like 'Dance Deewane' and others, when asked what he enjoys most, he replies his first love will always be acting but he enjoys doing both. "My first love is acting but I also enjoy hosting. When I started hosting, my fans and viewers showered a lot of praise and that is what kept me motivated to take up more shows as a host. So yes, I balance both and so far, I am enjoying the place I am in," he adds. The talent-based reality show 'India's Got Talent' will be judged by theatre, film and television personality Kirron Kher, Bollywood actress Shilpa Shetty, rapper Badshah, and lyricist, poet and screenwriter Manoj Muntashir. 'India's Got Talent' will start from January 15 at 8 p. m. on Sony Entertainment Television.
english
<reponame>john-hu/react-designpane<filename>example/src/index.css * { box-sizing: border-box; } body, html { margin: 0; padding: 0; font-family: sans-serif; width: 100%; height: 100%; background-color: gray; } .design-pane { margin: 20px 200px; border: solid 4px black; border-radius: 4px; background-color: white; width: calc(100% - 400px); height: calc(100% - 40px); }
css
Having already retained a settled core of players, the Chennai Super Kings went into the 2019 IPL auctions with limited requirements, buying only two players to complete their roster. One of them was Mohit Sharma, who returns to the franchise after three seasons. The 30-year-old pacer's first stint with the Super Kings was from 2013-15, including the 2014 season where he was the Purple Cap winner. At the sidelines of the Junior Super Kings T20 Tournament 2018 trophy and jersey unveiling, Mohit Sharma spoke to the media about his return to CSK and how he had missed the franchise during his time away. How does it feel to be back at Chennai Super Kings? What is so special about the team? It obviously feels good. It always feels good when you return home, whether you're coming from office or coming back from travelling the world. It feels relaxing. The bond and the culture that CSK has developed, you don't get it so easily anywhere else. It's a family culture. It's not just in the players, you see it in the staff, and in the fans, those who support Chennai. I played elsewhere for 3 years, but this was always said about me - "Mohit Sharma, CSK player". I love that, the feeling is really good. The last time you played for CSK, you got a place in the Indian team and had a good World Cup (2015). Do you see this as a chance to get back into the national side? To be very honest, I am not thinking that much. Right now, I am focussing on the process. I have been working on my fitness, I'll see what happens after that. I haven't thought that far ahead. I'll keep ensuring that I contribute for CSK to the best of my abilities, and help the franchise as much as I can. You are known for your variations, especially in the death overs. Have you worked on new variations? When it comes to bowling, I have always maintained that the simpler you keep it, the better it is. All bowlers have variations. Especially with T20s around, the bowlers are using their brains even more. 
Personally, when it comes to variations, it matters when and how you use them, and against whom. That should be your strength. I believe that whatever skills you have, you sharpen them and based on the situation, figure out where and what to use. Were you expecting CSK to bid for you again, that too for such a big price (Rs 5 crores)? Did you miss CSK and MS Dhoni? If I say I was not missing CSK, I would be lying big time! Yes, I have missed CSK a lot. I have missed being part of CSK - even when I played elsewhere for 3 years, I used to think it would have been better for me to have been playing at CSK. The environment creates a lot of difference. No doubt, the MS Dhoni factor is always there. How well he understands you, how he handles you, that matters a lot. I recently said this: On the cricketing field, if there's someone who has taught me how to walk, it has been Mahendra Singh Dhoni. A lot of credit goes to Mahi bhai. The IPL might be held outside India. Are you mentally prepared for it? Once it gets confirmed, I'll prepare for it. Compared to previous seasons, CSK has a new bowling attack now. How are you preparing yourself, given the kind of competition in the team? It is important to develop the culture of healthy competition. It has already been developed at CSK. All of us are professional cricketers, we've all played alongside each other. Be it Deepak (Chahar) or Shardul (Thakur), we've played so much cricket together that we know each other in and out. So, there's no difficulty in bonding and it helps a lot because of the culture developed at CSK, be it with the staff or the players. It doesn't let you slip into bad situations, which is the best thing.
english
<reponame>farwish/accountserve-go package service import ( "github.com/stretchr/testify/assert" "testing" ) // cd ./app/service && go test func TestDataGoCacheApply(t *testing.T) { assert.Equal(t, 0, 0) } func TestUriFilterExcludeQueryString(t *testing.T) { uris := []string{ "http://www.example.com/?a=b&c=d", "http://www.example.com?a=b&c=d", } var cleanUri string for _, uri := range uris { cleanUri = UriFilterExcludeQueryString(uri) assert.Equal(t, "http://www.example.com", cleanUri) } } func TestRightPageNumber(t *testing.T) { var pageNumberMax = 100 assert.Equal(t, 1, RightPageNumber(-1, pageNumberMax)) assert.Equal(t, 1, RightPageNumber(0, pageNumberMax)) assert.Equal(t, 2, RightPageNumber(2, pageNumberMax)) assert.Equal(t, 20, RightPageNumber(20, pageNumberMax)) assert.Equal(t, 100, RightPageNumber(200, pageNumberMax)) } func TestSegmentCutSearchMode(t *testing.T) { seg := SegmentCutSearchMode("釜山行") assert.Equal(t, seg[0], "釜山") assert.Equal(t, seg[1], "行") }
go
<reponame>bienvenuelisis/noir-meilleur-jekyll { "seller_id": "pub-4997176462744877", "is_confidential": false, "seller_type": "PUBLISHER", "name": "<NAME>", "domain": "noirmeilleur.com" }
json
SAN JOSE, Calif., Jan. 05, 2023 (GLOBE NEWSWIRE) — Zoom Video Communications, Inc. (NASDAQ: ZM) announced that it has appointed Cindy Hoots, Chief Digital Officer and CIO of AstraZeneca, as an independent director on Zoom’s Board of Directors effective immediately. Ms. Hoots has served as the Chief Digital Officer and Chief Information Officer at AstraZeneca PLC, a pharmaceutical company, since January 2020. From January 2018 to December 2019, she served as Global Vice President of Technology of Unilever PLC, a multinational consumer goods company. Prior to joining Unilever, Ms. Hoots served as Vice President of Next Generation Products, Commercial, and Digital Transformation at British American Tobacco plc from 2016 to 2018. She also spent 16 years at Mars, Incorporated. Ms. Hoots received a B.S. in Computer Information Systems from DeVry Institute of Technology. Zoom is for you. Zoom is a space where you can connect to others, share ideas, make plans, and build toward a future limited only by your imagination. Our frictionless communications platform is the only one that started with video as its foundation, and we have set the standard for innovation ever since. That is why we are an intuitive, scalable, and secure choice for individuals, small businesses, and large enterprises alike. Founded in 2011, Zoom is publicly traded (NASDAQ: ZM) and headquartered in San Jose, California. Visit zoom.com and follow @zoom.
english
<gh_stars>0 import React from 'react' import PropTypes from 'prop-types' import { graphql } from 'gatsby' import Helmet from 'react-helmet' import Layout from '../components/Layout' import Content, { HTMLContent } from '../components/Content' export const HomePageTemplate = ({ title, description, youtubeId, content, contentComponent, }) => { const PageContent = contentComponent || Content const src = `https://www.youtube.com/embed/${youtubeId}?rel=0&iv_load_policy=3&playsinline=1&modestbranding=1&color=white&showinfo=0&origin=https%3A%2F%2Fwww.covenantchristianwv.com` return ( <section className="section"> <Helmet> <meta name="description" content={description}></meta> </Helmet> <div className="Hero Hero--home"> <h1 className="Hero-title">{title}</h1> </div> <div className="container"> <div className="grid-x"> <div className="mediumlarge-10 mediumlarge-offset-1"> <div className="section"> {youtubeId && ( <header className="responsive-embed widescreen"> <iframe title="CCS Introduction" width="560" height="315" src={src} frameBorder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowFullScreen /> </header> )} <PageContent className="content" content={content} /> </div> </div> </div> </div> </section> ) } HomePageTemplate.propTypes = HomePageTemplate.propTypes = { title: PropTypes.string.isRequired, description: PropTypes.string, youtubeId: PropTypes.string, content: PropTypes.string, contentComponent: PropTypes.func, } const HomePage = ({ data }) => { const { markdownRemark: post } = data return ( <Layout> <HomePageTemplate contentComponent={HTMLContent} title={post.frontmatter.title} description={post.frontmatter.description} youtubeId={post.frontmatter.youtubeId} content={post.html} /> </Layout> ) } HomePage.propTypes = { data: PropTypes.object.isRequired, } export default HomePage export const homePageQuery = graphql` query HomePage($id: String!) 
{ markdownRemark(id: { eq: $id }) { html frontmatter { title description youtubeId } } } `
javascript
--- title: Configurer les paramètres d’exportation dans API Azure pour FHIR description: Cet article décrit comment configurer les paramètres d’exportation dans API Azure pour FHIR author: matjazl ms.service: healthcare-apis ms.subservice: fhir ms.topic: reference ms.date: 3/5/2020 ms.author: matjazl ms.openlocfilehash: 46a55b83b38593a514d40a9f75d99739a1efb8a2 ms.sourcegitcommit: 877491bd46921c11dd478bd25fc718ceee2dcc08 ms.translationtype: HT ms.contentlocale: fr-FR ms.lasthandoff: 07/02/2020 ms.locfileid: "84871019" --- # <a name="configure-export-setting-and-export-the-data-to-a-storage-account"></a>Configurer le paramètre d’exportation et exporter les données vers un compte de stockage API Azure pour FHIR prend en charge la commande $export qui vous permet d’exporter les données d’un compte API Azure pour FHIR vers un compte de stockage. Quatre étapes sont nécessaires pour effectuer l’exportation dans API Azure pour FHIR : 1. Activer l’identité managée sur le service API Azure pour FHIR 2. Créer un compte de stockage Azure (si vous ne l’avez pas déjà fait) et attribuer au compte de stockage une autorisation d’accès à API Azure pour FHIR 3. Sélectionner le compte de stockage dans API Azure pour FHIR en tant que compte de stockage d’exportation 4. Exécuter l’exportation en appelant la commande $export sur API Azure pour FHIR ## <a name="enabling-managed-identity-on-azure-api-for-fhir"></a>Activation de l’identité managée sur API Azure pour FHIR La première étape de la configuration d’API Azure pour FHIR en vue de l’exportation consiste à activer l’identité managée à l’échelle du système sur le service. Vous pouvez lire toutes les informations relatives aux identités managées dans Azure [ici](../active-directory/managed-identities-azure-resources/overview.md). Pour ce faire, accédez au service API Azure pour FHIR et sélectionnez le panneau Identité. La modification de l’état sur activé permettra d’activer l’identité managée dans le service API Azure pour FHIR. 
![Activer une identité managée](media/export-data/fhir-mi-enabled.png) Nous pouvons maintenant passer à l’étape suivante et créer un compte de stockage et attribuer une autorisation à notre service. ## <a name="adding-permission-to-storage-account"></a>Ajout d’autorisations au compte de stockage L’étape suivante de l’exportation consiste à attribuer au service API Azure pour FHIR l’autorisation d’écrire dans le compte de stockage. Après avoir créé un compte de stockage, accédez au panneau Access Control (IAM) dans Compte de stockage et sélectionnez Ajouter des attributions de rôles. ![Activer une identité managée](media/export-data/fhir-export-role-assignment.png) Ici, nous ajoutons ensuite le rôle Contributeur aux données blob du stockage à notre nom de service. ![Activer une identité managée](media/export-data/fhir-export-role-add.png) Nous sommes maintenant prêts pour l’étape suivante, dans laquelle nous pouvons sélectionner le compte de stockage dans API Azure pour FHIR comme compte de stockage par défaut pour $export. ## <a name="selecting-the-storage-account-for-export"></a>Sélection du compte de stockage pour $export La dernière étape avant l’appel de la commande $export consiste à attribuer le compte de stockage Azure qu’API Azure pour FHIR utilisera pour l’exportation des données. Pour ce faire, accédez au panneau Intégration du service API Azure pour FHIR dans le portail Azure et sélectionnez le compte de stockage. ![Activer une identité managée](media/export-data/fhir-export-storage.png) Après cela, nous sommes prêts à exporter les données à l’aide de la commande $export. ## <a name="exporting-the-data-using-export-command"></a>Exportation des données à l’aide de la commande $export Après avoir configuré API Azure pour FHIR pour l’exportation, nous pouvons maintenant utiliser la commande $export pour exporter les données hors du service et vers le compte de stockage que nous avons spécifié. 
Pour savoir comment appeler la commande $export dans le serveur FHIR, consultez la documentation sur la spécification $export à l’adresse [https://hl7.org/Fhir/uv/bulkdata/export/index.html](https://hl7.org/Fhir/uv/bulkdata/export/index.html) > [!IMPORTANT] > Notez qu’actuellement, API Azure pour FHIR prend uniquement en charge l’exportation au niveau du système, comme défini dans la spécification d’exportation à l’adresse [https://hl7.org/Fhir/uv/bulkdata/export/index.html](https://hl7.org/Fhir/uv/bulkdata/export/index.html). Actuellement, nous ne prenons pas en charge les paramètres de requête avec $export. >[!div class="nextstepaction"] >[Paramètres supplémentaires](azure-api-for-fhir-additional-settings.md)
markdown
The Indian contingent traveled to London yesterday after the first Test in Nottingham concluded on Sunday. The iconic Lord's cricket ground will host the second Test between India and England from August 12. Both teams have already reached the venue and have begun preparations for the encounter. The Board of Control for Cricket in India (BCCI) has updated cricket fans in India about the commencement of the team's training session at Lord's by sharing a photograph on its official social media handles. We can see the Indian players warming up before the session in the picture. The BCCI shared the following post and captioned it: With the first Test ending in a draw, the series is currently at 0-0. It was the first game of the 2021-23 World Championship cycle. India and England received four points each due to the draw result of the Test. Both teams will be eager to register a win at Lord's and gain the upper hand moving forward in the series. All-rounder Moeen Ali has been added to the England squad for the upcoming Test at Lord's. The hosts are currently struggling to find a balance to their side in the absence of Ben Stokes. Team management might have roped in Moeen Ali to play him as the pure all-rounder in the side for the upcoming Tests against India. The England and Wales Cricket Board's (ECB) media advisory read: "Worcestershire spinning all-rounder Moeen Ali has been added to the England Men’s Test squad for the LV= Insurance second Test against India at Lord’s starting on Thursday (12-16 August). He will join the Test party later today and will train this afternoon with the England squad. 
" England squad for the second Test - Joe Root (Yorkshire), Moeen Ali (Worcestershire), James Anderson (Lancashire), Jonny Bairstow (Yorkshire), Dom Bess (Yorkshire), Stuart Broad (Nottinghamshire), Rory Burns (Surrey), Jos Buttler (Lancashire), Zak Crawley (Kent), Sam Curran (Surrey), Haseeb Hameed (Nottinghamshire), Dan Lawrence (Essex), Jack Leach (Somerset), Craig Overton (Somerset), Ollie Pope (Surrey), Ollie Robinson (Sussex), Dom Sibley (Warwickshire), Mark Wood (Durham)
english
David’s sin is discovered, and he suffers terrible consequences. However, God is still gracious because David has another son who will now be the next king. Despite his horrible failure, God keeps His promise. We’ve all messed up, but God will keep His promises to us. Our assurance comes from God’s faithfulness, not ours. The Gospel of Mark (Part Seven) The Prayer Request of Christ; "Making an Impact Through Sending Prayers."
english
<reponame>jingetiema2100/MicroCommunity<filename>service-common/src/main/java/com/java110/common/bmo/attrSpec/impl/DeleteAttrSpecBMOImpl.java package com.java110.common.bmo.attrSpec.impl; import com.java110.common.bmo.attrSpec.IDeleteAttrSpecBMO; import com.java110.core.annotation.Java110Transactional; import com.java110.intf.common.IAttrSpecInnerServiceSMO; import com.java110.po.attrSpec.AttrSpecPo; import com.java110.vo.ResultVo; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; @Service("deleteAttrSpecBMOImpl") public class DeleteAttrSpecBMOImpl implements IDeleteAttrSpecBMO { @Autowired private IAttrSpecInnerServiceSMO attrSpecInnerServiceSMOImpl; /** * @param attrSpecPo 数据 * @return 订单服务能够接受的报文 */ @Java110Transactional public ResponseEntity<String> delete(AttrSpecPo attrSpecPo) { int flag = attrSpecInnerServiceSMOImpl.deleteAttrSpec(attrSpecPo); if (flag > 0) { return ResultVo.createResponseEntity(ResultVo.CODE_OK, "保存成功"); } return ResultVo.createResponseEntity(ResultVo.CODE_ERROR, "保存失败"); } }
java
{ "apiNoBg": ["YOUR-API-KEY-HERE", "ADD-OTHER-API-KEY-HERE-FOR-MORE-LIMIT", "AND-AGAIN"], "apiTobz": "YOUR-API-KEY-HERE", "apiVH": "YOUR-API-KEY-HERE", "apiFarzain": "YOUR-API-KEY-HERE" }
json
http://data.doremus.org/expression/289402dc-e1b1-3311-92ee-7fdfa298951f http://data.doremus.org/expression/92518833-4fdb-3e43-b3f1-102fe1b9ab41 http://data.doremus.org/expression/e8bd758e-83e4-3f94-b4dc-f0190795631c http://data.doremus.org/expression/567f920b-afde-3171-9c7d-43abb8362557 http://data.doremus.org/expression/c8e9f32a-5de9-3abd-82f9-b13c6fc9930a http://data.doremus.org/expression/c250caa6-e5bd-3a72-a845-4026a7a34d56 http://data.doremus.org/expression/aabdcf72-afaa-3a52-9d83-89ee54fa07cb http://data.doremus.org/expression/48e2c3bc-74d4-3470-a40d-70dfb92e9bbc http://data.doremus.org/expression/860a24f7-ccaa-3973-bce4-6239478cc7cc http://data.doremus.org/expression/a78b532a-0780-317f-8f76-63d2585a6b94 http://data.doremus.org/expression/65d88308-7ec0-306c-b2ca-8db83a2071ce http://data.doremus.org/expression/6a6175bf-5ce1-38ae-b5c6-3ea90106f0fc http://data.doremus.org/expression/996ec57c-e3bc-3d03-913c-f03bc9d8e5e6 http://data.doremus.org/expression/63951e6a-fd2d-3d4f-978d-c6221cbc9372 http://data.doremus.org/expression/8f8d20f2-719c-3d8d-8767-0052fdfe6e5c http://data.doremus.org/expression/a4638da4-5d61-31ec-9dbb-9d8f7be712ca http://data.doremus.org/expression/5b6b2da7-7a5a-3036-a962-05f9a8fabb4c http://data.doremus.org/expression/af1266b6-6f72-3ada-8cf7-600aa97983fe http://data.doremus.org/expression/3427f998-9509-3cd0-8c01-273d5d34028a http://data.doremus.org/expression/321ebb0d-f2f3-3340-be47-946d24d07e3a http://data.doremus.org/expression/70e5405c-d221-3111-a050-862f7717550d http://data.doremus.org/expression/f1b0228f-24a5-3516-86a2-d18a82aea963 http://data.doremus.org/expression/9f3782f5-3134-3951-b564-457c8f4eb009 http://data.doremus.org/expression/954a5213-820e-39a3-a1ff-8de78ab1961a http://data.doremus.org/expression/aa2bbd12-a482-344a-b8b2-c2851b3a0f63 http://data.doremus.org/expression/40483e53-c1f8-3cd4-9304-e9ae9ecd91c4 http://data.doremus.org/expression/0f49ea54-0068-3fe4-aa97-57d1f84494aa
json
<filename>src/icons/arrow_forward.js /* arrow_forward icon*/ import React from "react"; const Icon = (props = {}) => ( <svg viewBox="0 0 48 48" {...props}> <path d="M24 8l-2.83 2.83L32.34 22H8v4h24.34L21.17 37.17 24 40l16-16z" /> </svg> ); Icon.displayName = "ArrowForwardIcon"; Icon.isMaterialIcon = true; export default Icon;
javascript
Korella Venkatachari’s three-room house is strewn with toys on the floor, paint containers lie in one corner and unused wood in another. “I have been carving toys for 28 years,” he says, proudly. Venkatachari is amongst the 229 toy-makers of Kondapalli (‘village of hills’, roughly translated) in Krishna district of Andhra Pradesh, a village flanked by the lush green Eastern Ghats and the Narla Tata Rao Thermal Power Station. He and other the artisans have for generations made signature Kondapalli toys. His entire family does this work. He carves the wood, his wife Jyothi paints the toys. Their 18-year-old daughter Mounika dropped out after Class 10 and now helps her parents to make toys. The road in the village that leads to a Reddy dynasty fort is filled with houses and shops where the toys are made and sold. Most of the artisans make the toys at home, with all the members of the family sharing the work. A few mass-production workshops in Kondapalli also employ the toy-makers on daily wages to craft, carve and paint. Most of the toys are sold wholesale on orders from cultural platforms like Shilparamam in Hyderabad. wood from the Eastern Ghats as raw material for the toys. We still get the wood from these ghats,” he says. Over the centuries, the skill moved beyond the Nakarshalu community, and it is no longer a caste-specific occupation. Members of various communities and castes, including Padmashali, Kamsali, Vishwabrahmin, now work in the Kondapalli toy industry. Records of the Mutually Aided Cooperative Society (MACS), established in 2002 by the artisans, show that in February 2017, of the 229 toy-makers in the village, 107 are men and 122 women. Of these, 53 are Dalits, 128 are from Other Backward Classes, 26 are Muslim, and 22 are from other, landed castes. Like Rajya Lakshmi, many others have dropped out of school to make toys. P. Sathyabhama, 33, has been painting toys since she was 13, after completing Class 7. 
She is a daily wage worker in a workshop owned by Udayagiri Sesha Rao, secretary of the MACS. “I was paid 5 rupees while I was learning 20 years ago. I earn 100 rupees a day now after painting from 10 a.m. to 6 p.m. I go back home, stitch a jacket or two which earns me around 50 rupees, attend to my daughters and husband, cook, and complete the household chores. It is midnight when I get to rest my back. I wake up at 5.30 a.m. to again start the day. Durga’s dilemma reflects that afflicting millions of artisans and skilled craftspersons across India. Theirs is a fragile world. Successive governments have shown no genuine interest in keeping alive these extraordinary skills. With mechanisation and automation, mass-produced plastic toys have flooded the markets everywhere. The resultant loss of markets and income has been devastating. Also gone is the old world patronage they once enjoyed from affluent local powers. Indebtedness makes it impossible for them to invest in small enterprises of their own and forces them to sell to commission agents or to little ‘toy-factories’ that market the products as their own. The first cooperative society, V. Venkayya says, was formed in 1937-1938. “But the art started losing its sheen and demand during the 1990s, when the era of mechanisation and newer technologies started. The art might sustain for a maximum of 10 more years if the government continues to ignore the issues of the artisans.” These issues include housing, pension and artisan identity cards.
english
The Central Bureau of Investigation has summoned businessman Karti Chidambaram, the son of former Union Minister P Chidambaram, to appear before it in connection with the Aircel Maxis deal on Thursday, ANI reported. Karti Chidambaram is accused of facilitating bribes in the Aircel-Maxis transaction. The Aircel-Maxis deal is part of the much bigger 2G spectrum scam, which was worth around Rs 1.76 trillion. In July 2016, the Enforcement Directorate had asked Karti Chidambaram to appear for questioning in the case. Officials are looking into how telecom firm Aircel was sold to a Malaysian communications giant named Maxis. The Enforcement Directorate had said that approval for the deal was given when P Chidambaram was the finance minister in 2006. In August 2015, the ED had issued summons to two directors of a firm linked to Karti Chidambaram after an investigation revealed that Rs 26 lakh had allegedly been sent from the firm to Aircel Televentures. A firm owned by Karti Chidambaram also allegedly received payments from Maxis after the deal was executed. The ED had chargesheeted former Telecom Minister Dayanidhi Maran, his brother Kalanithi and sister-in-law Kavery Kalanithi as accused in the case too.
english
<filename>pipermail/antlr-interest/2005-November/014492.html <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2//EN"> <HTML> <HEAD> <TITLE> [antlr-interest] Antlr 3 and the newline token problem </TITLE> <LINK REL="Index" HREF="index.html" > <LINK REL="made" HREF="mailto:antlr-interest%40antlr.org?Subject=%5Bantlr-interest%5D%20Antlr%203%20and%20the%20newline%20token%20problem&In-Reply-To=000701c5f2a5%2481048590%246b02a8c0%40hercules"> <META NAME="robots" CONTENT="index,nofollow"> <META http-equiv="Content-Type" content="text/html; charset=us-ascii"> <LINK REL="Previous" HREF="014491.html"> <LINK REL="Next" HREF="014495.html"> </HEAD> <BODY BGCOLOR="#ffffff"> <H1>[antlr-interest] Antlr 3 and the newline token problem</H1> <B><NAME></B> <A HREF="mailto:antlr-interest%40antlr.org?Subject=%5Bantlr-interest%5D%20Antlr%203%20and%20the%20newline%20token%20problem&In-Reply-To=000701c5f2a5%2481048590%246b02a8c0%40hercules" TITLE="[antlr-interest] Antlr 3 and the newline token problem">mail at <EMAIL> </A><BR> <I>Sat Nov 26 08:52:36 PST 2005</I> <P><UL> <LI>Previous message: <A HREF="014491.html">[antlr-interest] Antlr 3 and the newline token problem </A></li> <LI>Next message: <A HREF="014495.html">[antlr-interest] Antlr 3 and the newline token problem </A></li> <LI> <B>Messages sorted by:</B> <a href="date.html#14492">[ date ]</a> <a href="thread.html#14492">[ thread ]</a> <a href="subject.html#14492">[ subject ]</a> <a href="author.html#14492">[ author ]</a> </LI> </UL> <HR> <!--beginarticle--> <PRE>Hi, &gt;<i> In any case you've omitted the per-character call for col/offset tracking. </I>&gt;<i> We were discussing line/col/offset counting not just newlines. </I> Well, the offset gets tracked anyway, as ANTLR is going through a String where it has to track the input position anyways. That value is IIRC also accessible (or could be made accessible very easily). What is left is line breaks. How would you imagine ANTLR Lexers do that more efficiently? E.g. 
always checking if the next character(s) is a \r \n, \n or \r? What about users that want \0 to be their line separator? Or users that don't want that at all? &gt;<i> If the lexer was built to do it properly, there would be no function calls </I>&gt;<i> at all. </I> The overhead of a function call on x86 is very low. Plus, your compiler might decide to inline, at least in managed languages, as said. For C++ a no-virtual-method-needed way via templates has been discussed. &gt;<i> &gt; I don't know what you're </I>&gt;<i> &gt; doing with the 4000 lines you have parsed in the same time, </I>&gt;<i> &gt; but are 4000 de-refs really significant compared to stepping </I>&gt;<i> &gt; through the parsing rules for 4000 lines of code and building the AST? </I>&gt;<i> </I>&gt;<i> Lexers don't build ASTs. The per-char calls needed for line/col/offset </I>&gt;<i> tracking would definitely hurt lexer performance if the counts were tacked </I>&gt;<i> on via overridden methods. </I> The only thing that is (currently) done using an overridden method is the newline thing, isn't it? A per character virtual method call would be ugly, that's true. Are you using the Lexer standalone? Even in that case I'd wonder if it really makes a difference. For each character you have at least one switch, you have the testing of alternatives etc. Will a virtual method call for every ~20 characters make a difference bigger than maybe 1%? I think there are more important places where ANTLR could - and is - be enhanced, e.g. the String copying thing or various things in the C++ part that have been discussed countless times on this list. I'm not generally arguing against including something like that, but you'd have to find a very flexible way to do so. Otherwise users will be unhappy because it doesn't match what they want to have, and their solution might get more complex. 
Martin </PRE> <!--endarticle--> <HR> <P><UL> <!--threads--> <LI>Previous message: <A HREF="014491.html">[antlr-interest] Antlr 3 and the newline token problem </A></li> <LI>Next message: <A HREF="014495.html">[antlr-interest] Antlr 3 and the newline token problem </A></li> <LI> <B>Messages sorted by:</B> <a href="date.html#14492">[ date ]</a> <a href="thread.html#14492">[ thread ]</a> <a href="subject.html#14492">[ subject ]</a> <a href="author.html#14492">[ author ]</a> </LI> </UL> <hr> <a href="http://www.antlr.org/mailman/listinfo/antlr-interest">More information about the antlr-interest mailing list</a><br> </body></html>
html
{ "applicant": "ke2:carnelian", "colors": [ "ke2:variants/carnelian/color_skin", "ke2:variants/carnelian/color_gemstone", "ke2:variants/carnelian/color_hair", "ke2:variants/carnelian/color_outfit", "ke2:variants/carnelian/color_visor", "ke2:variants/carnelian/color_eyes" ], "textures": [ ], "names": [ ] }
json
{"items":[{"id":"id5zlagfo1","quantity":"4"},{"id":"25g48h325s","quantity":"7"},{"id":"u5jw4lhjfw","quantity":"4"},{"id":"bkaugzu3fg","quantity":"3"},{"id":"baij5iwdw7","quantity":"5"}],"status":"closed","userEmail":"<EMAIL>"}
json
Garbba Rakshambigai Fertility Centre, Garbba Rakshambiagi Fertility Centre, 4, 6th cross street, United India Colony, India. At Trichy: Garbba Rakshambiagi Fertility Centre, Priya Nursing Home 8-B, Ramarayar Street, Tennur, Trichy 620017 India. 1. What is the current success rate for your clinic? (preferably by age as I know that the success rate will be much lower for somebody of my age) The cumulative pregnancy rate at our centre is about 42%. The break up of that figure would be the following :
english
West Ham vice-chairman Karren Brady insists that every Premier League club wants to complete the season despite there being “a lot of obstacles” to restarting the campaign. Clubs were presented with the league’s “Project Restart” plans at a meeting on Friday and were told the remaining 92 matches of the 2019-2020 season must be played at neutral venues. That is an extra sticking point to a litany of other logistical issues as there remains opposition on sporting integrity grounds about the idea of not finishing the campaign on a home-and-away basis. “At yesterday’s meeting, every club said it wants the season to restart,” said Brady in her column for the Sun newspaper. Brighton chief executive Paul Barber said on Thursday that the idea of not playing matches home and away was a “further imperfection” on top of games being behind closed doors. Before plans to return to playing can progress, the Premier League clubs must also address the thorny issues of testing and player welfare. Players, coaches and backroom staff would reportedly be tested two or three times a week and could be forced to quarantine in hotels away from their families to help prevent the spread of the virus. The Bundesliga’s ambitious plans to return to playing matches this month were dealt a blow with the news on Friday that three people at Cologne had tested positive for coronavirus and been placed in a 14-day quarantine. Manchester City striker Sergio Aguero admitted this week that players are “scared” at the prospect of being rushed back into action, while Brighton’s Glenn Murray questioned whether it is right for football’s return to drain resources from public services such as ambulance crews and police. The proposal of neutral venues by the Premier League is designed to reduce the number of medical, security and broadcast personnel needed at every match. 
“It’s clear that the location of the games when the league recommences will be subject to approval from both the Government and Sports Grounds Safety Authority,” added Brady.
english
<reponame>arian153/Engine-5<gh_stars>1-10 #include "Vector2.hpp" #include "..//Utility/Utility.hpp" #include <ostream> #include "Matrix22.hpp" #include "Vector3.hpp" #include "Vector4.hpp" #include "../Utility/VectorDef.hpp" namespace Engine5 { Vector2::Vector2(Real x, Real y) : x(x), y(y) { } Vector2::Vector2(Real arr[2]) : x(arr[0]), y(arr[1]) { } Vector2::Vector2(const Vector3& rhs) : x(rhs.x), y(rhs.y) { } Vector2::Vector2(const Vector4& rhs) : x(rhs.x), y(rhs.y) { } Vector2::Vector2(const Vector2& rhs) : x(rhs.x), y(rhs.y) { } Vector2::~Vector2() { } void Vector2::Set(Real _x, Real _y) { x = _x; y = _y; } void Vector2::SetZero() { x = 0.0f; y = 0.0f; } void Vector2::SetInverse() { x = 1.0f / x; y = 1.0f / y; } void Vector2::SetNegate() { x = -x; y = -y; } void Vector2::SetNormalize() { Real length = sqrtf(x * x + y * y); if (length > 0.f) { (*this) *= (1.f / length); } } void Vector2::SetHalf() { this->x *= 0.5f; this->y *= 0.5f; } void Vector2::SetClean() { if (Math::IsZero(x)) x = 0.0f; if (Math::IsZero(y)) y = 0.0f; } void Vector2::SetProjection(const Vector2& a, const Vector2& b) { Real multiplier = (a.DotProduct(b)) / (b.DotProduct(b)); this->x = b.x * multiplier; this->y = b.y * multiplier; } Real Vector2::Length() const { return sqrtf(x * x + y * y); } Real Vector2::LengthSquared() const { return (x * x + y * y); } Real Vector2::DistanceTo(const Vector2& rhs) const { Real _x = rhs.x - this->x; Real _y = rhs.y - this->y; return sqrtf(_x * _x + _y * _y); } Real Vector2::DistanceSquaredTo(const Vector2& rhs) const { Real _x = rhs.x - this->x; Real _y = rhs.y - this->y; return (_x * _x + _y * _y); } Vector2 Vector2::ProjectionTo(const Vector2& rhs) const { Vector2 result = rhs; Real multiplier = ((*this).DotProduct(rhs)) / (rhs.DotProduct(rhs)); result *= multiplier; return result; } Vector2 Vector2::ProjectionFrom(const Vector2& rhs) const { Vector2 result = *this; Real multiplier = (rhs.DotProduct(*this)) / ((*this).DotProduct(*this)); result *= 
multiplier; return result; } Vector2 Vector2::Unit() const { Vector2 result = *this; result.SetNormalize(); return result; } Vector2 Vector2::Half() const { Vector2 result = *this; result.SetHalf(); return result; } Vector2 Vector2::Inverse() const { return Vector2( Math::IsZero(x) ? 0.0f : 1.0f / this->x, Math::IsZero(y) ? 0.0f : 1.0f / this->y); } Real Vector2::DotProduct(const Vector2& rhs) const { return (x * rhs.x + y * rhs.y); } Real Vector2::CrossProduct(const Vector2& rhs) const { return (x * rhs.y - y * rhs.x); } Vector2 Vector2::CrossProduct(const Real& rhs) const { return Vector2(rhs * y, -rhs * x); } Matrix22 Vector2::OuterProduct(const Vector2& rhs) const { return Matrix22( this->x * rhs.x, this->x * rhs.y, this->y * rhs.x, this->y * rhs.y); } Vector2 Vector2::HadamardProduct(const Vector2& rhs) const { return Vector2(x * rhs.x, y * rhs.y); } bool Vector2::IsValid() const { return Math::IsValid(x) && Math::IsValid(y); } bool Vector2::IsZero() const { return Math::IsZero(x) && Math::IsZero(y); } bool Vector2::IsEqual(const Vector2& rhs) const { return Math::IsEqual(x, rhs.x) && Math::IsEqual(y, rhs.y); } bool Vector2::IsNotEqual(const Vector2& rhs) const { return Math::IsNotEqual(x, rhs.x) || Math::IsNotEqual(y, rhs.y); } Real Vector2::GrepVec1(size_t flag0) const { return (*this)[SafeFlag(flag0)]; } Vector2 Vector2::GrepVec2(size_t flag0, size_t flag1) const { return Vector2((*this)[SafeFlag(flag0)], (*this)[SafeFlag(flag1)]); } Vector3 Vector2::GrepVec3(size_t flag0, size_t flag1, size_t flag2) const { return Vector3((*this)[SafeFlag(flag0)], (*this)[SafeFlag(flag1)], (*this)[SafeFlag(flag2)]); } Vector4 Vector2::GrepVec4(size_t flag0, size_t flag1, size_t flag2, size_t flag3) const { return Vector4((*this)[SafeFlag(flag0)], (*this)[SafeFlag(flag1)], (*this)[SafeFlag(flag2)], (*this)[SafeFlag(flag3)]); } size_t Vector2::SafeFlag(size_t given) const { return given > Math::Vector::Y ? 
Math::Vector::Y : given; } bool Vector2::operator==(const Vector2& rhs) const { return Math::IsEqual(x, rhs.x) && Math::IsEqual(y, rhs.y); } bool Vector2::operator!=(const Vector2& rhs) const { return Math::IsNotEqual(x, rhs.x) || Math::IsNotEqual(y, rhs.y); } Vector2 Vector2::operator-() const { return Vector2(-x, -y); } Vector2& Vector2::operator=(const Vector2& rhs) { if (this != &rhs) { x = rhs.x; y = rhs.y; } return *this; } Vector2& Vector2::operator=(const Vector3& rhs) { x = rhs.x; y = rhs.y; return *this; } Vector2& Vector2::operator=(const Vector4& rhs) { x = rhs.x; y = rhs.y; return *this; } Vector2& Vector2::operator=(Real rhs) { x = rhs; y = rhs; return *this; } Vector2& Vector2::operator+=(const Vector2& rhs) { x += rhs.x; y += rhs.y; return *this; } Vector2& Vector2::operator+=(Real real) { x += real; y += real; return *this; } Vector2& Vector2::operator-=(const Vector2& rhs) { x -= rhs.x; y -= rhs.y; return *this; } Vector2& Vector2::operator-=(Real real) { x -= real; y -= real; return *this; } Vector2& Vector2::operator*=(Real real) { x *= real; y *= real; return *this; } Vector2& Vector2::operator/=(Real real) { x /= real; y /= real; return *this; } Vector2 Vector2::operator+(const Vector2& rhs) const { return Vector2(x + rhs.x, y + rhs.y); } Vector2 Vector2::operator+(Real real) const { return Vector2(x + real, y + real); } Vector2 Vector2::operator-(const Vector2& rhs) const { return Vector2(x - rhs.x, y - rhs.y); } Vector2 Vector2::operator-(Real real) const { return Vector2(x - real, y - real); } Vector2 Vector2::operator*(Real real) const { return Vector2(x * real, y * real); } Vector2 Vector2::operator/(Real real) const { return Vector2(x / real, y / real); } Vector2& Vector2::operator++() { ++x; ++y; return *this; } Vector2 Vector2::operator++(int) { Vector2 result(*this); ++(*this); return result; } Vector2& Vector2::operator--() { --x; --y; return *this; } Vector2 Vector2::operator--(int) { Vector2 result(*this); --(*this); return result; 
} Real Vector2::operator[](size_t i) const { return (&x)[i]; } Real& Vector2::operator[](size_t i) { return (&x)[i]; } Real DotProduct(const Vector2& vec1, const Vector2& vec2) { return vec1.DotProduct(vec2); } Real CrossProduct(const Vector2& vec1, const Vector2& vec2) { return (vec1.x * vec2.y - vec1.y * vec2.x); } Vector2 CrossProduct(Real vec1, const Vector2& vec2) { return Vector2(-vec1 * vec2.y, vec1 * vec2.x); } Vector2 CrossProduct(const Vector2& vec1, Real vec2) { return Vector2(vec2 * vec1.y, -vec2 * vec1.x); } Matrix22 OuterProduct(const Vector2& vec1, const Vector2& vec2) { return vec1.OuterProduct(vec2); } Vector2 HadamardProduct(const Vector2& vec1, const Vector2& vec2) { return vec1.HadamardProduct(vec2); } Vector2 Projection(const Vector2& vec1, const Vector2& vec2) { return vec1.ProjectionTo(vec2); } Vector2 LinearInterpolation(const Vector2& start, const Vector2& end, Real t) { Vector2 result; result.x = (1.0f - t) * start.x + t * end.x; result.y = (1.0f - t) * start.y + t * end.y; return result; } std::ostream& operator<<(std::ostream& os, const Vector2& rhs) { os << "[" << rhs.x << ", " << rhs.y << "]"; return os; } Vector2 operator*(Real real, const Vector2& vector) { return Vector2(vector.x * real, vector.y * real); } }
cpp
Lewis Hamilton is set to take a five-place grid penalty at Sunday's Austrian Grand Prix after Mercedes changed the gearbox on his car. As the previous unit has not completed six consecutive races, Hamilton will be served with a sanction that will come as a huge blow to his hopes of reeling in championship leader Sebastian Vettel this weekend. An official report from FIA Formula One Technical Delegate Jo Bauer read: "[Hamilton] did finish the last race in Baku and this gearbox change was before the six consecutive events expired. "As this is not in compliance with article 23. 5a of the 2017 Formula 1 sporting regulations, I am referring this matter to the stewards for their consideration. "The team informed the technical delegate about the gearbox change on Tuesday, 04th July 2017, at 11:29 hours. " The Formula One website describes the chances of Hamilton serving a penalty as "inevitable". Hamilton, who trails Ferrari's Vettel by 14 points in the drivers' standings, was quickest in both practice sessions on Friday and has qualified on pole in Austria for the past two years.
english
<filename>src/libraries/dynamicMesh/dynamicMesh/fvMeshAdder/fvMeshAdder.cpp<gh_stars>0 /*---------------------------------------------------------------------------*\ Copyright (C) 2011-2016 OpenFOAM Foundation ------------------------------------------------------------------------------- License This file is part of CAELUS. CAELUS is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. CAELUS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CAELUS. If not, see <http://www.gnu.org/licenses/>. \*---------------------------------------------------------------------------*/ #include "fvMesh.hpp" #include "fvMeshAdder.hpp" #include "faceCoupleInfo.hpp" #include "fvMesh.hpp" /* * * * * * * * * * * * * * * Static Member Data * * * * * * * * * * * * * */ namespace CML { defineTypeNameAndDebug(fvMeshAdder, 0); } // * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * // //- Calculate map from new patch faces to old patch faces. -1 where // could not map. 
CML::labelList CML::fvMeshAdder::calcPatchMap ( const label oldStart, const label oldSize, const labelList& oldToNew, const polyPatch& newPatch, const label unmappedValue ) { labelList newToOld(newPatch.size(), unmappedValue); label newStart = newPatch.start(); label newSize = newPatch.size(); for (label i = 0; i < oldSize; i++) { label newFacei = oldToNew[oldStart+i]; if (newFacei >= newStart && newFacei < newStart+newSize) { newToOld[newFacei-newStart] = i; } } return newToOld; } // * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * // // Inplace add mesh1 to mesh0 CML::autoPtr<CML::mapAddedPolyMesh> CML::fvMeshAdder::add ( fvMesh& mesh0, const fvMesh& mesh1, const faceCoupleInfo& coupleInfo, const bool validBoundary ) { mesh0.clearOut(); // Resulting merged mesh (polyMesh only!) autoPtr<mapAddedPolyMesh> mapPtr ( polyMeshAdder::add ( mesh0, mesh1, coupleInfo, validBoundary ) ); // Adjust the fvMesh part. const polyBoundaryMesh& patches = mesh0.boundaryMesh(); fvBoundaryMesh& fvPatches = const_cast<fvBoundaryMesh&>(mesh0.boundary()); fvPatches.setSize(patches.size()); forAll(patches, patchi) { fvPatches.set(patchi, fvPatch::New(patches[patchi], fvPatches)); } // Do the mapping of the stored fields // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ fvMeshAdder::MapVolFields<scalar>(mapPtr, mesh0, mesh1); fvMeshAdder::MapVolFields<vector>(mapPtr, mesh0, mesh1); fvMeshAdder::MapVolFields<sphericalTensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapVolFields<symmTensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapVolFields<tensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapSurfaceFields<scalar>(mapPtr, mesh0, mesh1); fvMeshAdder::MapSurfaceFields<vector>(mapPtr, mesh0, mesh1); fvMeshAdder::MapSurfaceFields<sphericalTensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapSurfaceFields<symmTensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapSurfaceFields<tensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapDimFields<scalar>(mapPtr, mesh0, mesh1); fvMeshAdder::MapDimFields<vector>(mapPtr, 
mesh0, mesh1); fvMeshAdder::MapDimFields<sphericalTensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapDimFields<symmTensor>(mapPtr, mesh0, mesh1); fvMeshAdder::MapDimFields<tensor>(mapPtr, mesh0, mesh1); return mapPtr; } // ************************************************************************* //
cpp
// MusicXML Class Library // Copyright (c) by <NAME> // Distributed under the MIT License #include "../impl/EncodingFunctions.h" #include "../core/Date.h" #include "../core/elements/Encoder.h" #include "../core/elements/Encoding.h" #include "../core/elements/Encoding.h" #include "../core/elements/EncodingChoice.h" #include "../core/elements/EncodingDate.h" #include "../core/elements/EncodingDescription.h" #include "../core/elements/Identification.h" #include "../core/elements/Software.h" #include "../core/elements/Supports.h" #include "../core/elements/MiscellaneousField.h" #include "../core/elements/Miscellaneous.h" namespace mx { namespace impl { void createEncoding( const api::EncodingData& inEncoding, core::ScoreHeaderGroup& header ) { auto identification = header.getIdentification(); auto encoding = identification->getEncoding(); if( !inEncoding.encoder.empty() ) { header.setHasIdentification( true ); identification->setHasEncoding( true ); auto item = core::makeEncodingChoice(); item->setChoice( core::EncodingChoice::Choice::encoder ); item->getEncoder()->setValue( core::XsString( inEncoding.encoder ) ); encoding->addEncodingChoice( item ); } core::Date tryDate{ inEncoding.encodingDate.year, inEncoding.encodingDate.month, inEncoding.encodingDate.day }; const bool isYearValid = inEncoding.encodingDate.year == tryDate.getYear(); const bool isMonthValid = inEncoding.encodingDate.month == tryDate.getMonth(); const bool isDayValid = inEncoding.encodingDate.day == tryDate.getDay(); if( isYearValid || isMonthValid || isDayValid ) { header.setHasIdentification( true ); identification->setHasEncoding( true ); auto item = core::makeEncodingChoice(); item->setChoice( core::EncodingChoice::Choice::encodingDate ); item->getEncodingDate()->setValue( tryDate ); encoding->addEncodingChoice( item ); } if( !inEncoding.encodingDescription.empty() ) { header.setHasIdentification( true ); identification->setHasEncoding( true ); auto item = core::makeEncodingChoice(); 
item->setChoice( core::EncodingChoice::Choice::encodingDescription ); item->getEncodingDescription()->setValue( core::XsString( inEncoding.encodingDescription ) ); encoding->addEncodingChoice( item ); } for( const auto& s : inEncoding.software ) { header.setHasIdentification( true ); identification->setHasEncoding( true ); auto item = core::makeEncodingChoice(); item->setChoice( core::EncodingChoice::Choice::software ); item->getSoftware()->setValue( core::XsString( s ) ); encoding->addEncodingChoice( item ); } for ( const auto& s : inEncoding.supportedItems ) { header.setHasIdentification( true ); identification->setHasEncoding( true ); auto item = core::makeEncodingChoice(); item->setChoice( core::EncodingChoice::Choice::supports ); auto supports = item->getSupports(); auto attributes = supports->getAttributes(); if ( !s.elementName.empty() ) { attributes->element.setValue( s.elementName ); } if ( !s.attributeName.empty() ) { attributes->hasAttribute = true; attributes->attribute.setValue( s.attributeName ); } if ( !s.specificValue.empty() ) { attributes->hasValue = true; attributes->value.setValue( s.specificValue ); } attributes->type = s.isSupported ? 
core::YesNo::yes : core::YesNo::no; encoding->addEncodingChoice( item ); } for ( const auto& m : inEncoding.miscelaneousFields ) { header.setHasIdentification( true ); identification->setHasEncoding( true ); identification->setHasMiscellaneous( true ); auto item = core::makeMiscellaneousField(); item->getAttributes()->name.setValue( m.key ); item->setValue( core::XsString{ m.value } ); identification->getMiscellaneous()->addMiscellaneousField( item ); } } api::EncodingData createEncoding( const core::Encoding& inEncoding ) { api::EncodingData outEncoding; bool isDateFound = false; bool isEncoderFound = false; bool isDescriptionFound = false; for( auto ec : inEncoding.getEncodingChoiceSet() ) { switch( ec->getChoice() ) { case core::EncodingChoice::Choice::encodingDate: { if( !isDateFound ) { outEncoding.encodingDate.year = ec->getEncodingDate()->getValue().getYear(); outEncoding.encodingDate.month = ec->getEncodingDate()->getValue().getMonth(); outEncoding.encodingDate.day = ec->getEncodingDate()->getValue().getDay(); isDateFound = true; } break; } case core::EncodingChoice::Choice::encoder: { if( !isEncoderFound ) { outEncoding.encoder = ec->getEncoder()->getValue().getValue(); isEncoderFound = true; } break; } case core::EncodingChoice::Choice::encodingDescription: { if( !isDescriptionFound ) { outEncoding.encodingDescription = ec->getEncodingDescription()->getValue().getValue(); isDescriptionFound = true; } break; } case core::EncodingChoice::Choice::software: { outEncoding.software.emplace_back( ec->getSoftware()->getValue().getValue() ); break; } case core::EncodingChoice::Choice::supports: { auto supportsElement = ec->getSupports(); auto attr = supportsElement->getAttributes(); api::SupportedItem item; item.elementName = attr->element.getValue(); if( attr->hasAttribute ) { item.attributeName = attr->attribute.getValue(); } item.isSupported = ( attr->type == core::YesNo::yes ); if( attr->hasValue ) { item.specificValue = attr->value.getValue(); } 
outEncoding.supportedItems.push_back( std::move( item ) ); break; } } } return outEncoding; } } }
cpp