text
stringlengths
1
1.04M
language
stringclasses
25 values
{ "name": "StrongNodeEdge Token", "symbol": "SNE", "id": "0x32934CB16DA43fd661116468c1B225Fc26CF9A8c", "decimals": 18, "coingecko_url": "https://www.coingecko.com/en/coins/strongnode", "market_cap_usd": 0.0, "market_cap_rank": null, "24_hr_volume_usd": 19150.11, "logoURI": "https://raw.githubusercontent.com/poolsharks-protocol/token-metadata/master/blockchains/polygon-pos/assets/0x32934CB16DA43fd661116468c1B225Fc26CF9A8c/logo.png" }
json
const sidebar = require('./sidebar'); module.exports = { title: 'Apostrophe 3 Documentation', theme: 'apostrophe', plugins: [ [ '@vuepress/google-analytics', { ga: 'UA-106613728-6' } ], [ 'sitemap', { hostname: 'https://v3.docs.apostrophecms.org' } ] ], markdown: { extendMarkdown: md => { md.use(require('markdown-it-attrs')); } }, themeConfig: { // Disabled to move into dropdown nav // repo: 'https://github.com/apostrophecms/apostrophe', docsRepo: 'https://github.com/apostrophecms/a3-docs', docsBranch: 'main', docsDir: 'docs', lastUpdated: 'Last updated', nextLinks: true, prevLinks: true, editLinks: true, sidebar, feedbackWidget: { docsRepoIssue: 'apostrophecms/a3-docs' }, logo: '/images/apos-dark.png', nav: [ { text: 'Versions', ariaLabel: 'Apostrophe versions', items: [ { text: 'Apostrophe 3', link: '/' }, { text: 'Apostrophe 2', link: 'https://v2.docs.apostrophecms.org', target: '_self' } ] }, { text: 'Sections', ariaLabel: 'Documentation sections', items: [ { text: 'Getting started', link: '/guide/setting-up.md' }, { text: 'Guide', link: '/guide/introduction.md' }, { text: 'Reference', link: '/reference/' }, { text: 'Cookbook', link: '/cookbook/' } ] }, { text: 'More', ariaLabel: 'More Apostrophe links', items: [ { text: 'Main site', link: 'https://apostrophecms.com', rel: false }, { text: 'Extensions', link: 'https://apostrophecms.com/extensions', rel: false }, { text: 'GitHub', link: 'https://github.com/apostrophecms/apostrophe', re: false }, { text: 'Discord', link: 'http://chat.apostrophecms.org/', rel: false }, { text: 'Forum', link: 'https://github.com/apostrophecms/apostrophe/discussions', rel: false }, { text: 'Stack Overflow', link: 'https://stackoverflow.com/questions/tagged/apostrophe-cms', rel: false } ] } ], algolia: { apiKey: '<KEY>', indexName: 'apostrophecms', algoliaOptions: { facetFilters: [ 'tags:v3' ] } } }, head: [ // <script type="text/javascript" id="hs-script-loader" async defer src="//js.hs-scripts.com/6104347.js"></script> [ 'script', 
{ type: 'text/javascript', id: 'hs-script-loader', async: 'true', defer: 'true', src: '//js.hs-scripts.com/6104347.js' } ] ] };
javascript
<gh_stars>1-10 import styled from 'styled-components' export const FormContainer = styled.div` height: 50vh; width: 100%; background-color: transparent; padding: 0; display: flex; align-items: center; justify-content: center; ` export const Form = styled.div` width: 90%; height: 90%; background-color: #d1d1d1; border-radius: 5px; .name { width: 100%; height: 30%; padding: 6%; display: flex; align-items: flex-start; justify-content: space-between; flex-direction: column; label { text-transform: uppercase; color: #499251; left: 0; margin-bottom: 5px; } input { border: none; outline: none; width: 100%; padding: 5px; font-size: 12pt; border-radius: 5px; } } .info { width: 100%; height: 30%; padding: 5%; display: flex; align-items: flex-start; justify-content: space-between; flex-direction: row; div { margin: 0 1%; } label { text-transform: uppercase; color: #499251; left: 0; margin-bottom: 5px; } input, select { border: none; outline: none; width: 100%; padding: 5px; font-size: 12pt; border-radius: 5px; background-color: #fff; } } .btn { width: 100%; height: 30%; padding: 6%; display: flex; align-items: center; justify-content: space-between; button { width: 100%; text-transform: uppercase; color: #499251; left: 0; margin-bottom: 5px; font-size: 12pt; border-radius: 5px; padding: 5px; font-size: 12pt; border: none; outline: none; transition: 0.2s; &:hover { box-shadow: 5px 5px 5px #b5b5b5; transition: 0.2s; } } } `
typescript
<reponame>balswyan/senior-capstone-fall-2018 import json with open('final.json') as f: people = json.load(f) string = "" for person in people['results']: string = person["dateoflastcontact"] person['year'] = string[0:4] person['month'] = string[5:7] person['day'] = string[8:10] with open('final2.json', 'w') as f: json.dump(people, f)
python
<reponame>CraignRush/IslandRacer
// Doxygen-generated search index: each entry maps a lowercase search term to
// its display name and the documentation page/anchor it links to.
// NOTE(review): this file appears to be auto-generated by Doxygen — do not
// edit by hand; regenerate the docs instead.
var searchData= [ ['sand',['Sand',['../track_8h.html#ac1404ca5b1e2d42556980919aff64ccca084469cc3b30ce6398320fb7ffddfbbc',1,'track.h']]], ['silverstone',['Silverstone',['../player_8h.html#a9c0489b2664a5ac943d6f70b3a8dcd67adc44f8c01f6ca67aaf1a86788b34953d',1,'player.h']]], ['steerleft',['SteerLeft',['../car_8h.html#aecfb003a08d29c0b75cd266ca0cadb76a75c51eb03194000babe1d96429234d95',1,'car.h']]], ['steerright',['SteerRight',['../car_8h.html#aecfb003a08d29c0b75cd266ca0cadb76acfaca206a70356b4f901dff03b335e17',1,'car.h']]] ];
javascript
<gh_stars>1-10 { "values": [ "luigis_mansion:room/normal/ceramics_studio/turn_lights/on/chest", "luigis_mansion:room/normal/ceramics_studio/turn_lights/on/lamp" ] }
json
Cyclone 'Biparjoy' looming over the Arabian Sea has intensified into a severe cyclonic storm, which may hinder the monsoon onset over Kerala, according to meteorologists. The India Meteorological Department said on June 9 that favourable conditions exist for the monsoon onset within two days. Thiruvananthapuram: Cyclone 'Biparjoy', the first storm brewing in the Arabian Sea this year, rapidly intensified into a severe cyclonic storm with meteorologists predicting a "mild" monsoon onset over Kerala and "weak" progress beyond the southern peninsula under its influence. The India Meteorological Department ( IMD ) on Wednesday morning said conditions are favourable for monsoon onset over Kerala within two days. Meteorologists, however, said the cyclone has been impacting the intensity of the monsoon and the onset over Kerala would be "mild". The Met Office said the severe cyclonic storm is likely to move nearly northwards and intensify into a very severe cyclonic storm. It would then move north-northwestwards during the subsequent three days. However, the IMD has not yet predicted any major impact on countries adjoining the Arabian Sea, including India, Oman, Iran, and Pakistan. IMD predicts severe 'Cyclone' Meteorologists say the tentative track of the system will be in the northward direction but storm at times defy the predicted track and the intensity. Forecasting agencies said the storm has been undergoing "rapid intensification", escalating from just a cyclonic circulation to a severe cyclonic storm in just 48 hours, defying earlier predictions. Atmospheric conditions and cloud mass indicate that the system is likely to sustain the strength of a very severe cyclone till June 12. Scientists say cyclonic storms in the Bay of Bengal and the Arabian Sea have been intensifying rapidly and retaining their intensity for a longer duration due to climate change. 
According to a study 'Changing status of tropical cyclones over the north Indian Ocean', the frequency, duration, and intensity of cyclones in the Arabian Sea have increased by about 20 per cent in the post-monsoon period and 40 per cent in the pre-monsoon period. There has been a 52 per cent increase in the number of cyclones in the Arabian Sea, while very severe cyclones have increased by 150 per cent. "The increase in cyclone activity in the Arabian Sea is tightly linked to the rising ocean temperatures and increased availability of moisture under global warming. The Arabian Sea used to be cool, but now it is a warm pool," said Roxy Mathew Koll , a climate scientist at the Indian Institute of Tropical Meteorology. "The oceans have become warmer already on account of climate change. In fact, a recent study shows that the Arabian Sea has warmed up by almost 1. 2 degrees Celsius since March, thus conditions are very much favorable for the rapid intensification of the system (Cyclone Bipajoy) so it has the potential to sustain the strength for a longer period," said Raghu Murtugudde, Professor, Department of Atmospheric and Oceanic Science, University of Maryland and IIT Bombay. Mahesh Palawat, vice president (climate and meteorology) of Skymet Weather, said the cloud mass is concentrated around this system and enough moisture is not reaching the Kerala coast. Though the criteria for monsoon onset can be met in the next two days, it will not be a thumping start. After the onset over Kerala, the monsoon will remain "weak" until the storm degenerates around June 12, he said. "The powerful weather system in the Arabian Sea may spoil the advancement of the monsoon deep inland. Under their influence, the monsoon stream may reach coastal parts but will struggle to penetrate beyond the Western Ghats," Skymet Weather said on Tuesday. 
A senior IMD scientist said the southern peninsula will get rain under the influence of the cyclonic storm and a low-pressure system developing in the Bay of Bengal. However, further progress of the monsoon beyond the southern peninsula will happen after the cyclone degenerates. "It would not be the case of classic monsoon onset, satisfying all the given criteria. We would have scattered rains along the West Coast strip but no inland penetration and widespread rains," Koll said. The southwest monsoon normally sets in over Kerala on June 1 with a standard deviation of about seven days. In mid-May, the IMD said monsoon might arrive in Kerala by June 4. Skymet had predicted the monsoon onset over Kerala on June 7 with an error margin of three days. Over the last 150 years, the date of monsoon onset over Kerala has varied widely, the earliest being May 11, 1918, and the most delayed being June 18, 1972, according to IMD data. The southwest monsoon arrived in the southern state on May 29 last year, June 3 in 2021, June 1 in 2020, June 8 in 2019, and May 29 in 2018. Research shows a delay in the monsoon onset over Kerala (MOK) does not necessarily mean a delay in the monsoon onset over northwest India. However, a delay in the MOK is generally associated with a delay in onset at least over the southern states and Mumbai. Scientists say a delayed MOK also does not impact the total rainfall over the country during the season. India is expected to get normal rainfall during the southwest monsoon season despite the evolving El Nino conditions, the IMD had earlier said. Northwest India is expected to see normal to below-normal rainfall. East and northeast, central, and south peninsula are expected to receive normal rainfall at 94-106 per cent of the long-period average of 87 cm. 
Rainfall less than 90 per cent of the long-period average is considered 'deficient', between 90 per cent and 95 per cent is 'below normal', between 105 per cent and 110 per cent is 'above normal' and more than 110 per cent is 'excess' precipitation. Normal rainfall is critical for India's agricultural landscape, with 52 per cent of the net cultivated area relying on it. It is also crucial for the replenishing of reservoirs critical for drinking water apart from power generation across the country. Rainfed agriculture accounts for about 40 per cent of the country's total food production, making it a crucial contributor to India's food security and economic stability. (with PTI inputs)
english
Imagine taking a 20-year home loan and paying EMIs for more than 26 years. Unfortunately, the RBI's flurry of repo rate hikes in recent months has pushed up home loan rates from 6.7% in April to 8.1% now. In this video, we will tell you what borrowers can do to keep their loan tenure the same and how you can pay off your 20-year home loan in just 12 years.
english
/* Wrapping flexbox gallery row with items spaced evenly.
   The display fallbacks cover legacy engines (old WebKit box model,
   old Firefox, IE 10, Safari 6.1-8). */
.gallery {
  display: -webkit-box;
  display: -moz-box;
  display: -ms-flexbox;
  display: -webkit-flex;
  display: flex;
  /* Fix: the prefixed declaration must come BEFORE the standard one so
     that browsers understanding both use the standard behavior; the
     original listed -webkit-flex-flow after flex-flow, letting the
     prefixed form win. */
  -webkit-flex-flow: row wrap;
  flex-flow: row wrap;
  justify-content: space-around;
}
css
{"meta":{"generated_at":"2016-09-02T18:08:18.290Z","location":"Singapore","total_repos":4},"repos":[{"name":"cf-ui","html_url":"https://github.com/cloudflare/cf-ui","description":":gem: CloudFlare UI Framework","pushed_at":"2016-09-02T17:56:27Z","updated_at":"2016-08-31T16:55:48Z","formatted_time":"8 minutes ago","language":"JavaScript","stargazers_count":1033,"forks_count":44,"open_issues_count":5,"size":1406,"type":"Organization","owner":{"login":"cloudflare","avatar_url":"https://avatars.githubusercontent.com/u/314135?v=3","html_url":"https://github.com/cloudflare"},"contributors":[{"login":"thejameskyle","html_url":"https://github.com/thejameskyle","contributions":191},{"login":"jwineman","html_url":"https://github.com/jwineman","contributions":8},{"login":"akenn","html_url":"https://github.com/akenn","contributions":6},{"login":"thellimist","html_url":"https://github.com/thellimist","contributions":5},{"login":"wyuenho","html_url":"https://github.com/wyuenho","contributions":2},{"login":"L1fescape","html_url":"https://github.com/L1fescape","contributions":1},{"login":"plumlee","html_url":"https://github.com/plumlee","contributions":1},{"login":"toekneestuck","html_url":"https://github.com/toekneestuck","contributions":1}]},{"name":"deep-learning-workshop","html_url":"https://github.com/mdda/deep-learning-workshop","description":"Deep Learning Workshop : Including a VirtualBox VM with pre-configured Jupyter, Theano/Lasange, models and data","pushed_at":"2016-09-02T16:22:26Z","updated_at":"2016-08-30T06:44:38Z","formatted_time":"an hour ago","language":"Jupyter Notebook","stargazers_count":120,"forks_count":15,"open_issues_count":2,"size":16039,"type":"User","owner":{"login":"mdda","avatar_url":"https://avatars.githubusercontent.com/u/362532?v=3","html_url":"https://github.com/mdda"},"contributors":[{"login":"mdda","html_url":"https://github.com/mdda","contributions":604}]},{"name":"codelf","html_url":"https://github.com/unbug/codelf","description":"Organize 
your GitHub stars and repositories.Search over projects from GitHub, Bitbucket, Google Code, Codeplex, Sourceforge, Fedora Project to find real-world usage variable names","pushed_at":"2016-09-02T02:33:55Z","updated_at":"2016-09-02T07:41:56Z","formatted_time":"15 hours ago","language":"HTML","stargazers_count":1224,"forks_count":102,"open_issues_count":2,"size":11973,"type":"User","owner":{"login":"unbug","avatar_url":"https://avatars.githubusercontent.com/u/799578?v=3","html_url":"https://github.com/unbug"},"contributors":[{"login":"unbug","html_url":"https://github.com/unbug","contributions":306},{"login":"BossChow","html_url":"https://github.com/BossChow","contributions":1}]},{"name":"drozer","html_url":"https://github.com/mwrlabs/drozer","description":"The Leading Security Assessment Framework for Android.","pushed_at":"2016-09-01T18:41:05Z","updated_at":"2016-09-02T09:17:07Z","formatted_time":"a day ago","language":"Python","stargazers_count":718,"forks_count":314,"open_issues_count":31,"size":47779,"type":"Organization","owner":{"login":"mwrlabs","avatar_url":"https://avatars.githubusercontent.com/u/1469843?v=3","html_url":"https://github.com/mwrlabs"},"contributors":[{"login":"dbradberry","html_url":"https://github.com/dbradberry","contributions":547},{"login":"metall0id","html_url":"https://github.com/metall0id","contributions":178},{"login":"ikelos","html_url":"https://github.com/ikelos","contributions":54},{"login":"HenryHoggard","html_url":"https://github.com/HenryHoggard","contributions":35},{"login":"JosephRedfern","html_url":"https://github.com/JosephRedfern","contributions":34},{"login":"luander","html_url":"https://github.com/luander","contributions":7},{"login":"dnet","html_url":"https://github.com/dnet","contributions":5},{"login":"rvantonder","html_url":"https://github.com/rvantonder","contributions":4},{"login":"agnivesh","html_url":"https://github.com/agnivesh","contributions":3},{"login":"nezza","html_url":"https://github.com/nezza","contributi
ons":2},{"login":"vanhoefm","html_url":"https://github.com/vanhoefm","contributions":1},{"login":"jduck","html_url":"https://github.com/jduck","contributions":1},{"login":"Droidmannn","html_url":"https://github.com/Droidmannn","contributions":1},{"login":"matt-hillman","html_url":"https://github.com/matt-hillman","contributions":1},{"login":"tombrium","html_url":"https://github.com/tombrium","contributions":1},{"login":"amarekano","html_url":"https://github.com/amarekano","contributions":1}]}]}
json
A panel resumes voting on the final draft of a new constitution for Egypt. A panel resumes voting on the final draft of a new constitution for Egypt. The current panel includes representatives from civil society, political parties, the army, police, and the Coptic church. The new constitution is expected to spark controversy, as it will strengthen the army's hand. It will also forbid the formation of parties based on religious grounds. Egypt's interim authorities suspended the previous constitution written during Mohamed Morsi's presidency after his removal on July 3. Egypt's ousted president Mohamed Morsi has questioned the legality of his trial, as he appears before a court for a prison break in 2011. Egypt's ousted president Mohamed Morsi has questioned the legality of his trial, as he appears before a court for a prison break in 2011. Morsi and 130 others are accused of the prison break during the uprising against former dictator Hosni Mubarak. The former president told the judges from inside a glass-encased metal case that he remains the country's legitimate leader, and that the court itself is illegal. Morsi was flown by helicopter from the Borg al-Arab prison in Alexandria. The trial has now been adjourned until February 22. His other trial on the charge of inciting violence against anti-government protests, will resume on Saturday. The case has been delayed twice since it opened in November. Egypt's first democratically-elected president has been implicated in two other cases. The country has been gripped by mass protests since the military toppled him in early July last year. Israeli Prime Minister Benjamin Netanyahu has once again rejected calls to extend a partial freeze on settlement activities in the occupied territories. "The prime minister has not changed his position on this issue, there is no question of extending the moratorium," an unnamed Israeli official told AFP on Friday. 
Last week, US President Barack Obama said it would make sense for Tel Aviv to extend the 10-month moratorium during its direct talks with the Palestinian Authority. US Secretary of State Hillary Clinton urged Tel Aviv on Thursday to extend the deadline, which expires at the end of this month. "Where we sit now it would be useful for some extension, it would be extremely useful," Clinton said. Meanwhile, the European Union said that the settlements are "illegal under international law" and called for an "extension of the moratorium decided by Israel." Acting Palestinian Authority Chief Mahmoud Abbas has threatened to leave the negotiations if Israel resumes its settlement activities in the occupied West Bank and East al-Quds (Jerusalem). According to Palestinians, Israel's insistence of the continuation of settlement projects would be a major obstacle to the establishment of a viable Palestinian state. The acting Palestinian Authority chief says direct talks with Tel Aviv will continue even if Israel resumes the construction of Jewish settlements in the West Bank. Mahmoud Abbas had repeatedly threatened to walk away from the US-sponsored direct negotiations, launched this month in Washington, if Tel Aviv does not extend its partial settlement freeze -- due to expire at the end of the month. "I cannot say I will leave the negotiations, but it's very difficult for me to resume talks if Prime Minister Netanyahu declares that he will continue his activity in the West Bank and Jerusalem,'' Abbas told a closed meeting of Jewish American leaders in New York late Tuesday, according to a transcript of the event obtained by the Associated Press. The U-turn will likely anger most Palestinians, who believe Abbas has backed away from his ultimatum under Washington pressure. During the meeting, Abbas again asked Israel to extend the freeze for several months while the two sides negotiate the final borders between Israel and a future Palestinian state. 
"Let's demarcate the border, at that time. Israelis will be free to build in their territory and the Palestinians the same,'' Abbas said. Despite the international calls, Tel Aviv has so far refused to renew the partial 10-month freeze on new settlement activities in the occupied Palestinian territories. Israeli settlement expansion is illegal under international laws. Iran has officially started injecting gas into hundreds of centrifuges at its Fordow nuclear plant in the fourth step away from the 2015 nuclear deal.
english
<gh_stars>0
//
// Decompiled by Procyon v0.5.36
//

package groovy.swing.impl;

import java.awt.Component;

/**
 * Facade over an object that wraps or is backed by an AWT component,
 * letting callers obtain the underlying {@link Component} without
 * knowing the concrete wrapper type.
 */
public interface ComponentFacade
{
    /**
     * @return the AWT component backing this facade
     */
    Component getComponent();
}
java
/**
 * Copyright &copy; 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
 */
package com.thinkgem.jeesite.modules.exam.service;

import com.thinkgem.jeesite.common.persistence.Page;
import com.thinkgem.jeesite.common.service.CrudService;
import com.thinkgem.jeesite.modules.exam.dao.*;
import com.thinkgem.jeesite.modules.exam.entity.ExamStandardBaseInfo;
import com.thinkgem.jeesite.modules.exam.entity.ExamWorkflowDefine;
import com.thinkgem.jeesite.modules.sys.entity.Dict;
import com.thinkgem.jeesite.modules.sys.utils.UserUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.ArrayList;
import java.util.List;

/**
 * Service for evaluation workflow definitions (CRUD, enable/disable,
 * and lookup of the evaluation-standard templates a workflow uses).
 *
 * @author eav.liu
 * @version 2019-12-09
 */
@Service
@Transactional(readOnly = true)
public class ExamWorkflowDefineService extends CrudService<ExamWorkflowDefineDao, ExamWorkflowDefine> {

    @Autowired
    private ExamWorkflowSegmentGuanlianService examWorkflowSegmentGuanlianService;
    @Autowired
    private ExamWorkflowDefineDao examWorkflowDefineDao;
    @Autowired
    private ExamStandardBaseInfoDao examStandardBaseInfoDao;
    @Autowired
    private ExamStandardTemplateDataDao examStandardTemplateDataDao;
    @Autowired
    private ExamWorkflowSegmentsTaskService examWorkflowSegmentsTaskService;
    @Autowired
    private ExamWorkflowSegmentsTaskDao examWorkflowSegmentsTaskDao;
    @Autowired
    private ExamWorkflowSegmentGuanlianDao examWorkflowSegmentGuanlianDao;
    @Autowired
    private ExamWorkflowSegmentsDefineDao examWorkflowSegmentsDefineDao;

    public ExamWorkflowDefine get(String id) {
        return super.get(id);
    }

    /**
     * Lists workflow definitions with data-scope filtering applied.
     * Original note (translated): "this method is currently only called
     * when adding an evaluation and is unused elsewhere; creators are
     * inconsistent (all four performance-office accounts), so without the
     * filter group leaders could not see the records".
     */
    public List<ExamWorkflowDefine> findList(ExamWorkflowDefine examWorkflowDefine) {
        examWorkflowDefine.getSqlMap().put("dsf", dataScopeFilter(UserUtils.getUser(), "o", "a"));
        return super.findList(examWorkflowDefine);
    }

    /**
     * Pages workflow definitions with the same data-scope filter as
     * {@link #findList(ExamWorkflowDefine)}.
     */
    public Page<ExamWorkflowDefine> findPage(Page<ExamWorkflowDefine> page, ExamWorkflowDefine examWorkflowDefine) {
        examWorkflowDefine.getSqlMap().put("dsf", dataScopeFilter(UserUtils.getUser(), "o", "a"));
        return super.findPage(page, examWorkflowDefine);
    }

    /**
     * Saves a workflow definition. The selected template ids are cleaned of
     * empty entries and persisted as a comma-separated string; new records
     * are created disabled (isUse = "0").
     *
     * NOTE(review): a large block of commented-out legacy code that
     * synchronized the segment-association and task-assignment tables on
     * save was removed here as dead code; recover it from version control
     * if that behavior must be restored.
     */
    @Transactional(readOnly = false)
    public void save(ExamWorkflowDefine examWorkflowDefine) {
        // Filter out empty strings from the selected template ids.
        String[] filtered = removeNullStringArray(examWorkflowDefine.getTemplatesIdsArr());
        if (filtered != null && filtered.length > 0) {
            examWorkflowDefine.setTemplatesIds(StringUtils.join(filtered, ","));
        } else {
            examWorkflowDefine.setTemplatesIds(null);
        }
        // New workflows are disabled by default.
        if (examWorkflowDefine.getIsNewRecord()) {
            examWorkflowDefine.setIsUse("0");
        }
        super.save(examWorkflowDefine);
    }

    @Transactional(readOnly = false)
    public void delete(ExamWorkflowDefine examWorkflowDefine) {
        super.delete(examWorkflowDefine);
    }

    /**
     * Deletes a workflow definition and its dependent rows.
     * Original note (translated): "changed to a logical delete so that
     * evaluations remain queryable after an accidental delete" — NOTE(review):
     * whether deleteById actually performs a logical delete depends on the
     * DAO mapping; confirm there.
     *
     * @param id workflow definition id
     */
    @Transactional(readOnly = false)
    public void deleteById(String id) {
        // Delete the main record.
        examWorkflowDefineDao.deleteById(id);
        // Delete the workflow-segment association rows.
        examWorkflowSegmentGuanlianDao.deleteAllByWdId(id);
        // Delete the task-assignment rows.
        examWorkflowSegmentsTaskDao.deleteAllByWorkflowId(id);
    }

    /**
     * Enables or disables an evaluation workflow.
     *
     * @param id    workflow definition id
     * @param isUse "1" to enable, "0" to disable — TODO confirm encoding
     */
    @Transactional(readOnly = false)
    public void isUsable(String id, String isUse) {
        examWorkflowDefineDao.updateIsUse(id, isUse);
    }

    /**
     * Builds the dropdown options for evaluation-standard templates
     * (enabled standards only), without data-scope filtering.
     *
     * @return label/value pairs (name/id) for each enabled standard
     */
    public List<Dict> templateFile() {
        List<ExamStandardBaseInfo> allList = examStandardBaseInfoDao.findStandardList();
        ArrayList<Dict> list = new ArrayList<>();
        if (allList != null && allList.size() > 0) {
            for (ExamStandardBaseInfo e : allList) {
                Dict dict = new Dict();
                dict.setLabel(e.getName());
                dict.setValue(e.getId());
                dict.setId(e.getId());
                list.add(dict);
            }
        }
        return list;
    }

    /**
     * Builds the dropdown options for evaluation-standard templates
     * (enabled standards only), optionally applying data-scope filtering
     * for the current user.
     *
     * @param isFilter true to restrict results to the user's data scope
     * @return label/value pairs (name/id) for each enabled standard
     */
    public List<Dict> templateFile(boolean isFilter) {
        ExamStandardBaseInfo examStandardBaseInfo = new ExamStandardBaseInfo();
        if (isFilter) {
            examStandardBaseInfo.getSqlMap().put("dsf", alterDataScopeFilter(UserUtils.getUser(), "o", "a"));
        }
        List<ExamStandardBaseInfo> allList = examStandardBaseInfoDao.findStandardListFilter(examStandardBaseInfo);
        ArrayList<Dict> list = new ArrayList<>();
        if (allList != null && allList.size() > 0) {
            for (ExamStandardBaseInfo e : allList) {
                Dict dict = new Dict();
                dict.setLabel(e.getName());
                dict.setValue(e.getId());
                dict.setId(e.getId());
                list.add(dict);
            }
        }
        return list;
    }

    /**
     * Removes null and empty strings from the given array.
     *
     * @param arrayString input array (may be null)
     * @return a new array without null/empty entries, or null when the
     *         input is null or empty
     */
    public String[] removeNullStringArray(String[] arrayString) {
        if (arrayString != null && arrayString.length > 0) {
            List<String> kept = new ArrayList<String>();
            for (String s : arrayString) {
                // Bug fix: the original compared with s != "" — a reference
                // comparison that does not detect empty strings coming from
                // request parameters; use isEmpty() for a value comparison.
                if (s != null && !s.isEmpty()) {
                    kept.add(s);
                }
            }
            return kept.toArray(new String[kept.size()]);
        }
        return null;
    }

    /**
     * Loads full workflow information for automatic performance evaluation.
     */
    public List<ExamWorkflowDefine> findAllInfo(ExamWorkflowDefine examWorkflowDefine) {
        return examWorkflowDefineDao.findAllInfo(examWorkflowDefine);
    }
}
java
AYUSHMAN BHARAT DIWAS 2023: A significant portion of India’s population lives below the poverty line (BPL) and cannot afford expensive healthcare facilities. Ayushman Bharat Yojana is a government health scheme that provides healthcare to those who belong to the economically weaker sections in India. The scheme was initiated to move away from the selective and segmented health service approach to something that is more comprehensive. Ayushman Bharat Diwas is celebrated on April 30. - Ayushman Bharat - Pradhan Mantri Jan Arogya Yojana (AB-PMJAY), was launched on September 23, 2018, in Ranchi, Jharkhand, by Prime Minister Narendra Modi. - Through the Ayushman Bharat scheme, the government intends to provide affordable healthcare to over 50 crore beneficiaries. The services provided under the PMJAY scheme will be paperless and cashless. - Families registered under the PMJAY scheme will receive up to Rs 5 lakhs of health insurance per year. About 10. 7 crore people receive this health insurance amount for secondary and tertiary care and hospitalisation. - The PMJAY scheme falls under the Universal Health Coverage (UHC) and Sustainable Development Goals (SDG) agendas of the WHO and the UN. - Ayushman Bharat also reflects UN’s goal to “Leave No One Behind,” as a target for the 2030 Sustainable Development Goals (SDGs). - The PMJAY scheme can be accessed at designated Health and Wellness Centres (HWCs). The cost of treatment is borne by the government as per the scheme. There are 1, 59, 662 HWCs operating in the country, as of April 2023. - Apart from emergencies, trauma, maternal and child health care services, the HWCs are created to provide services related to non-communicable diseases, ENT care, palliative and rehabilitative care, and mental health. - As per the 71st Round of National Sample Survey (NSS), 85. 9% of rural households and 82% of urban households were found to have zero access to healthcare insurance. 
- The Pradhan Mantri Jan ArogyaYojana covers the patients’ expenses from the start of hospitalisation till the post-hospitalisation recovery process. - 30 states and union territories have signed the memorandum of understanding (MoU) with the central government to implement the Ayushman Bharat scheme.
english
{ "name": "webhook-stream", "description": "Collects webhook requests and streams them to trusted clients.", "keywords": [ "webhooks", "utility" ], "website": "http://github.com/progrium/webhook-stream", "repository": "http://github.com/progrium/webhook-stream", "success_url": "/", "env": { "BUILDPACK_URL": "https://github.com/kr/heroku-buildpack-go.git", "SECRET": { "description": "A secret.", "generator": "secret" } } }
json
ITANAGAR: The China's People's Liberation Army (PLA) has confirmed that five youths missing from Arunachal Pradesh have been found by them and their handing over process to Indian authorities is being worked out, Union minister Kiren Rijiju said on Tuesday. The five youths had gone missing on Friday from the Sino-Indian border in Upper Subansiri district of Arunachal Pradesh. "China's PLA has responded to the hotline message sent by Indian Army. They have confirmed that the missing youths from Arunachal Pradesh have been found by their side. Further modalities to handover the persons to our authority is being worked out," Rijiju tweeted. Engaged as porters and guides by the Indian army, the five youths from the Nacho area of Upper Subansiri district who were part of a seven-member group which went for hunting in a jungle were reported missing by their families through social media last Friday. China, on Monday brushed off concerns over the whereabouts of the five youths and needled India, saying it has never recognised the northeastern state which it claims is part of south Tibet. "China's position on the east sector of the China-India boundary, or Zangnan (the southern part of China's Xizang (Tibet) ), is consistent and clear," Chinese foreign ministry spokesperson Zhao Lijian told a media briefing in Beijing, adding the Chinese government has never recognized the so-called "Arunachal Pradesh". "I'm not aware of the situation you mentioned," the spokesperson said when asked about any updates about the missing Indian nationals. Rijiju took to Twitter on Sunday to say the Indian army has already sent a hotline message to the PLA establishment at the border point in Arunachal Pradesh and that a response is awaited. The development comes at a time when the Indian army has enhanced its deployment along the 3,400 km-long Line of Actual Control (LAC) in view of the tense border row between India and China in eastern Ladakh. 
Nacho is the last administrative circle along the McMahon line and is around 120 km from the district headquarters Daporijo. Two members from the group of youth which went for hunting in the jungle returned home and informed the families of the other five that they had been whisked away by the Chinese troops from Sera-7, an Army patrol zone located about 12 km further north of Nacho. A senior official of the Arunachal Pradesh government said on Saturday the local police has launched a probe into the matter. The Superintendent of Police (SP) of Upper Subansiri said no one has lodged any formal complaint with the police or the armed forces that guard strategic areas along the LAC. Earlier, talking about the incident, the SP Taru Gussar had said, "We came to know from local sources that five people belonging to the Tagin community were abducted by the PLA from a jungle near Nacho when they were out hunting". "We are trying to verify the fact and are in touch with the Army since there have been past instances of the PLA capturing locals from the LAC and releasing them," the SP said. Those allegedly kidnapped have been identified as Toch Singkam, Prasat Ringling, Dongtu Ebiya, Tanu Baker and Ngaru Diri. AAPSU president Hawa Bagang and General Secretary Tabom Dai in a joint statement said, "People of Arunachal Pradesh are proud Indians and staunch nationalists. We don't need any recognition or validation from a foreign entity. Instead of issuing misleading statements, the Chinese government should direct its armed forces to immediately release five civilians from Nacho area in Upper Subansiri district who, were abducted by PLA personnel recently. " The AAPSU termed the incident as "unfortunate" and called upon both the Centre and the State government to ensure early and safe release of the youths. 
"The recent incident is not an isolated case as time and again the PLA has been repeatedly indulging in similar antics whereby innocent civilians living at the inhospitable border areas are taken captive at regular intervals," the union said in its statement. In March, a 21-year-old man was abducted by the PLA from Asapila sector near the McMahon line. While his two friends managed to escape, Togley Sinkam was taken away at gunpoint, his family had said. After 19 days in captivity, the youth was released by the Chinese PLA.
english
package mytypes // MyString is a custom version of the `string` type. type MyString string // Len returns the length of the string. func (s MyString) Len() int { return len(s) }
go
MuscleBlaze MB Peanut Butter Creamy is primarily used for Nutritional Deficiency, Fatigue, High Cholesterol. The key ingredient of MuscleBlaze MB Peanut Butter Creamy is Peanut Butter . The properties of which have been shared below. Medicine or substances used to treat cancer by preventing the growth of cancer cells. Drugs that are used for managing excessive blood sugar levels in case of diabetes. Substances that provide nourishment to the body. Medicine or a substance that reduces lipid concentration in the blood which is beneficial in decreasing cholestrol level and preventing heart diseases. Agents that enhance protein synthesis and augments muscle growth. No side effects of MuscleBlaze MB Peanut Butter Creamy have been reported in the medical literature. However, you should always consult your doctor before using MuscleBlaze MB Peanut Butter Creamy.
english
<filename>package.json { "name": "frontend-mithril", "version": "0.0.1", "description": "Enterprise hello,world client", "main": "index.js", "scripts": { "build": "webpack src/index.js --output bin/bundle.js -p", "start": "webpack src/index.js --output bin/bundle.js -d --watch", "start:dev": "webpack-dev-server --mode=development --host=localhost --watch-content-base --open-page index.html", "test": "ospec" }, "keywords": [], "author": "<NAME>", "license": "MIT", "dependencies": { "mithril": "^1.1.6" }, "devDependencies": { "eslint": "^5.10.0", "eslint-config-prettier": "^3.3.0", "eslint-plugin-prettier": "^3.0.0", "prettier": "^1.15.3", "webpack": "^4.28.0", "webpack-cli": "^3.1.2", "webpack-dev-server": "^3.1.13" } }
json
<filename>README.md # node-router 原生nodejs路由系统 ### 克隆 clone - $ `git clone <EMAIL>:johnnynode/node-router.git --depth 1` ### 安装 install - $ `npm i` ### 运行 run - $ `node index` ### 访问 visit - $ `http://localhost:3000/`
markdown
Dinesh Mongia reminded the selectors of his first-class credentials by hammering an impressive 101 not out for the Indian Board President's XI against West Indies. Mongia, who has yet to play a Test match despite featuring heavily in India's one-day plans, rescued his side from 186 for 7 as they declared on 275 for 8. He added 56 for the eighth wicket with Amit Mishra, with three sixes and 12 fours in his 14th first-class hundred. The Board XI had been reduced to 17 for 2 during a fiery opening spell from Mervyn Dillon, who dismissed Sanjay Bangar and Gautam Gambhir in his opening five overs. Hemang Badani (67) and Mohammad Kaif steadied the ship with a 91-run partnership, but after Carl Hooper snared Kaif for 40, three more wickets fell for 20 runs, with Jermaine Lawson grabbing two of them. West Indies reached 20 for 0 in reply at stumps, with Chris Gayle not out on 14 and Wavell Hinds on 5.
english
package quina.component.error; import quina.component.ComponentConstants; import quina.component.ComponentType; import quina.http.Request; import quina.http.Response; import quina.http.server.response.AbstractResponse; import quina.http.server.response.AnyResponse; import quina.http.server.response.AnyResponseImpl; /** * エラーコンポーネント. * * ラムダ実装で、直接Routerにセットする場合は * こちらを利用します. */ @FunctionalInterface public interface ErrorComponent { /** * コンポーネントタイプを取得. * @return ComponentType コンポーネントタイプが返却されます. */ default ComponentType getType() { return ComponentType.Error; } /** * 対応HTTPメソッド定義を取得. * @return int このコンポーネントが対応するHTTPメソッド定義が返却されます. */ default int getMethod() { return ComponentConstants.HTTP_METHOD_ALL; } /** * HttpError処理を実行. * @param state HTTPステータスを設定します. * @param req HttpRequestを設定します. * @param res HttpResponseを設定します. */ default void call(int state, Request req, Response<?> res) { call(state, req, res, null); } /** * HttpError処理を実行. * @param state HTTPステータスを設定します. * @param req HttpRequestを設定します. * @param res HttpResponseを設定します. * @param e 例外を設定します. */ default void call( int state, Request req, Response<?> res, Throwable e) { // 真っ更のResponseが必要なので新規作成. res = new AnyResponseImpl(((AbstractResponse<?>)res).getElement(), ((AbstractResponse<?>)res).getSrcComponentType()); final boolean json = ((AbstractResponse<?>)res) .getSrcComponentType().isRESTful(); // json返却の場合. if(json) { // JSON返却条件を設定. res.setContentType("application/json"); } else { // HTML返却条件を設定. res.setContentType("text/html"); } // 実行処理. call(state, json, req, (AnyResponse)res, e); } /** * HttpError処理を実行. * @param state HTTPステータスを設定します. * @param json エラーが発生した呼び出しコンポーネントが * [RESFful]の場合は[true]が設定されます. * @param req HttpRequestを設定します. * @param res Responseを設定します. */ default void call( int state, boolean restful, Request req, Response<?> res) { // 真っ更のResponseが必要なので新規作成. 
res = new AnyResponseImpl(((AbstractResponse<?>)res).getElement(), ((AbstractResponse<?>)res).getSrcComponentType()); call(state, restful, req, (AnyResponse)res, null); } /** * HttpError処理を実行. * @param state HTTPステータスを設定します. * @param json エラーが発生した呼び出しコンポーネントが * [RESFful]の場合は[true]が設定されます. * @param req HttpRequestを設定します. * @param res AnyResponseを設定します. * @param e 例外を設定します. */ public void call(int state, boolean restful, Request req, AnyResponse res, Throwable e); }
java
package example import ( "fmt" "os" "go.m3o.com/file" ) // List files by their project and optionally a path. func ListFiles() { fileService := file.NewFileService(os.Getenv("M3O_API_TOKEN")) rsp, err := fileService.List(&file.ListRequest{ Project: "examples", }) fmt.Println(rsp, err) }
go
Kolkata, Oct 5 (IANS): West Bengal ranks second in the country with respect to the number of missing children, an NGO official said Saturday, adding that the abuse children face at home was one big reason why they run away, exposing themselves to trafficking. Additional Director Rajib K. Halder of international NGO Child In Need Institute (CINI) said the mapping of missing children in West Bengal was undertaken in collaborated with Plan International, under the Missing Child Alert project (MCA) run by Plan. The results of the mapping will be revealed Monday. "For the last three months, we have done a mapping project to know from which districts, which areas the children go missing. A spotting has being done and the biggest revelation is the failure of the safety-net at home and school. This is the biggest issue," Halder told IANS ahead of the unveiling of the report. Abuse at the hands of members of the family forces children to leave home, and many of them end up getting trafficked to Bangladesh and Nepal. This has garnered widespread attention among SAARC countries of Sri Lanka, Bhutan, India, Maldives, Nepal, Pakistan, Bangladesh and Afghanistan. "Child abuse at home has serious repercussions. NGOs are not interested, police is not interested and government is not interested. It's an issue of south Asia. Our young boys and girls are trafficked to Bangladesh and Nepal. Therefore the state loses a large number of children every year," he said. Stating that 54 percent of child sex abuse incidences occur at home, Halder said factors like poverty also play additional roles in trafficking. The strategic location of West Bengal, bordering Nepal, Bangladesh and Bhutan and also serving as a gateway to the north east, plays a crucial role in cross-border child trafficking. The project led to the identification of key districts in the state from where children go missing. "Districts are listed now and the prime districts have been identified. 
This will be the basis for the state government's plan of action," said Halder, adding that as many as 4,000 children go missing in one district (South 24-Parganas) in a year. Plan International is one of the oldest and largest children's development organisations in the world and runs the MCA project in Bangladesh, India and Nepal with participation from Pakistan. The mapping project has been also carried out in Uttar Pradesh, the state from which the largest number of children go missing each year, and Bihar.
english
<filename>dsgrid/tests/make_us_data_registry.py<gh_stars>1-10 import getpass import logging import os import shutil import tempfile from pathlib import Path import click from dsgrid.loggers import setup_logging, check_log_file_size from dsgrid.registry.registry_manager import RegistryManager from dsgrid.tests.common import ( create_local_test_registry, TEST_DATASET_DIRECTORY, TEST_REMOTE_REGISTRY, ) from dsgrid.utils.timing import timer_stats_collector from dsgrid.utils.files import load_data from dsgrid.tests.common import replace_dimension_uuids_from_registry, TEST_PROJECT_REPO logger = logging.getLogger(__name__) def make_test_data_registry( registry_path, src_dir, dataset_path=None, include_projects=True, include_datasets=True, offline_mode=True, ) -> RegistryManager: """Creates a local registry from a dsgrid project source directory for testing. Parameters ---------- registry_path : Path Path in which the registry will be created. src_dir : Path Path containing source config files dataset_path : Path | None If None, use "DSGRID_LOCAL_DATA_DIRECTORY" env variable. include_projects : bool If False, do not register any projects. include_datasets : bool If False, do not register any datasets. offline_mode : bool If False, use the test remote registry. 
""" if not include_projects and include_datasets: raise Exception("If include_datasets is True then include_projects must also be True.") if dataset_path is None: dataset_path = os.environ.get("DSGRID_LOCAL_DATA_DIRECTORY", TEST_DATASET_DIRECTORY) dataset_path = Path(dataset_path) path = create_local_test_registry(registry_path) dataset_dir = Path("datasets/sector_models/comstock") user = getpass.getuser() log_message = "Initial registration" if offline_mode: manager = RegistryManager.load(path, offline_mode=offline_mode) else: manager = RegistryManager.load( path, remote_path=TEST_REMOTE_REGISTRY, offline_mode=offline_mode ) project_config_file = src_dir / "project.toml" project_id = load_data(project_config_file)["project_id"] dataset_config_file = src_dir / dataset_dir / "dataset.toml" dataset_mapping_file = src_dir / dataset_dir / "dimension_mappings.toml" if not dataset_mapping_file.exists(): dataset_mapping_file = None dataset_id = load_data(dataset_config_file)["dataset_id"] if include_projects: print("\n 1. register project: \n") manager.project_manager.register( project_config_file, user, log_message, ) if include_datasets: print("\n 2. register dataset: \n") replace_dimension_uuids_from_registry(path, (dataset_config_file,)) manager.dataset_manager.register( dataset_config_file, dataset_path / dataset_id, user, log_message, ) print("\n 3. 
submit dataset to project\n") manager.project_manager.submit_dataset( project_id, dataset_id, user, log_message, dimension_mapping_file=dataset_mapping_file, ) return manager @click.command() @click.argument("registry-path", type=Path, default=f"{Path.home()}/.dsgrid-test-registry") @click.option( "-f", "--force", default=False, is_flag=True, show_default=True, help="Delete registry-path if it exists.", ) @click.option( "-p", "--project-dir", default=TEST_PROJECT_REPO, required=True, help="path to a project repository", ) @click.option( "-d", "--dataset-dir", default=TEST_DATASET_DIRECTORY, required=True, help="path to your local datasets", ) @click.option( "--verbose", is_flag=True, default=False, show_default=True, help="Enable verbose log output." ) def run(registry_path, force, project_dir, dataset_dir, verbose): """Creates a local registry from a dsgrid project source directory for testing.""" level = logging.DEBUG if verbose else logging.INFO log_file = Path("test_dsgrid_project.log") check_log_file_size(log_file, no_prompts=True) setup_logging("dsgrid", log_file, console_level=level, file_level=level, mode="a") if registry_path.exists(): if force: shutil.rmtree(registry_path) else: print(f"{registry_path} already exists. Use --force to overwrite.") os.makedirs(registry_path) tmp_project_dir = Path(tempfile.gettempdir()) / "tmp_test_project_dir" if tmp_project_dir.exists(): shutil.rmtree(tmp_project_dir) shutil.copytree(project_dir, tmp_project_dir) try: make_test_data_registry(registry_path, tmp_project_dir / "dsgrid_project", dataset_dir) finally: timer_stats_collector.log_stats() if __name__ == "__main__": run()
python
Taapsee Pannu will be seen in the role of cricketer Mithali Raj in the forthcoming biopic based on the Indian cricketer, Shabaash Mithu. The actress, who has previously featured in several sports films like Rashmi Rocket, Soorma, and even Saand Ki Aankh will be seen in the role of a cricketer in this one. However, considering that she has been a part of several sports films in the past, the actress in a recent interview with Bollywood Hungama spoke being associated so many films based on sports and how sports is like her second personality. In this exclusive interview with Bollywood Hungama, Taapsee Pannu spoke about doing back-to-back sports films. She said, “Aisa time aahi gaya tha iss film ke shooting ke dauran, jahan main bhoolne wali thi main actress hoon – back to back main itna sports ki film karne lagi hoon. Par woh isliye kyunki sports mera second character hai in life. Main bachpan se hi bahut active rahi hoon sports mein – galli wale pithu se lekar badminton, volleyball sab try kiya hai.” (Transl: There was a time during the shoot, where I almost forgot that I am an actress. I have done so many back-to-back sports films. May be that is because sports like my second character in life. I have been very active in sports since childhood. I have tried everything from galli sports like seven stones to badminton, and volleyball.) Speaking of Shabaash Mithu, the film is directed by Srijit Mukherjee and it is slated to release on July 15, 2022. Catch us for latest Bollywood News, New Bollywood Movies update, Box office collection, New Movies Release , Bollywood News Hindi, Entertainment News, Bollywood Live News Today & Upcoming Movies 2024 and stay updated with latest hindi movies only on Bollywood Hungama.
english
/* * Copyright (C) 2014 - present Instructure, Inc. * * This file is part of Canvas. * * Canvas is free software: you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License as published by the Free * Software Foundation, version 3 of the License. * * Canvas is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR * A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ const CoffeeScript = require("coffee-script"); const babylon = require("@babel/parser"); const fs = require('fs'); const scanner = require("./scanner"); const { HbsProcessor } = require("@instructure/i18nliner-handlebars"); const { AbstractProcessor, CallHelpers, Commands, config, JsProcessor, } = require("@instructure/i18nliner"); const Check = Commands.Check; // tell i18nliner's babylon how to handle `import('../foo').then` config.babylonPlugins.push('dynamicImport') config.babylonPlugins.push('optionalChaining') // tell i18nliner's babylon how to handle typescript config.babylonPlugins.push('typescript') AbstractProcessor.prototype.checkFiles = function() { const processor = this.constructor.name.replace(/Processor/, '').toLowerCase() const files = scanner.getFilesForProcessor(processor) for (const file of files) { this.checkWrapper(file, this.checkFile.bind(this)) } } JsProcessor.prototype.sourceFor = function(file) { var source = fs.readFileSync(file).toString(); var data = { source: source, skip: !source.match(/I18n\.t/) }; if (!data.skip) { if (file.match(/\.coffee$/)) { data.source = CoffeeScript.compile(source, {}); } data.ast = babylon.parse(data.source, { plugins: config.babylonPlugins, sourceType: "module" }); } return data; }; // we do the actual pre-processing in sourceFor, so just 
pass data straight through JsProcessor.prototype.preProcess = function(data) { return data; }; require("./scoped_hbs_pre_processor"); var ScopedESMExtractor = require("./scoped_esm_extractor"); var ScopedHbsExtractor = require("./scoped_hbs_extractor"); var ScopedTranslationHash = require("./scoped_translation_hash"); // remove path stuff we don't want in the scope var pathRegex = new RegExp( '.*(' + 'ui/shared/jst' + '|ui/features/screenreader_gradebook/jst' + '|packages/[^/]+/src/jst' + '|gems/plugins/[^/]+/app/views/jst' + ')' ) ScopedHbsExtractor.prototype.normalizePath = function(path) { return path.replace(pathRegex, "").replace(/^([^\/]+\/)templates\//, '$1'); }; var GenerateJs = require("./generate_js"); Commands.Generate_js = GenerateJs; // swap out the defaults for our scope-aware varieties Check.prototype.TranslationHash = ScopedTranslationHash; JsProcessor.prototype.I18nJsExtractor = ScopedESMExtractor; HbsProcessor.prototype.Extractor = ScopedHbsExtractor; CallHelpers.keyPattern = /^\#?\w+(\.\w+)+$/ // handle our absolute keys module.exports = { Commands, scanner };
javascript
Although the of Kenshin's life depicted in this OVA series was also detailed in the original manga, the TV series "Rurouni Kenshin: Meiji kenkaku roman tan" (1996) never mentions Tomoe by name, but Hiko does mention ask Kenshin about his lost love at Tomoe's gravesite in Kyoto. The only characters aside from Kenshin that also appear in the TV series are Saito, Hiko, and assumedly Shishio (the unnamed assassin). The story of Rurouni Kenshin is a highly fictionalized tale very loosely based on real historical figures. The region 1 DVDs published by ADV include some brief notes that connect the anime characters to their historical counterparts. Among the events in this OVA is included a fictionalized version of what became known as the Ikeda-ya affair, when in June 1864 the Shinsengumi in Kyoto raided an inn, the Ikeda-ya, being used as a hideout by pro-Imperial fighters. The alleged plot was to destroy Kyoto through fire, hopefully destabilizing the Shogunate's grip on the people. In the end, the Shinsengumi broke up the ring, killing 8, wounding another 4, and arresting 20. This made them national heroes in the eyes of the Tokugawa bakufu.
english
const fork = require('child_process').fork; let config; try { config = require("./config"); } catch (error) { console.error("config", error) } let CLUSTER = []; function getArgumentsFromConfig(cfg) { let argv = []; Object.entries(cfg).map((set, index) => { argv.push(`-${set[0]}`, set[1]); }); return argv }; let con = _ => console.log(_.substring(0, _.length - 1).toString()); let down = (n, i) => { CLUSTER.splice(CLUSTER.findIndex(v => n === v), 1); console.log(`[x][${i}] Отключен от кластера. Работает ${CLUSTER.length} из ${config.accounts.length}`) }; let reload = (oldNode, params) => { let i = CLUSTER.findIndex(node => oldNode === node); oldNode.kill(); CLUSTER.push(launchNode(i + 1, params)); console.log(`[^][${i + 1}] Нода перезапущена.`) }; function run() { if (config != null && config.accounts !== undefined && Array.isArray(config.accounts)) { config.accounts = config.accounts.filter(v => v != null && v !== "" && typeof v == "object"); config.accounts.forEach((config, i) => { let params = getArgumentsFromConfig(config); if (params != null) { setTimeout(() => { CLUSTER.push(launchNode(i + 1, params)); }, 100 * i); } else console.log("Не найден токен") }); console.log(`Запущено ${CLUSTER.length} из ${config.accounts.length}`) } else { console.log(`Ошибка запуска кластера`) } } run(); function launchNode(localId, params) { const node = fork("./index.js", params, { stdio: ['pipe', 'pipe', 'pipe', 'ipc'], }); node.stdout.on('data', _ => con(`[${new Date().toLocaleTimeString()}][${localId}]` + _.toString())); node.on('close', () => down(node, localId)); node.on('exit', (code, signal) => { if (code === 7) { reload(node, params) } }); return node; } const readline = require('readline'); const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); setInterval(() => { CLUSTER.forEach(v => v.kill()); let r = () => { if (CLUSTER.length === 0) run(); else setTimeout(() => r(), 500) }; r(); }, 1000 * 60 * 60 * 1.5); rl.on('line', (l) => { switch 
(l.trim()) { case "reload": CLUSTER.forEach(v => v.kill()); let r = () => { if (CLUSTER.length === 0) run(); else setTimeout(() => r(), 500) }; r(); break; case "stop": process.exit(); break; default: break; } });
javascript
<gh_stars>0 {"ast":null,"code":"var _jsxFileName = \"C:\\\\Users\\\\Pedro\\\\Downloads\\\\Anderson\\\\Rocketseat\\\\OmniStack 8.0\\\\Projeto\\\\frontend\\\\src\\\\routes.js\";\nimport React from 'react';\nimport { BrowserRouter, Route } from 'react-router-dom';\nimport Login from './pages/Login';\nimport Main from './pages/Main';\nexport default function Routes() {\n return React.createElement(BrowserRouter, {\n __source: {\n fileName: _jsxFileName,\n lineNumber: 8\n },\n __self: this\n }, React.createElement(Route, {\n path: \"/\",\n exact: true,\n component: Login,\n __source: {\n fileName: _jsxFileName,\n lineNumber: 9\n },\n __self: this\n }), React.createElement(Route, {\n path: \"/dev/:id\",\n component: Main,\n __source: {\n fileName: _jsxFileName,\n lineNumber: 10\n },\n __self: this\n }));\n} //BrowserRouter = Rota do navegador, roteamento no browser\n//Route = rota\n//Quando exporta uma função ela vira um componente. Serve para isolar um pedaço da aplicação, uma parte visual, ou alguma coisa que precisa repetir o funcionamento. Função que retorna um html.\n//Retorna conteúdo HTML ou JSX (conteúdo html dentro do JavaScript)\n//Precisa ter 1 Route por página\n//path=\"/\" significa que o usuário está na raiz, não possui nenhuma rota informada na barra de endereços\n//Por padrão o react-router-dom não verifica se o caminho no navegador é exatamente igual a (\"/\"). Verifica se o caminho\n//começa com (\"/\") ai sempre vai chamar a primeira rota (Login). Para corrigir o erro usa exact na primeira rota\n//Rota main vai receber um parâmetro na rota que é o id do usuário logado. 
(/dev/:id).","map":{"version":3,"sources":["C:\\Users\\Pedro\\Downloads\\Anderson\\Rocketseat\\OmniStack 8.0\\Projeto\\frontend\\src\\routes.js"],"names":["React","BrowserRouter","Route","Login","Main","Routes"],"mappings":";AAAA,OAAOA,KAAP,MAAkB,OAAlB;AACA,SAASC,aAAT,EAAwBC,KAAxB,QAAqC,kBAArC;AACA,OAAOC,KAAP,MAAkB,eAAlB;AACA,OAAOC,IAAP,MAAiB,cAAjB;AAEA,eAAe,SAASC,MAAT,GAAkB;AAC7B,SACI,oBAAC,aAAD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KACI,oBAAC,KAAD;AAAO,IAAA,IAAI,EAAC,GAAZ;AAAgB,IAAA,KAAK,MAArB;AAAsB,IAAA,SAAS,EAAEF,KAAjC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IADJ,EAEI,oBAAC,KAAD;AAAO,IAAA,IAAI,EAAC,UAAZ;AAAuB,IAAA,SAAS,EAAEC,IAAlC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAFJ,CADJ;AAMH,C,CAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","sourcesContent":["import React from 'react';\r\nimport { BrowserRouter, Route } from 'react-router-dom';\r\nimport Login from './pages/Login';\r\nimport Main from './pages/Main';\r\n\r\nexport default function Routes() {\r\n return (\r\n <BrowserRouter>\r\n <Route path=\"/\" exact component={Login} />\r\n <Route path=\"/dev/:id\" component={Main} />\r\n </BrowserRouter>\r\n );\r\n}\r\n\r\n//BrowserRouter = Rota do navegador, roteamento no browser\r\n//Route = rota\r\n//Quando exporta uma função ela vira um componente. Serve para isolar um pedaço da aplicação, uma parte visual, ou alguma coisa que precisa repetir o funcionamento. Função que retorna um html.\r\n//Retorna conteúdo HTML ou JSX (conteúdo html dentro do JavaScript)\r\n//Precisa ter 1 Route por página\r\n//path=\"/\" significa que o usuário está na raiz, não possui nenhuma rota informada na barra de endereços\r\n//Por padrão o react-router-dom não verifica se o caminho no navegador é exatamente igual a (\"/\"). Verifica se o caminho\r\n//começa com (\"/\") ai sempre vai chamar a primeira rota (Login). Para corrigir o erro usa exact na primeira rota\r\n//Rota main vai receber um parâmetro na rota que é o id do usuário logado. (/dev/:id).\r\n\r\n"]},"metadata":{},"sourceType":"module"}
json
<reponame>StoreConnect/sensorthings-api-model<gh_stars>1-10 { "@iot.id": 1, "@iot.selfLink": "http://example.org/v1.0/HistoricalLocations(1)", "<EMAIL>": "HistoricalLocations(1)/Locations", "<EMAIL>": "HistoricalLocations(1)/Thing", "time": "2015-01-25T12:00:00-07:00" }
json
<gh_stars>1-10 import * as React from 'react'; import { FormControl, FormLabel, FormHelperText, Select, } from '@chakra-ui/react'; import type { Product } from '@/shared/types/query.types'; import { useApi } from '@/shared/services/api'; import { ORGANIZATIONS_LIST, ORGANIZATIONS_OWN_UNITS_LIST, } from '@/shared/queries/organization-queries'; interface OrganizationSelectProps { data: Product; } const OrganizationSelect: React.FC<OrganizationSelectProps> = ({ data }) => { const [organization, setOrganization] = React.useState<string>( data.organization?.id ?? '' ); const [organizationUnit, setOrganizationUnit] = React.useState<string>( data.organizationUnit?.id ?? '' ); // Events const onOrganizationChange = React.useCallback( (event: React.ChangeEvent<HTMLSelectElement>) => { setOrganization(event.target.value); }, [setOrganization] ); const onOrganizationUnitChange = React.useCallback( (event: React.ChangeEvent<HTMLSelectElement>) => { setOrganizationUnit(event.target.value); }, [setOrganizationUnit] ); // Queries const organizationsQuery = useApi( 'organizations', { query: ORGANIZATIONS_LIST, }, { suspense: false } ); // Org unit grab the org and loop child org units const organizationUnitsQuery = useApi( ['organizationUnits', organization], { query: ORGANIZATIONS_OWN_UNITS_LIST, variables: { id: organization, }, }, { suspense: false, enabled: !!organization } ); return ( <> <FormControl id="product-organization"> <FormLabel>Organization</FormLabel> <Select isLoading={organizationsQuery.isLoading} onChange={onOrganizationChange} name="organization" variant="bc-input" value={organization} data-testid="prd-edit-org-dd" > <option value="">Select an Organization</option> {organizationsQuery.data?.allOrganizations.map((org) => ( <option key={org.id} value={org.id}> {org.title} </option> ))} </Select> <FormHelperText> Which organization does this product belong to? 
</FormHelperText> </FormControl> <FormControl id="product-organization-unit"> <FormLabel>Organization Unit</FormLabel> <Select name="organizationUnit" variant="bc-input" isDisabled={!organization} onChange={onOrganizationUnitChange} value={organizationUnit} > <option value="">Select an Organization Unit</option> {organizationUnitsQuery.data?.Organization?.orgUnits.map((unit) => ( <option key={unit.id} value={unit.id}> {unit.title} </option> ))} </Select> </FormControl> </> ); }; export default OrganizationSelect;
typescript
<filename>_posts/2020-10-13-DE_TIL141.md --- layout: post title: "Airflow에서 Task 병렬실행을 위한 localexecutor 셋팅" tags: [Data Engineering] comments: true --- . Data_Engineering_TIL(20201013) #### # 참고자료 1) Airflow에서 Task 병렬처리를 위한 환경설정을 셋팅하는 실습 김영현님이 작성한 ‘Airflow를 이용한 데이터 Workflow 관리’ 자료를 참고함 2) ‘Airflow 구조와 execution_date 이해하기’ 블로그 글 url : https://bomwo.cc/posts/execution_date/ 3) Airflow에서 Task 병렬실행을 위한 celeryexecutor 셋팅 url : https://minman2115.github.io/DE_TIL137/ 4) ‘[data] airflow 설치(DB: mysql)’ 블로그글 url : https://m.blog.naver.com/varkiry05/222018641877 #### # airflow executor executor는 worker라고보 불리며 조건에 따라 여러가지 executor를 제공하고 있다. sequentialexector(default) 1) task 순차처리함 2) sqlite를 backend로 설정 3) 아주 심플한 test용도로만 권장 localexecutor 1) task 병렬 처리 가능 2) mysql이나 postgresql을 backend로 설정 3) task마다 subprocess를 생성한다. celeryexecutor 1) task를 여러 서버(node)에 분산 처리 가능 (cluster) 2) celery backend (rabbitmq, redis, …) 설정이 필요함 #### # 실습내용 ** 실습환경 : Amazon linux AMI 2 step 1) Airflow 설치 먼저 아래와 같이 airflow와 그것에 의존하는 프로그램들을 설치하자 ```python [ec2-user@ip-10-1-10-239 ~]$ sudo yum update -y [ec2-user@ip-10-1-10-239 ~]$ sudo yum install python3 -y [ec2-user@ip-10-1-10-239 ~]$ sudo yum install gcc python3-devel -y [ec2-user@ip-10-1-10-239 ~]$ sudo pip3 install apache-airflow [ec2-user@ip-10-1-10-239 ~]$ sudo yum install mysql-devel -y [ec2-user@ip-10-1-10-239 ~]$ sudo pip3 install 'apache-airflow[mysql]' [ec2-user@ip-10-1-10-239 ~]$ sudo pip3 install 'apache-airflow[celery]' [ec2-user@ip-10-1-10-239 ~]$ sudo pip3 install boto3 [ec2-user@ip-10-1-10-239 ~]$ aws configure AWS Access Key ID [None]: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx AWS Secret Access Key [None]: yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy Default region name [None]: ap-northeast-2 Default output format [None]: json # Meta DB 구동 [ec2-user@ip-10-1-10-239 ~]$ airflow initdb ``` step 2) Airflow의 MetaDB로 활용할 MySQL 설치 ```python [ec2-user@ip-10-1-10-239 ~]$ sudo yum install https://dev.mysql.com/get/mysql80-community-release-el7-3.noarch.rpm 
[ec2-user@ip-10-1-10-239 ~]$ sudo yum install mysql-community-server -y [ec2-user@ip-10-1-10-239 ~]$ sudo systemctl enable --now mysqld [ec2-user@ip-10-1-10-239 ~]$ systemctl status mysqld ● mysqld.service - MySQL Server Loaded: loaded (/usr/lib/systemd/system/mysqld.service; enabled; vendor preset: disabled) Active: active (running) since Wed 2020-10-07 07:28:55 UTC; 8s ago Docs: man:mysqld(8) http://dev.mysql.com/doc/refman/en/using-systemd.html Process: 11832 ExecStartPre=/usr/bin/mysqld_pre_systemd (code=exited, status=0/SUCCESS) Main PID: 11906 (mysqld) Status: "Server is operational" CGroup: /system.slice/mysqld.service └─11906 /usr/sbin/mysqld Oct 07 07:28:50 ip-10-1-10-81.ap-northeast-2.compute.internal systemd[1]: Starting MySQL Server... Oct 07 07:28:55 ip-10-1-10-81.ap-northeast-2.compute.internal systemd[1]: Started MySQL Server. ``` Airflow에서 설치한 MySQL을 사용할 수 있도록 몇가지 config 설정을 아래와 같이 해준다. 슈퍼유저인 root @ localhost 가 initial password로 세팅되어 error log file에 저장되어 있기 때문에 그거를 확인하기 위해 다음과 같은 명령어를 실행한다. ```python [ec2-user@ip-10-1-10-239 ~]$ sudo grep 'temporary password' /var/log/mysqld.log 2020-10-07T07:28:52.228023Z 6 [Note] [MY-010454] [Server] A temporary password is generated for root@localhost: F=u3q.9hf!>d ``` 위에서 확인한 임시 루트비번을 갖고 MySQL 서버에서 최초 installation 설정을 해준다. ```python [ec2-user@ip-10-1-10-239 ~]$ sudo mysql_secure_installation -p'F=u3q.9hf!>d' mysql_secure_installation: [Warning] Using a password on the command line interface can be insecure. Securing the MySQL server deployment. The existing password for the user account root has expired. Please set a new password. New password: Re-enter new password: The 'validate_password' component is installed on the server. The subsequent steps will run with the existing configuration of the component. Using existing password for root. Estimated strength of the password: 100 Change the password for root ? ((Press y|Y for Yes, any other key for No) : ... skipping. 
By default, a MySQL installation has an anonymous user, allowing anyone to log into MySQL without having to have a user account created for them. This is intended only for testing, and to make the installation go a bit smoother. You should remove them before moving into a production environment. Remove anonymous users? (Press y|Y for Yes, any other key for No) : y Success. Normally, root should only be allowed to connect from 'localhost'. This ensures that someone cannot guess at the root password from the network. Disallow root login remotely? (Press y|Y for Yes, any other key for No) : ... skipping. By default, MySQL comes with a database named 'test' that anyone can access. This is also intended only for testing, and should be removed before moving into a production environment. Remove test database and access to it? (Press y|Y for Yes, any other key for No) : y - Dropping test database... Success. - Removing privileges on test database... Success. Reloading the privilege tables will ensure that all changes made so far will take effect immediately. Reload privilege tables now? (Press y|Y for Yes, any other key for No) : y Success. All done! ``` mysql에 접속해서 airflow에서 사용할 database와 계정을 생성한다. root 계정으로 접속해서 아래와 같이 database와 계정을 생성한다(이름, 비밀번호는 직접 설정) ```python [ec2-user@ip-10-1-10-239 ~]$ mysql -uroot -p Enter password: Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 14 Server version: 8.0.21 MySQL Community Server - GPL Copyright (c) 2000, 2020, Oracle and/or its affiliates. All rights reserved. Oracle is a registered trademark of Oracle Corporation and/or its affiliates. Other names may be trademarks of their respective owners. Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. 
mysql> create database airflow; Query OK, 1 row affected (0.01 sec) mysql> create user 'airflow'@'localhost' identified by 'MyNewStrongP@ssw0d!'; Query OK, 0 rows affected (0.00 sec) mysql> ALTER USER 'airflow'@'localhost' IDENTIFIED WITH mysql_native_password BY '<PASSWORD>!'; Query OK, 0 rows affected (0.00 sec) mysql> grant all privileges on airflow.* to 'airflow'@'localhost'; Query OK, 0 rows affected (0.00 sec) mysql> create user 'airflow'@'%' identified by 'MyNewStrongP@ssw0d!'; Query OK, 0 rows affected (0.01 sec) mysql> ALTER USER 'airflow'@'%' IDENTIFIED WITH mysql_native_password BY '<PASSWORD>!'; Query OK, 0 rows affected (0.00 sec) mysql> grant all privileges on airflow.* to 'airflow'@'%'; Query OK, 0 rows affected (0.00 sec) mysql> flush privileges; Query OK, 0 rows affected (0.01 sec) mysql> exit; Bye ``` 그런 다음에 mysql의 my.cnf 파일을 수정해줘야 한다. 안그러면 airflow initdb 했을때 ‘Exception: Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql’ Error가 발생할 것이다. ```python [ec2-user@ip-10-1-10-23 airflow]$ sudo vim /etc/my.cnf # 하단에 아래 내용을 추가 explicit_defaults_for_timestamp = 1 max_allowed_packet = 30M [ec2-user@ip-10-1-10-23 airflow]$ sudo systemctl restart mysqld [ec2-user@ip-10-1-10-23 airflow]$ sudo systemctl status mysqld ● mysqld.service - MySQL Server Loaded: loaded (/usr/lib/systemd/system/mysqld.service; enabled; vendor preset: disabled) Active: active (running) since Thu 2020-10-08 07:36:23 UTC; 22s ago Docs: man:mysqld(8) http://dev.mysql.com/doc/refman/en/using-systemd.html Process: 10133 ExecStartPre=/usr/bin/mysqld_pre_systemd (code=exited, status=0/SUCCESS) Main PID: 10157 (mysqld) Status: "Server is operational" CGroup: /system.slice/mysqld.service └─10157 /usr/sbin/mysqld Oct 08 07:36:22 ip-10-1-10-23.ap-northeast-2.compute.internal systemd[1]: Starting MySQL Server... Oct 08 07:36:23 ip-10-1-10-23.ap-northeast-2.compute.internal systemd[1]: Started MySQL Server. ``` 그 다음에 airflow에서 airflow.cfg를 다음과 같이 수정해줘야 한다. 
```python [ec2-user@ip-10-1-10-23 ~]$ cd ~/airflow [ec2-user@ip-10-1-10-23 airflow]$ ls airflow.cfg airflow.db airflow-webserver.pid logs unittests.cfg [ec2-user@ip-10-1-10-23 airflow]$ sudo vim airflow.cfg # executor를 아래와 같이 LocalExecutor로 변경해준다. executor = LocalExecutor # sql_alchemy_conn = mysql://[ID]:[PASSWORD]@[IP]:3306/airflow sql_alchemy_conn = mysql://airflow:MyNewStrongP@ssw0d!@localhost:3306/airflow # 만약 샘플데이터를 원하지 않는다면 airflow.cfg 파일 내 다음 옵션도 False로 변경해야한다 load_examples = False ``` airflow metadb를 아래와 같이 재구동한다. ```python [ec2-user@ip-10-1-10-224 airflow]$ airflow initdb DB: mysql://airflow:***@localhost:3306/airflow [2020-10-08 09:24:48,497] {db.py:378} INFO - Creating tables INFO [alembic.runtime.migration] Context impl MySQLImpl. INFO [alembic.runtime.migration] Will assume non-transactional DDL. INFO [alembic.runtime.migration] Running upgrade -> e3a246e0dc1, current schema INFO [alembic.runtime.migration] Running upgrade e3a246e0dc1 -> 1507a7289a2f, create is_encrypted INFO [alembic.runtime.migration] Running upgrade 1507a7289a2f -> 13eb55f81627, maintain history for compatibility with earlier migrations INFO [alembic.runtime.migration] Running upgrade 13eb55f81627 -> 338e90f54d61, More logging into task_instance INFO [alembic.runtime.migration] Running upgrade 338e90f54d61 -> 52d714495f0, job_id indices INFO [alembic.runtime.migration] Running upgrade 52d714495f0 -> 502898887f84, Adding extra to Log INFO [alembic.runtime.migration] Running upgrade 502898887f84 -> 1b38cef5b76e, add dagrun INFO [alembic.runtime.migration] Running upgrade 1b38cef5b76e -> 2e541a1dcfed, task_duration INFO [alembic.runtime.migration] Running upgrade 2e541a1dcfed -> 40e67319e3a9, dagrun_config INFO [alembic.runtime.migration] Running upgrade 40e67319e3a9 -> 561833c1c74b, add password column to user INFO [alembic.runtime.migration] Running upgrade 561833c1c74b -> 4446e08588, dagrun start end INFO [alembic.runtime.migration] Running upgrade 4446e08588 -> bbc73705a13e, Add 
notification_sent column to sla_miss INFO [alembic.runtime.migration] Running upgrade bbc73705a13e -> bba5a7cfc896, Add a column to track the encryption state of the 'Extra' field in connection INFO [alembic.runtime.migration] Running upgrade bba5a7cfc896 -> 1968acfc09e3, add is_encrypted column to variable table INFO [alembic.runtime.migration] Running upgrade 1968acfc09e3 -> 2e82aab8ef20, rename user table INFO [alembic.runtime.migration] Running upgrade 2e82aab8ef20 -> 211e584da130, add TI state index INFO [alembic.runtime.migration] Running upgrade 211e584da130 -> 64de9cddf6c9, add task fails journal table INFO [alembic.runtime.migration] Running upgrade 64de9cddf6c9 -> f2ca10b85618, add dag_stats table INFO [alembic.runtime.migration] Running upgrade f2ca10b85618 -> 4addfa1236f1, Add fractional seconds to mysql tables INFO [alembic.runtime.migration] Running upgrade 4addfa1236f1 -> 8504051e801b, xcom dag task indices INFO [alembic.runtime.migration] Running upgrade 8504051e801b -> 5e7d17757c7a, add pid field to TaskInstance INFO [alembic.runtime.migration] Running upgrade 5e7d17757c7a -> 127d2bf2dfa7, Add dag_id/state index on dag_run table INFO [alembic.runtime.migration] Running upgrade 127d2bf2dfa7 -> cc1e65623dc7, add max tries column to task instance INFO [alembic.runtime.migration] Running upgrade cc1e65623dc7 -> bdaa763e6c56, Make xcom value column a large binary INFO [alembic.runtime.migration] Running upgrade bdaa763e6c56 -> 947454bf1dff, add ti job_id index INFO [alembic.runtime.migration] Running upgrade 947454bf1dff -> d2ae31099d61, Increase text size for MySQL (not relevant for other DBs' text types) INFO [alembic.runtime.migration] Running upgrade d2ae31099d61 -> 0e2a74e0fc9f, Add time zone awareness INFO [alembic.runtime.migration] Running upgrade d2ae31099d61 -> 33ae817a1ff4, kubernetes_resource_checkpointing INFO [alembic.runtime.migration] Running upgrade 33ae817a1ff4 -> 27c6a30d7c24, kubernetes_resource_checkpointing INFO 
[alembic.runtime.migration] Running upgrade 27c6a30d7c24 -> 86770d1215c0, add kubernetes scheduler uniqueness INFO [alembic.runtime.migration] Running upgrade 86770d1215c0, 0e2a74e0fc9f -> 05f30312d566, merge heads INFO [alembic.runtime.migration] Running upgrade 05f30312d566 -> f23433877c24, fix mysql not null constraint INFO [alembic.runtime.migration] Running upgrade f23433877c24 -> 856955da8476, fix sqlite foreign key INFO [alembic.runtime.migration] Running upgrade 856955da8476 -> 9635ae0956e7, index-faskfail INFO [alembic.runtime.migration] Running upgrade 9635ae0956e7 -> dd25f486b8ea, add idx_log_dag INFO [alembic.runtime.migration] Running upgrade dd25f486b8ea -> bf00311e1990, add index to taskinstance INFO [alembic.runtime.migration] Running upgrade 9635ae0956e7 -> 0a2a5b66e19d, add task_reschedule table INFO [alembic.runtime.migration] Running upgrade 0a2a5b66e19d, bf00311e1990 -> 03bc53e68815, merge_heads_2 INFO [alembic.runtime.migration] Running upgrade 03bc53e68815 -> 41f5f12752f8, add superuser field INFO [alembic.runtime.migration] Running upgrade 41f5f12752f8 -> c8ffec048a3b, add fields to dag INFO [alembic.runtime.migration] Running upgrade c8ffec048a3b -> dd4ecb8fbee3, Add schedule interval to dag INFO [alembic.runtime.migration] Running upgrade dd4ecb8fbee3 -> 939bb1e647c8, task reschedule fk on cascade delete INFO [alembic.runtime.migration] Running upgrade 939bb1e647c8 -> 6e96a59344a4, Make TaskInstance.pool not nullable INFO [alembic.runtime.migration] Running upgrade 6e96a59344a4 -> d38e04c12aa2, add serialized_dag table Revision ID: xxxxxxxxxxxxxxxxxxxxxxxxx Revises: <PASSWORD> Create Date: 2019-08-01 14:39:35.616417 INFO [alembic.runtime.migration] Running upgrade d38e04c12aa2 -> b3b105409875, add root_dag_id to DAG INFO [alembic.runtime.migration] Running upgrade 6e96a59344a4 -> 74effc47d867, change datetime to datetime2(6) on MSSQL tables INFO [alembic.runtime.migration] Running upgrade 939bb1e647c8 -> 004c1210f153, increase queue name 
size limit INFO [alembic.runtime.migration] Running upgrade c8ffec048a3b -> a56c9515abdc, Remove dag_stat table INFO [alembic.runtime.migration] Running upgrade a56c9515abdc, 004c1210f153, 74effc47d867, b3b105409875 -> 08364691d074, Merge the four heads back together INFO [alembic.runtime.migration] Running upgrade 08364691d074 -> fe461863935f, increase_length_for_connection_password INFO [alembic.runtime.migration] Running upgrade fe461863935f -> 7939bcff74ba, Add DagTags table INFO [alembic.runtime.migration] Running upgrade 7939bcff74ba -> a4c2fd67d16b, add pool_slots field to task_instance INFO [alembic.runtime.migration] Running upgrade a4c2fd67d16b -> 852ae6c715af, Add RenderedTaskInstanceFields table INFO [alembic.runtime.migration] Running upgrade 852ae6c715af -> 952da73b5eff, add dag_code table INFO [alembic.runtime.migration] Running upgrade 952da73b5eff -> a66efa278eea, Add Precision to execution_date in RenderedTaskInstanceFields table INFO [alembic.runtime.migration] Running upgrade a66efa278eea -> da3f683c3a5a, Add dag_hash Column to serialized_dag table WARNI [airflow.models.crypto] cryptography not found - values will not be stored encrypted. Done. ``` xcom으로 전달되는 데이터의 크기 확장을 위해 컬럼 타입을 아래와 같이 airflow metadb에 접속해서 변경한다. ```python [ec2-user@ip-10-1-10-17 airflow]$ mysql -uroot -p Enter password: ## <PASSWORD>! 로 접속 Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 9 Server version: 8.0.21 MySQL Community Server - GPL Copyright (c) 2000, 2020, Oracle and/or its affiliates. All rights reserved. Oracle is a registered trademark of Oracle Corporation and/or its affiliates. Other names may be trademarks of their respective owners. Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. mysql> alter table airflow.xcom modify value LONGBLOB; Query OK, 0 rows affected (0.05 sec) Records: 0 Duplicates: 0 Warnings: 0 mysql> exit Bye ``` 그런 다음에 airflow 서버와 스케쥴러도 구동한다. 
```python [ec2-user@ip-10-1-10-4 airflow]$ airflow webserver -p 8080 ____________ _____________ ____ |__( )_________ __/__ /________ __ ____ /| |_ /__ ___/_ /_ __ /_ __ \_ | /| / / ___ ___ | / _ / _ __/ _ / / /_/ /_ |/ |/ / _/_/ |_/_/ /_/ /_/ /_/ \____/____/|__/ [2020-10-13 00:33:29,928] {__init__.py:50} INFO - Using executor LocalExecutor [2020-10-13 00:33:29,928] {dagbag.py:417} INFO - Filling up the DagBag from /home/ec2-user/airflow/dags Running the Gunicorn Server with: Workers: 4 sync Host: 0.0.0.0:8080 Timeout: 120 Logfiles: - - ================================================================= [2020-10-13 00:33:30 +0000] [12247] [INFO] Starting gunicorn 20.0.4 [2020-10-13 00:33:30 +0000] [12247] [INFO] Listening at: http://0.0.0.0:8080 (12247) [2020-10-13 00:33:30 +0000] [12247] [INFO] Using worker: sync [2020-10-13 00:33:30 +0000] [12250] [INFO] Booting worker with pid: 12250 [2020-10-13 00:33:30 +0000] [12251] [INFO] Booting worker with pid: 12251 [2020-10-13 00:33:30 +0000] [12252] [INFO] Booting worker with pid: 12252 [2020-10-13 00:33:30 +0000] [12253] [INFO] Booting worker with pid: 12253 [2020-10-13 00:33:31,606] {__init__.py:50} INFO - Using executor LocalExecutor [2020-10-13 00:33:31,616] {dagbag.py:417} INFO - Filling up the DagBag from /home/ec2-user/airflow/dags [2020-10-13 00:33:31,653] {__init__.py:50} INFO - Using executor LocalExecutor [2020-10-13 00:33:31,660] {dagbag.py:417} INFO - Filling up the DagBag from /home/ec2-user/airflow/dags [2020-10-13 00:33:31,804] {__init__.py:50} INFO - Using executor LocalExecutor [2020-10-13 00:33:31,809] {dagbag.py:417} INFO - Filling up the DagBag from /home/ec2-user/airflow/dags [2020-10-13 00:33:31,913] {__init__.py:50} INFO - Using executor LocalExecutor [2020-10-13 00:33:31,913] {dagbag.py:417} INFO - Filling up the DagBag from /home/ec2-user/airflow/dags [2020-10-13 00:34:00 +0000] [12247] [INFO] Handling signal: ttin [2020-10-13 00:34:00 +0000] [12509] [INFO] Booting worker with pid: 12509 
[2020-10-13 00:34:00,752] {__init__.py:50} INFO - Using executor LocalExecutor [2020-10-13 00:34:00,752] {dagbag.py:417} INFO - Filling up the DagBag from /home/ec2-user/airflow/dags [2020-10-13 00:34:01 +0000] [12247] [INFO] Handling signal: ttou [2020-10-13 00:34:01 +0000] [12250] [INFO] Worker exiting (pid: 12250) # 새로 터미널 창을 하나 띄워서 아래와 같이 스케쥴러를 구동한다. [ec2-user@ip-10-1-10-4 ~]$ airflow scheduler ____________ _____________ ____ |__( )_________ __/__ /________ __ ____ /| |_ /__ ___/_ /_ __ /_ __ \_ | /| / / ___ ___ | / _ / _ __/ _ / / /_/ /_ |/ |/ / _/_/ |_/_/ /_/ /_/ /_/ \____/____/|__/ [2020-10-13 00:33:51,618] {__init__.py:50} INFO - Using executor LocalExecutor [2020-10-13 00:33:51,626] {scheduler_job.py:1367} INFO - Starting the scheduler [2020-10-13 00:33:51,626] {scheduler_job.py:1375} INFO - Running execute loop for -1 seconds [2020-10-13 00:33:51,626] {scheduler_job.py:1376} INFO - Processing each file at most -1 times [2020-10-13 00:33:51,626] {scheduler_job.py:1379} INFO - Searching for files in /home/ec2-user/airflow/dags [2020-10-13 00:33:51,627] {scheduler_job.py:1381} INFO - There are 0 files in /home/ec2-user/airflow/dags [2020-10-13 00:33:51,733] {scheduler_job.py:1438} INFO - Resetting orphaned tasks for active dag runs [2020-10-13 00:33:51,751] {dag_processing.py:562} INFO - Launched DagFileProcessorManager with pid: 12435 [2020-10-13 00:33:51,791] {settings.py:55} INFO - Configured default timezone <Timezone [UTC]> ```
markdown
"""Multi-subject source localization with MNE.""" from . import group_model, inverse, utils from .group_model import compute_fwd, prepare_fwds from .inverse import compute_group_inverse from ._version import __version__ __all__ = ["group_model", "inverse", "compute_group_inverse", "compute_fwd", "prepare_fwds", "__version__", "utils"]
python
import React from "react" import PropTypes from "prop-types" import styled from "styled-components" import { GridContainer } from "../layout" import { ArchivePost } from "./components" import { responsiveBreakpointDown } from "../../utils" /** * PostArchive component */ export const PostArchive = ({ posts, categoryType = "" }) => { const isRecipe = categoryType === "Recipe" return ( <GridContainer> <PostsWrapper> {posts.map(post => ( <ArchivePost key={post._id} post={post} isRecipe={isRecipe} /> ))} </PostsWrapper> </GridContainer> ) } /** * PropTypes */ PostArchive.propTypes = { posts: PropTypes.arrayOf(PropTypes.object.isRequired).isRequired, categoryType: PropTypes.string, } /** * Styles */ const PostsWrapper = styled.div` display: flex; flex-wrap: wrap; width: 100%; margin-bottom: -80px; ${responsiveBreakpointDown("laptop", `justify-content: space-between;`)} ${responsiveBreakpointDown("mobile", `margin-bottom: -60px;`)} > * { margin-bottom: 80px; ${responsiveBreakpointDown("mobile", `margin-bottom: 60px;`)} &:not(:nth-child(3n)) { margin-right: 80px; ${responsiveBreakpointDown("desktop", `margin-right: 60px;`)} ${responsiveBreakpointDown("laptop", `margin-right: 0;`)} } &:last-child { margin-right: 0 !important; } } `
javascript
const rauth = require('request-promise').defaults({ json: true, baseUrl: 'http://localhost:3000' });
const rapi = require('request-promise').defaults({ json: true, baseUrl: 'http://localhost:3000/api' });

/**
 * Authenticate against the API as a regular user.
 * @returns {Promise<string>} resolves with the auth token.
 */
function authenticateAsUser () {
    return rauth.post('/auth', {
        body: { username: "user", password: "password" }
    }).then(data => data.token);
}

/**
 * Authenticate as an administrator.
 * NOTE(review): currently logs in with the "user" account and a placeholder
 * password; unused by the tests below -- confirm real admin credentials.
 * @returns {Promise<string>} resolves with the auth token.
 */
function authenticateAsAdmin () {
    return rauth.post('/auth', {
        body: { username: "user", password: "<PASSWORD>" }
    }).then(data => data.token);
}

describe('/auth', () => {
    it('Authenticates user properly! Test data is user:password', (done) => {
        rauth.post('/auth', {
            body: { username: "user", password: "password" }
        })
            .then(() => done())
            .catch(err => done(err));
    });
});

describe('/api/reg', () => {
    it('Returns users registered by administrator with id 42230 from 2018-01-25 to 2018-01-30', done => {
        const ids = 42230;
        const from = '2018-01-25';
        const to = '2018-01-30';

        // Single flat chain so an auth failure is also routed to done(err)
        // instead of producing an unhandled rejection and a timeout.
        authenticateAsUser()
            .then(token => {
                const url = `/reg?ids=${ids}&from=${from}&to=${to}`;
                return rapi.get(url, { headers: { 'Authorization': token } });
            })
            .then(data => {
                // The payload array lives in data.data; the old check read
                // data.length (always undefined on the response object), so
                // the empty-result branch could never fire.
                if (data.data.length === 0) throw new Error('Registered clients not found! It should return a lot of data!');
                // TODO(review): also validate every returned registrationDate
                // lies within the requested [from, to] range.
                done();
            })
            .catch(err => done(err));
    });
});

describe('/api/users', () => {
    describe('GET', () => {
        it('Should return users array without password.', (done) => {
            authenticateAsUser()
                .then(token => rapi.get('/users', { headers: { 'Authorization': token } }))
                .then(data => data.users)
                .then(users => {
                    if (users.length) return users;
                    else throw new Error('Users data empty!');
                })
                .then(users => users.filter(user => user.password))
                .then(users => users.length === 0 ?
                    done() : done(new Error('Returned user data has includes password!')))
                .catch(err => done(err));
        });
    });
});
javascript
To live in Bangalore, one of the most bustling and happening cities of India, is truly a boon. Working all week and unwinding during the weekend by taking a stroll in the gardens galore, shopping at malls, pub-hopping or attending a music concert are some of the most common things done by the working class. But sometimes craving for an escape into the beauty of nature is only natural while living in a big city like Bangalore. This craving got to my nerves so my friends and I decided to head to Vattakanal, a mystical town that is located about 7 km from Kodaikanal in Tamil Nadu. Since Kodaikanal is a popular tourist destination, we decided to explore its neighbouring, and supposedly offbeat, town of Vattakanal. We planned the trip on a long weekend during monsoon, which is usually the peak time for tourists to visit places such as Kodaikanal. In order to reach Vattakanal, one must pass right through Kodaikanal, there isn't any other way to reach the town. So we packed our bags and left on a Friday night. The drive of 465 km took us almost 10 hours, so we reached Kodaikanal at around 9 am. The route from Kodaikanal to Vattakanal is quite narrow and tends to face a major traffic jam even if a single vehicle breaks down. And this was precisely the only misfortune we faced during the trip, since it took us nearly 3 hours to complete the last leg of our travel! After the strenuous journey, we reached our cottage which was perched on a magnificent altitude that allowed us to get the most breathtaking view of the Western Ghats; a view that immediately refreshed us from the tiring journey. At first, we could only see mist covering everything that surrounded our cottage, almost as if we were living in the clouds! But after an hour or so, the thick mist cleared off to give us a mesmerising view of the Western Ghats, covered in lush greenery at certain parts. 
Since the journey tired us down quite a bit, we decided to just revel in the panoramic view and reconnect on the first day. After a good night's sleep, we woke up to an even more enchanting view of the sun rising from the mountains, while the sky changed its colour from a dull grey, to a vibrant pink, orange and then finally lit all of the town. To start off the day with a hearty breakfast, we headed to the much-famed Altaf's Cafe that was located just 5 minutes from our cottage. Surprisingly crowded with what looked like all of Vattakanal's tourists, the cafe lived up to the fame since it had delicious food of all sorts of cuisine. From here, we walked towards the beautiful Vattakanal Falls that was also situated at a walkable distance from our cottage. The falls came down flowing in a steps manner, so, it was easy to enjoy the falls with groups of visitors occupying different steps. Another place of interest in Vattakanal is the Dolphin's Nose, which is a peak perched at a glorious altitude of 6600 feet, a popular trekking destination. It offers a beautiful view of the entire landscape of mountains and more. The trek can take about 3-4 hours to be completed, both ways, and since we were short on time and pretty content with the view from our cottage, we decided against going on the trek. We returned to our cottage, spent more time talking with each other and on our way back we stopped at the stunning Kodai Lake, where boating, kayaking and other such fun water sports are available. We stopped for some time, cherished its beauty and then finally headed back to Bangalore on the night of day 2. For a relaxed getaway, hit the road and visit Vattakanal! Get best deals on travel, receive travel tips and travel stories instantly Subscribe to Nativeplanet.
english
# <reponame>robertons/moipy
# -*- coding: utf-8 -*-
import sys

# NOTE(review): hard-coded path to a local moippy checkout -- only works on the
# original author's machine; install moippy as a package instead.
sys.path.append("/Users/robertoneves/Projetos/moippy")

import moippy
import json  # imported but unused in this script


def main(arg):
    """Exercise the moippy sandbox API: update an existing bank account.

    :param arg: command-line argument list (``sys.argv``); not used by the body.
    """
    # Initialise the Moip client (token, key, account id) in sandbox + debug
    # mode.  NOTE(review): credentials are hard-coded in source -- move to
    # environment variables / config before real use.
    moippy.Moip('e0dc7c54ce4e4b53988525f32168ad79_v2', 'b769e7ec74b7417281463ca162060b0d', '348d2089063e4126b067b8907a2f49d5_v2', sandbox=True, debug=True)

    # Build a BankAccount for the existing account BKA-YDZA6XBNCOOW with
    # bank-number-237 savings-account details and the holder's CPF/name.
    banco = moippy.BankAccount(
        id="BKA-YDZA6XBNCOOW",
        bankNumber = "237",
        agencyNumber = "12345",
        agencyCheckNumber = "0",
        accountNumber = "12345678",
        accountCheckNumber = "7",
        type = "SAVING",
        holder= {
            "taxDocument": {
                "type": "CPF",
                "number": "622.134.533-22"
            },
            "fullname": "<NAME>"}
    )
    # Push the updated details to the sandbox API, then print the result.
    banco.Update()
    print(banco.toJSON())


if __name__ == "__main__":
    main(sys.argv)
python
<filename>Data/Ministere_des_finances/Communiques_de_presse/Communiques_2015-10-16_3456.json {"id": 3456, "url": "http://proxy-pubminefi.diffusion.finances.gouv.fr/pub/document/18/19886.pdf", "author": "<NAME>", "title": "NOTE AUX REDACTIONS - <NAME> inaugurera l'institut des Métiers et de l'Artisanat du Pays de Meaux (77) lundi 19 octobre à 11H30", "text": " \n \n \n<NAME> \n \nSECRETAIRE D’ETAT CHARGEE DU COMMERCE, DE L’ARTISANAT, DE LA CONSOMMATION ET DE L’ECONOMIE \nSOCIALE ET SOLIDAIRE, AUPRES DU MINISTRE DE L’ECONOMIE, DE L’INDUSTRIE ET DU NUMERIQUE \n \nN o t e a u x r é d a c t i o n s \nN o t e a u x r é d a c t i o n s \nwww.economie.gouv.fr \n@MartinePinville \nParis, le 16 octobre 2015 \nN° 921 \n \n<NAME> inaugurera \nl’institut des Métiers et de l’Artisanat \ndu Pays de Meaux \n \n- Lundi 19 octobre à 11h30 - \nParc d’activités – 2, rue Ir<NAME> \nChauconin-Neufmontiers (77) \n \n<NAME>, Secrétaire d’Etat chargée du Commerce, de l’Artisanat, de la Consommation et \nde l’Economie sociale et solidaire inaugurera l’Institut des Métiers et de l’Artisanat (IMA) du Pays \nde Meaux, lundi 19 octobre à 12h. \n \nL’IMA du Pays de Meaux est un établissement qui s’inscrit au sein de l’Université régionale des \nmétiers et de l’artisanat (URMA) d’Ile de France et rassemble 15 sites de formation continue et 6 \ncentres de formation d’apprentis. Ce centre permettra à 1 600 apprenants en alternance de se \nformer dans plusieurs secteurs : l’alimentation, l’automobile, l’électricité/électrotechnique, le \ncommerce et la beauté. \n \nA cette occasion, <NAME> rappellera l’engagement du gouvernement dans une politique \ndéterminée et ambitieuse en matière d’apprentissage ainsi que les dispositifs pour le soutenir. En \neffet, avec près de 70 % des apprentis qui trouvent un emploi durable à la fin de leur formation, \nl’apprentissage est un des chemins les plus sûrs pour trouver un emploi. 
\n \nDéroulé : \n \n11h30 \nDécoupe du ruban et dévoilement de la plaque \n \n11h45 \nVisite de l’institut \n \n12h15 \nDiscours d’inauguration \n \n12h50 \nPoint presse \n \n \n \n \n \nAccréditations auprès de la Préfecture de Seine-et-Marne : \n01 64 71 75 95 ou 75 29 – <EMAIL> \n \n \n \nContacts presse cabinet de <NAME> : \n<NAME> et <NAME> - 01 53 18 44 13 - <EMAIL>inets.finances.gouv.fr \n", "published_date": "2015-10-16", "section": "Communiques"}
json
Indian captain Virat Kohli was honored by the Madame Tussauds Wax museum in Dubai as they unveiled his wax statue on Monday (October 18). His statue is wearing the nostalgic 1992 World Cup design jersey that the Indian team has been rocking since November of 2020 when it was first revealed by the new kit makers of the Team India, MPL sports. However, this isn’t Kohli’s first wax statue at the renowned wax museum. While Madame Tussauds unveiled Kohli’s first wax statue in 2018 in the Delhi museum, his second wax statue was out in England during the 2019 World Cup in England. Virat Kohli is by far the best batter in the modern era despite the presence of the likes of Steve Smith, Kane Williamson, and Joe Root. While Smith and Root, and Williamson have done amazingly well in Tests and ODI formats of the game, Kohli is the only batter in the modern era to average more than 50 in all three formats of international cricket. The 32-year-old has multiple statutes at the Madame Tussauds museum. Meanwhile, the latest statue shows Kohli’s in India’s navy blue jersey. The official kit makers of the Indian team have unveiled a new look jersey for the T20 World Cup 2021, which India donned in their first warmup match against England recently. The talismanic batter has announced that he will step down as India’s T20I skipper after the competition, he would like to end his stint on a high note. India won the tournament in its inaugural edition in 2007 in South Africa and made it into the finals in 2014 as well in Bangladesh. Kohli will look to continue his golden run in the tournament having won the Player of the Tournament in the last two editions of the T20 World Cup in 2014 and 2016. The Men in Blue will kick-start their campaign against arch-rivals Pakistan at the Dubai International Cricket Stadium on October 24. 
The Men in Blue have a perfect record against their arch-rivals in T20 World Cups and 50-over World Cups as the two sides have locked horns in five T20 World Cup matches and India emerged winners every single time.
english
Chennai: Popular Malayalam actress Lissy Lakshmi on Friday confirmed that her marriage with filmmaker Priyadarshan has officially ended. She says that the procedure a "real ordeal," and now she's relieved. "My marriage with Priyadarshan officially ended today. We both have signed the final papers at the Family Court in Chennai. This has been a real ordeal," Lissy said in a statement. Priyadarshan married Lissy in 1990 and soon after Lissy gave up acting. They had filed for divorce in March. "In recent times, all celebrity divorces, from Hrithik (Roshan) and Sussanne to Dilip and Manju (Warrier) to most recently Amala (Paul) and Vijay all have been mutually agreed divorces. I am sure it also must have been painful to those couples but whatever differences they may have had, they all decided to respect each other," she said. However, she says her relationship was the only exception. "It was often fierce and uncivilised battle on and off the courts until a compromise was reached at the Madras High Court. Perhaps the ugliness of our divorce proceedings says all about the kind of marriage we have had. Anyway, I feel relieved now," she added. Lissy feels this is the end of a long and often very difficult road, and it's time to move on. (Except for the headline, this story has not been edited by NDTV staff and is published from a syndicated feed. )
english
Fortnite leakers aren't always correct about what they find while data mining. Their history of getting predictions right gives fans false hope when they leak information that is not accurate. Leaked skins often get Fortnite players foaming at the mouth, only to have those skins never see the light of day. During the release of Avengers: Infinity War, players could briefly control Thanos in Fortnite if they took out the player currently controlling him. This led fans to believe that the Mad Titan will eventually be released as a playable skin in the future. Even as recently as Fortnite Chapter 2 Season 5, there were rumors about Thanos' return with the inclusion of a new XL skin in the game's files. Thanos is still unavailable for players to obtain. Right before IT: Chapter 2 released in theaters, red balloons started popping up over storm drains in Fortnite. When popped, these balloons let out an evil laugh. The buzz about Pennywise coming to Fortnite was strong, but the killer clown never made an appearance. All the Marvel heroes added to Fortnite over time got fans to think that they would see the friendly neighborhood Spiderman join the fray. An achievement called "Maximum Carnage" that appeared last Season made players think the web-slinger and his nemesis, Carnage, would be added into the game. This never materialized. During Chapter 2 Season 5, there was a lot of buzz around the Tomb Raider franchise's female star appearing in Fortnite. While there are certainly many female hunters, like Sarah Connor, released over the course of Season 5, Lara Croft was not one of them. A skin fans have been clamoring about in Fortnite for ages comes from the Dragonball anime series. In his various Super Saiyan and Ultra Instinct forms, Son Goku is the subject of countless fan-made images, YouTube images, and more. While Epic has never given Fortnite players a reason to believe this would actually happen, players still hope it will happen. 
The Battle Bus is heading into Fortnite Chapter 4 Season 4 soon! Check out the final Fortnite item shop today!
english
We know that God is able to heal us and can heal us but so often we don’t know how to pray specifically for our healing or that for a loved one. In this 3-day plan, pray with us to Jesus for specific physical, emotional, and mental healing. That our bodies and minds would be restored. That the Lord would release the fullness of health and healing to you, your friends, and family.
english
{ "resourceType" : "Encounter", "identifier<list::Identifier>$(Identifier(s) by which this encounter is known$)": null, "status<code=planned|arrived|triaged|in-progress|onleave|finished|cancelled>$(R! planned | arrived | triaged | in-progress | onleave | finished | cancelled +$)": null, "statusHistory<list>$(List of past encounter statuses$)": [{ "status<code=planned|arrived|triaged|in-progress|onleave|finished|cancelled>$(R! planned | arrived | triaged | in-progress | onleave | finished | cancelled +$)": null, "period<Period>$(R! The time that the episode was in the specified status$)": null }], "class<Coding>$(inpatient | outpatient | ambulatory | emergency +$)": null, "classHistory<list>$(List of past encounter classes$)": [{ "class<Coding>$(R! inpatient | outpatient | ambulatory | emergency +$)": null, "period<Period>$(R! The time that the episode was in the specified class$)": null }], "type<list::CodeableConcept>$(Specific type of encounter$)": null, "priority<CodeableConcept>$(Indicates the urgency of the encounter$)": null, "subject<Reference(Patient|Group)>$(The patient or group present at the encounter$)": null, "episodeOfCare<list::Reference(EpisodeOfCare)>$(Episode(s) of care that this encounter should be recorded against$)": null, "incomingReferral<list::Reference(ReferralRequest)>$(The ReferralRequest that initiated this encounter$)": null, "participant<list>$(List of participants involved in the encounter$)": [{ "type<list::CodeableConcept>$(Role of participant in encounter$)": null, "period<Period>$(Period of time during the encounter that the participant participated$)": null, "individual<Reference(Practitioner|RelatedPerson)>$(Persons involved in the encounter other than the patient$)": null }], "appointment<Reference(Appointment)>$(The appointment that scheduled this encounter$)": null, "period<Period>$(The start and end time of the encounter$)": null, "length<Duration>$(Quantity of time the encounter lasted (less time absent)$)": null, 
"reason<list::CodeableConcept>$(Reason the encounter takes place (code)$)": null, "diagnosis<list>$(The list of diagnosis relevant to this encounter$)": [{ "condition<Reference(Condition|Procedure)>$(R! Reason the encounter takes place (resource)$)": null, "role<CodeableConcept>$(Role that this diagnosis has within the encounter (e.g. admission, billing, discharge …)$)": null, "rank<positiveInt>$(Ranking of the diagnosis (for each role type)$)": null }], "account<list::Reference(Account)>$(The set of accounts that may be used for billing for this Encounter$)": null, "hospitalization" : { "preAdmissionIdentifier<Identifier>$(Pre-admission identifier$)": null, "origin<Reference(Location)>$(The location from which the patient came before admission$)": null, "admitSource<CodeableConcept>$(From where patient was admitted (physician referral, transfer)$)": null, "reAdmission<CodeableConcept>$(The type of hospital re-admission that has occurred (if any). If the value is absent, then this is not identified as a readmission$)": null, "dietPreference<list::CodeableConcept>$(Diet preferences reported by the patient$)": null, "specialCourtesy<list::CodeableConcept>$(Special courtesies (VIP, board member)$)": null, "specialArrangement<list::CodeableConcept>$(Wheelchair, translator, stretcher, etc.$)": null, "destination<Reference(Location)>$(Location to which the patient is discharged$)": null, "dischargeDisposition<CodeableConcept>$(Category or kind of location after discharge$)": null }, "location<list>$(List of locations where the patient has been$)": [{ "location<Reference(Location)>$(R! 
Location the encounter takes place$)": null, "status<code>$(planned | active | reserved | completed$)": null, "period<Period>$(Time period during which the patient was present at the location$)": null }], "serviceProvider<Reference(Organization)>$(The custodian organization of this Encounter record$)": null, "partOf<Reference(Encounter)>$(Another Encounter this encounter is part of$)": null }
json
Tuesday April 07, 2020, A team at the Indian Institute of Technology (IIT) has developed a low-cost LED-based machine which can be used for disinfection of floors at hospitals, as well as buses and trains, to aid the fight against COVID-19 in the country. IIT Guwahati is also filing a patent for the machine, which when commercialised, will be available at a cost of Rs 1,000. The current prototype of the machine requires to be manually manned, however, the team is working on making it a robot-manned machine which requires limited human intervention. The machine developed at the request from the Karnataka government to be used in its hospitals and buses will now be pitched to other governments for commercial usage. "Sanitisation of public places is going to be very crucial in the coming months when movements will not be restricted, but due precautions will need to be taken to prevent the spread of the virus. As of now, the disinfection measures are only being taken on surfaces, walls, and open areas, and there is no system yet for sanitisation of floors unless someone decides to manually mop the floor using an alcohol-based cleaner," Senthilmurugan Subbiah, Associate Professor at the institute's Department of Chemical Engineering, told PTI. "UVC system is a proven technology to sanitise the micro-organism infected non-porous surface. Ninety percent killing rate can be achieved by UVC for one of the highly stable viruses MS-2 Coliphase with 186 J dose, whereas 36 J dose is needed for Influenza virus which is similar to COVID-19. The team has developed a UVC LED system capable of providing 400 J dose in 30 seconds, such that virus-infected surface will be sanitised. The unique design of this UVC system will ensure uniform exposure in the virus-infected non-porous area," he added. 
The machine, which has been tested at the institute's laboratory, is also equipped with an object movement identification feature so that UVC exposure to human skin is avoided during the operation. "We are working closely with government agencies and industrial partners to develop several other smart and low-cost technologies to fight against COVID-19," Subbiah said. The death toll due to novel coronavirus rose to 114 and the number of cases in the country climbed to 4,421 on Tuesday, according to the Union Health Ministry. While the number of active COVID-19 cases stood at 3,981, as many as 325 people were cured and discharged, and one had migrated, it stated. The total number of cases include 66 foreign nationals. (Edited by Suman Singh)
english
<reponame>dangra/scrapy-sci {"origin": "http://wallbase.cc/wallpaper/178108", "download_link": "http://wallpapers.wallbase.cc/rozne/wallpaper-178108.jpg", "views": 5970, "x_resolution": 2048, "filetype": "jpg", "site": "wallbase.cc", "y_resolution": 1536, "colors": ["#060709", "#34424c", "#f5f7f8", "#7e8385", "#c3cccd"], "comments": [""], "favorites": 25, "descriptors": ["cars", "sports", "Lamborghini", "vehicles", "supercars", "black background"], "uploader": " 4chan"}
json
{"title": "PDT 2.0 Requirements on a Query Language.", "fields": ["object query language", "rdf query language", "web search query", "sargable", "data control language"], "abstract": "Linguistically annotated treebanks play an essential part in the modern computational linguistics. The more complex the treebanks become, the more sophisticated tools are required for using them, namely for searching in the data. We study linguistic phenomena annotated in the Prague Dependency Treebank 2.0 and create a list of requirements these phenomena set on a search tool, especially on its query language.", "citation": "Citations (7)", "year": "2008", "departments": ["Charles University in Prague"], "conf": "acl", "authors": ["<NAME>00edrovsk\u00fd.....http://dblp.org/pers/hd/m/M=iacute=rovsk=yacute=:Jir=iacute="], "pages": 9}
json
<gh_stars>1-10 {"ast":null,"code":"import { useMemo, useContext } from 'react';\nimport { MotionContext } from './index.js';\nimport { getCurrentTreeVariants } from './utils.js';\n\nfunction useCreateMotionContext(props, isStatic) {\n var _a = getCurrentTreeVariants(props, useContext(MotionContext)),\n initial = _a.initial,\n animate = _a.animate;\n\n return useMemo(function () {\n return {\n initial: initial,\n animate: animate\n };\n },\n /**\n * Only break memoisation in static mode\n */\n isStatic ? [variantLabelsAsDependency(initial), variantLabelsAsDependency(animate)] : []);\n}\n\nfunction variantLabelsAsDependency(prop) {\n return Array.isArray(prop) ? prop.join(\" \") : prop;\n}\n\nexport { useCreateMotionContext };","map":null,"metadata":{},"sourceType":"module"}
json
<gh_stars>0 [ { "merged": "/Users/khiem/Desktop/Ibid/node_modules/react-native-i18n/android/build/intermediates/res/merged/release/drawable-v21/notification_action_background.xml", "source": "/Users/khiem/.gradle/caches/transforms-2/files-2.1/696067750e22afe271b0815aac60d926/core-1.0.1/res/drawable-v21/notification_action_background.xml" }, { "merged": "/Users/khiem/Desktop/Ibid/node_modules/react-native-i18n/android/build/intermediates/res/merged/release/drawable-v21/abc_dialog_material_background.xml", "source": "/Users/khiem/.gradle/caches/transforms-2/files-2.1/b0643dbf53780483c46d3d0638e66f40/appcompat-1.0.2/res/drawable-v21/abc_dialog_material_background.xml" }, { "merged": "/Users/khiem/Desktop/Ibid/node_modules/react-native-i18n/android/build/intermediates/res/merged/release/drawable-v21/abc_btn_colored_material.xml", "source": "/Users/khiem/.gradle/caches/transforms-2/files-2.1/b0643dbf53780483c46d3d0638e66f40/appcompat-1.0.2/res/drawable-v21/abc_btn_colored_material.xml" }, { "merged": "/Users/khiem/Desktop/Ibid/node_modules/react-native-i18n/android/build/intermediates/res/merged/release/drawable-v21/abc_action_bar_item_background_material.xml", "source": "/Users/khiem/.gradle/caches/transforms-2/files-2.1/b0643dbf53780483c46d3d0638e66f40/appcompat-1.0.2/res/drawable-v21/abc_action_bar_item_background_material.xml" }, { "merged": "/Users/khiem/Desktop/Ibid/node_modules/react-native-i18n/android/build/intermediates/res/merged/release/drawable-v21/abc_edit_text_material.xml", "source": "/Users/khiem/.gradle/caches/transforms-2/files-2.1/b0643dbf53780483c46d3d0638e66f40/appcompat-1.0.2/res/drawable-v21/abc_edit_text_material.xml" }, { "merged": "/Users/khiem/Desktop/Ibid/node_modules/react-native-i18n/android/build/intermediates/res/merged/release/drawable-v21/abc_list_divider_material.xml", "source": "/Users/khiem/.gradle/caches/transforms-2/files-2.1/b0643dbf53780483c46d3d0638e66f40/appcompat-1.0.2/res/drawable-v21/abc_list_divider_material.xml" } ]
json
{ "computername": "Nom d'ordinateur", "architecture": "Architecture", "dstudio-auto-disable": "Désactivation auto", "dstudio-auto-reset-workflow": "Initialisation Auto Workflow", "dstudio-auto-started-workflow": "Démarrage Auto Workflow", "dstudio-bootcamp-windows-computer-name": "Nom Windows Boot Camp", "dstudio-completed": "Complété", "dstudio-disabled": "Désactivé", "dstudio-failed": "Echoué", "dstudio-group": "Groupe", "dstudio-host-ard-field-1": "ARD Info 1", "dstudio-host-ard-field-2": "ARD Info 2", "dstudio-host-ard-field-3": "ARD Info 3", "dstudio-host-ard-field-4": "ARD Info 4", "dstudio-host-ard-ignore-empty-fields": "Ignorer les champs ARD vides", "dstudio-host-delete-other-locations": "Supprimer les autres emplacements", "dstudio-host-model-identifier": "Identifiant du modèle", "dstudio-hostname": "Nom de l'hôte", "dstudio-host-new-network-location": "Nouvel emplacement réseau", "dstudio-host-serial-number": "Numéro de série", "dstudio-host-type": "Type d'hôte", "dstudio-last-workflow": "Dernier workflow exécuté", "dstudio-last-workflow-duration": "Durée du dernier workflow", "dstudio-last-workflow-execution-date": "Date d'exécution du dernier workflow", "dstudio-last-workflow-status": "Etat Workflow", "dstudio-mac-addr": "Adresse MAC", "recheck": "Données DeployStudio", "report": "Rapport DeployStudio", "title": "DeployStudio" }
json
<filename>@types/jdk/javax.sql.PooledConnection.d.ts
declare namespace javax {
  namespace sql {
    /**
     * Ambient type declaration mirroring the JDK's `javax.sql.PooledConnection`
     * interface for use from TypeScript.
     */
    interface PooledConnection {
      /** Returns a `java.sql.Connection` handle for this pooled connection. */
      getConnection(): java.sql.Connection
      /** Closes the physical connection. */
      close(): void
      /** Registers a listener for connection events. */
      addConnectionEventListener(arg0: javax.sql.ConnectionEventListener): void
      /** Removes a previously registered connection-event listener. */
      removeConnectionEventListener(arg0: javax.sql.ConnectionEventListener): void
      /** Registers a listener for statement events. */
      addStatementEventListener(arg0: javax.sql.StatementEventListener): void
      /** Removes a previously registered statement-event listener. */
      removeStatementEventListener(arg0: javax.sql.StatementEventListener): void
    }
  }
}
typescript
<filename>demands-csv/src/assets/translatedKeys.json { "owner": "Nome", "email": "E-mail", "tid": "TID", "type": "Peça", "size": "Tamanho", "amount": "Quantidade", "moletom": "Moletom", "camisa1": "Camiseta Oficial", "camisa2": "Camiseta (Listras)", "corta1": "Corta Vento (Degradê)", "corta2": "Corta Vento (Preto)", "calca": "Calça" }
json
For most of us, a classic is a book that stays with us over the years, giving us an inkling of who we are, and who we are not. But it isn't always easy to find a classic of our own, not thrust on us from someplace else. Let me explain. I was 19 years old, and for four years I had been indoctrinated in the honour list of classics. As proof, I had just received a degree in English Literature. My teachers had done their best, but I was left with the unhappy conclusion that the real classics lived in a distant place. Between their home and mine stood insurmountable geographical obstacles. The makers of the classical canon, the pundits of Great Literature, never looked like us, spoke like us, or ate things we did. What they wrote became classics because they were European, mostly English. (The Americans then got only an endnote or two.) So in 1974, emerging from the portals of Bombay University, I had to acknowledge a subversive suspicion. In the real world, literary life did not begin with Chaucer or end with T. S. Eliot. My friends and I took to ransacking the Lost and Found bookstalls of this real world, and read what we could find of that other, unmentionable baggage of classics. Thanks to Macaulay and Co., we had to look for English translations. The Thousand and One Nights, for example, came back to us via Burton or the Penguin Classics. (We didn't know then that in a later India, our lost and found classics would be under threat once more from a gallery of chauvinistic rogues.) But in those days of innocent discovery, I also stumbled on books in English by non-English authors. For instance, I came across a strange and wonderful novel first published in 1948, All About H. Hatterr by G. V. Desani. All About H. Hatterr is a quicksilver-tongued autobiographical of an Anglo-Indian seeking wisdom from the seven sages of India. The language swells and flows in torrents, but its originality is tempered with a carefully designed structure.
If Hatterr, the eponymous hero, takes the reader on a wild roller-coaster ride, the entire holus-bolus has a Desani in masterful control. When I first read Hatterr, I immediately knew it was an important book. But a classic? I mean, is it allowed? Can a classic be so funny, make a fine art of standing Classical Language on its venerable English head? Can a classic be written by a fifty-fifty, starring a hybrid hero, cooking up a dish of khichdi, the eclectic, nourishing, do-it-yourself subcontinental stew? The answer to all these questions turned out to be Yes. And as a result, Desani's novel managed to get the visa required for writers with strange names to travel in the English-speaking world. Now maybe we could give them a few new classics, and change the direction of the traffic.
english
Hajj 2023: Plateau pilgrims 'stranded' over 'failure to remit funds'. The aircraft windscreen was hit by lightning. Sudanese army suspends US and Saudi-brokered ceasefire talks after accusing paramilitary foes of failing to honour commitments, resulting in artillery blasts in Khartoum. France coach Didier Deschamps has named Ousmane Dembele and Christopher Nkunku in the squad for two Euro 2024 qualifiers in June. They missed March's matches due to fitness problems. Philippine lawmakers approved the creation of an $8.9 billion sovereign wealth fund on Wednesday to boost growth and cut poverty, but critics insisted it was a “scam” and should be scrapped.
english
<reponame>nstarke/raspberrypi4-bootloader-analysis #!/usr/bin/env python3 import sys TARGET_FILE = sys.argv[1] DICT_FILE = sys.argv[2] target = open(TARGET_FILE, 'r') d = open(DICT_FILE, 'r').readlines() # allow list these d.append("usb") d.append("pi") d.append("bcm") # these are long enough they don't need to be allowlisted d.append(".bin") d.append(".sig") d.append(".upd") d.append("tftp") d.append("dhcp") allowlist = [ 'bcm', 'pi', 'usb' ] result = [] for t_line in target.readlines(): for d_line in d: if len(d_line.strip()) < 4 and d_line not in allowlist: continue if d_line.lower() in t_line.lower(): print(t_line) result.append(t_line) break target.close() open(TARGET_FILE, 'w').write("\n".join(result))
python
package io.github.darealturtywurty.turtybot.commands.moderation; import java.time.Instant; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import io.github.darealturtywurty.turtybot.commands.core.CommandCategory; import io.github.darealturtywurty.turtybot.commands.core.CoreCommandContext; import io.github.darealturtywurty.turtybot.commands.core.GuildCommand; import io.github.darealturtywurty.turtybot.commands.core.RegisterBotCmd; import io.github.darealturtywurty.turtybot.util.Constants; import io.github.darealturtywurty.turtybot.util.core.WarnUtils; import net.dv8tion.jda.api.EmbedBuilder; import net.dv8tion.jda.api.entities.Member; import net.dv8tion.jda.api.entities.User; import net.dv8tion.jda.api.interactions.commands.OptionMapping; import net.dv8tion.jda.api.interactions.commands.OptionType; import net.dv8tion.jda.api.interactions.commands.build.OptionData; @RegisterBotCmd public class WarningsCommand implements GuildCommand { @Override public CommandCategory getCategory() { return CommandCategory.MODERATION; } @Override public String getDescription() { return "Lists all the warnings for a user."; } @Override public String getName() { return "warnings"; } @Override public List<OptionData> getOptions() { return List.of(new OptionData(OptionType.USER, "user", "User to get warnings for", false)); } @Override public void handle(final CoreCommandContext ctx) { final var guild = ctx.getGuild(); final OptionMapping userOption = ctx.getEvent().getOption("user"); User toGetUser = null; Member toGetMember = null; if (userOption == null) { toGetUser = ctx.getAuthor(); toGetMember = ctx.getMember(); } else { toGetUser = userOption.getAsUser(); toGetMember = userOption.getAsMember(); } final var userWarns = WarnUtils.getUserWarns(guild, toGetUser); final var warnsEmbed = new EmbedBuilder() .setColor(toGetMember != null ? toGetMember.getColorRaw() : 0xFFFFFF) .setTitle("Warnings for: " + (toGetMember != null ? 
toGetMember.getEffectiveName() : toGetUser.getName())) .setDescription(toGetMember != null ? toGetMember.getEffectiveName() + " has " + userWarns.getNumberWarns() + " warnings!" : toGetUser.getName() + " has " + userWarns.getNumberWarns() + " warnings!") .setTimestamp(Instant.now()); final var counter = new AtomicInteger(1); userWarns.warns.forEach((uuid, warnInfo) -> warnsEmbed.addField(counter.getAndIncrement() + ".", "**UUID:** " + uuid.toString() + "\n**Warned By (ID):** " + warnInfo.left + "\n**Date:** " + Constants.DATE_FORMAT.format(warnInfo.middle) + "\n**Reason:** " + warnInfo.right, false)); ctx.getEvent().deferReply().addEmbeds(warnsEmbed.build()).mentionRepliedUser(false).queue(); } @Override public boolean productionReady() { return true; } }
java
#include "ovum/test.h" #include "ovum/node.h" #include "ovum/module.h" #include <cmath> #define EGG_VM_MAGIC_BYTE(byte) byte, #define MAGIC EGG_VM_MAGIC(EGG_VM_MAGIC_BYTE) using namespace egg::ovum; namespace { void expectFailureFromMemory(const uint8_t memory[], size_t bytes, const char* needle) { egg::test::Allocator allocator; ASSERT_THROW_E(ModuleFactory::fromMemory(allocator, "<memory>", memory, memory + bytes), std::runtime_error, ASSERT_STARTSWITH(e.what(), needle)); } void toModuleArray(ModuleBuilder& builder, const Nodes& avalues, Module& out) { // Create a module that just constructs an array of values auto array = builder.createValueArray(avalues); ASSERT_NE(nullptr, array); auto block = builder.createNode(OPCODE_BLOCK, std::move(array)); ASSERT_NE(nullptr, block); auto module = builder.createModule(std::move(block)); ASSERT_NE(nullptr, module); out = ModuleFactory::fromRootNode(builder.allocator, "<resource>", *module); } void toModuleMemoryArray(ModuleBuilder& builder, const Nodes& avalues, std::ostream& out) { // Create a module memory image that just constructs an array of values Module module; toModuleArray(builder, avalues, module); ModuleFactory::toBinaryStream(*module, out); } void fromModuleArray(const Module& in, Node& avalue) { // Extract an array of values from a module ASSERT_NE(nullptr, in); Node root{ &in->getRootNode() }; ASSERT_EQ(OPCODE_MODULE, root->getOpcode()); ASSERT_EQ(1u, root->getChildren()); Node child{ &root->getChild(0) }; ASSERT_EQ(OPCODE_BLOCK, child->getOpcode()); ASSERT_EQ(1u, child->getChildren()); avalue.set(&child->getChild(0)); ASSERT_EQ(OPCODE_AVALUE, avalue->getOpcode()); } void fromModuleMemoryArray(IAllocator& allocator, std::istream& in, Node& avalue) { // Extract an array of values from a module memory image in.clear(); ASSERT_TRUE(in.seekg(0).good()); auto module = ModuleFactory::fromBinaryStream(allocator, "<memory>", in); fromModuleArray(module, avalue); } Node roundTripArray(ModuleBuilder& builder, const 
Nodes& avalues) { // Create a module memory image and then extract the array values std::stringstream ss; toModuleMemoryArray(builder, avalues, ss); Node avalue; fromModuleMemoryArray(builder.allocator, ss, avalue); return avalue; } } TEST(TestModule, ChildrenFromMachineByte) { ASSERT_EQ(0u, Module::childrenFromMachineByte(0)); ASSERT_EQ(1u, Module::childrenFromMachineByte(1)); ASSERT_EQ(2u, Module::childrenFromMachineByte(2)); ASSERT_EQ(3u, Module::childrenFromMachineByte(3)); ASSERT_EQ(4u, Module::childrenFromMachineByte(4)); ASSERT_EQ(SIZE_MAX, Module::childrenFromMachineByte(5)); ASSERT_EQ(0u, Module::childrenFromMachineByte(6)); ASSERT_EQ(4u, Module::childrenFromMachineByte(250)); ASSERT_EQ(SIZE_MAX, Module::childrenFromMachineByte(251)); ASSERT_EQ(0u, Module::childrenFromMachineByte(252)); ASSERT_EQ(1u, Module::childrenFromMachineByte(253)); ASSERT_EQ(2u, Module::childrenFromMachineByte(254)); ASSERT_EQ(3u, Module::childrenFromMachineByte(255)); } TEST(TestModule, OpcodeFromMachineByte) { // Taken from egg-notes.xlsx ASSERT_EQ(OPCODE_END, Module::opcodeFromMachineByte(0)); ASSERT_EQ(OPCODE_UNARY, Module::opcodeFromMachineByte(1)); ASSERT_EQ(OPCODE_BINARY, Module::opcodeFromMachineByte(2)); ASSERT_EQ(OPCODE_TERNARY, Module::opcodeFromMachineByte(3)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(4)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(5)); ASSERT_EQ(OPCODE_IVALUE, Module::opcodeFromMachineByte(6)); ASSERT_EQ(OPCODE_META, Module::opcodeFromMachineByte(7)); ASSERT_EQ(OPCODE_COMPARE, Module::opcodeFromMachineByte(8)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(9)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(10)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(11)); ASSERT_EQ(OPCODE_FVALUE, Module::opcodeFromMachineByte(12)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(13)); ASSERT_EQ(OPCODE_MUTATE, Module::opcodeFromMachineByte(14)); ASSERT_EQ(OPCODE_reserved, 
Module::opcodeFromMachineByte(15)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(16)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(17)); ASSERT_EQ(OPCODE_SVALUE, Module::opcodeFromMachineByte(18)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(19)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(20)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(21)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(22)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(23)); ASSERT_EQ(OPCODE_ANY, Module::opcodeFromMachineByte(24)); ASSERT_EQ(OPCODE_ASSERT, Module::opcodeFromMachineByte(25)); ASSERT_EQ(OPCODE_ASSIGN, Module::opcodeFromMachineByte(26)); ASSERT_EQ(OPCODE_CATCH, Module::opcodeFromMachineByte(27)); ASSERT_EQ(OPCODE_FOR, Module::opcodeFromMachineByte(28)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(29)); ASSERT_EQ(OPCODE_ANYQ, Module::opcodeFromMachineByte(30)); ASSERT_EQ(OPCODE_DECREMENT, Module::opcodeFromMachineByte(31)); ASSERT_EQ(OPCODE_BYNAME, Module::opcodeFromMachineByte(32)); ASSERT_EQ(OPCODE_FOREACH, Module::opcodeFromMachineByte(33)); ASSERT_EQ(OPCODE_INDEXABLE, Module::opcodeFromMachineByte(34)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(35)); ASSERT_EQ(OPCODE_BREAK, Module::opcodeFromMachineByte(36)); ASSERT_EQ(OPCODE_ELLIPSIS, Module::opcodeFromMachineByte(37)); ASSERT_EQ(OPCODE_DO, Module::opcodeFromMachineByte(38)); ASSERT_EQ(OPCODE_GUARD, Module::opcodeFromMachineByte(39)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(40)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(41)); ASSERT_EQ(OPCODE_CONTINUE, Module::opcodeFromMachineByte(42)); ASSERT_EQ(OPCODE_IDENTIFIER, Module::opcodeFromMachineByte(43)); ASSERT_EQ(OPCODE_INDEX, Module::opcodeFromMachineByte(44)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(45)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(46)); ASSERT_EQ(OPCODE_reserved, 
Module::opcodeFromMachineByte(47)); ASSERT_EQ(OPCODE_FALSE, Module::opcodeFromMachineByte(48)); ASSERT_EQ(OPCODE_INCREMENT, Module::opcodeFromMachineByte(49)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(50)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(51)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(52)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(53)); ASSERT_EQ(OPCODE_FINITE, Module::opcodeFromMachineByte(54)); ASSERT_EQ(OPCODE_ITERABLE, Module::opcodeFromMachineByte(55)); ASSERT_EQ(OPCODE_NAMED, Module::opcodeFromMachineByte(56)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(57)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(58)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(59)); ASSERT_EQ(OPCODE_INFERRED, Module::opcodeFromMachineByte(60)); ASSERT_EQ(OPCODE_NOT, Module::opcodeFromMachineByte(61)); ASSERT_EQ(OPCODE_PROPERTY, Module::opcodeFromMachineByte(62)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(63)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(64)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(65)); ASSERT_EQ(OPCODE_NOOP, Module::opcodeFromMachineByte(66)); ASSERT_EQ(OPCODE_POINTEE, Module::opcodeFromMachineByte(67)); ASSERT_EQ(OPCODE_PROPERTYQ, Module::opcodeFromMachineByte(68)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(69)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(70)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(71)); ASSERT_EQ(OPCODE_NULL, Module::opcodeFromMachineByte(72)); ASSERT_EQ(OPCODE_POINTER, Module::opcodeFromMachineByte(73)); ASSERT_EQ(OPCODE_WHILE, Module::opcodeFromMachineByte(74)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(75)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(76)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(77)); ASSERT_EQ(OPCODE_TRUE, Module::opcodeFromMachineByte(78)); ASSERT_EQ(OPCODE_PREDICATE, 
Module::opcodeFromMachineByte(79)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(80)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(81)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(82)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(83)); ASSERT_EQ(OPCODE_VOID, Module::opcodeFromMachineByte(84)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(85)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(86)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(87)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(88)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(89)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(90)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(91)); ASSERT_EQ(OPCODE_DECLARE, Module::opcodeFromMachineByte(92)); ASSERT_EQ(OPCODE_DECLARE, Module::opcodeFromMachineByte(93)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(94)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(95)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(96)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(97)); ASSERT_EQ(OPCODE_FUNCTION, Module::opcodeFromMachineByte(98)); ASSERT_EQ(OPCODE_FUNCTION, Module::opcodeFromMachineByte(99)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(100)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(101)); ASSERT_EQ(OPCODE_BOOL, Module::opcodeFromMachineByte(102)); ASSERT_EQ(OPCODE_BOOL, Module::opcodeFromMachineByte(103)); ASSERT_EQ(OPCODE_GENERATOR, Module::opcodeFromMachineByte(104)); ASSERT_EQ(OPCODE_GENERATOR, Module::opcodeFromMachineByte(105)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(106)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(107)); ASSERT_EQ(OPCODE_RETURN, Module::opcodeFromMachineByte(108)); ASSERT_EQ(OPCODE_RETURN, Module::opcodeFromMachineByte(109)); ASSERT_EQ(OPCODE_IF, Module::opcodeFromMachineByte(110)); ASSERT_EQ(OPCODE_IF, 
Module::opcodeFromMachineByte(111)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(112)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(113)); ASSERT_EQ(OPCODE_THROW, Module::opcodeFromMachineByte(114)); ASSERT_EQ(OPCODE_THROW, Module::opcodeFromMachineByte(115)); ASSERT_EQ(OPCODE_TRY, Module::opcodeFromMachineByte(116)); ASSERT_EQ(OPCODE_TRY, Module::opcodeFromMachineByte(117)); ASSERT_EQ(OPCODE_TRY, Module::opcodeFromMachineByte(118)); ASSERT_EQ(OPCODE_TRY, Module::opcodeFromMachineByte(119)); ASSERT_EQ(OPCODE_YIELD, Module::opcodeFromMachineByte(120)); ASSERT_EQ(OPCODE_YIELD, Module::opcodeFromMachineByte(121)); ASSERT_EQ(OPCODE_SWITCH, Module::opcodeFromMachineByte(122)); ASSERT_EQ(OPCODE_SWITCH, Module::opcodeFromMachineByte(123)); ASSERT_EQ(OPCODE_SWITCH, Module::opcodeFromMachineByte(124)); ASSERT_EQ(OPCODE_SWITCH, Module::opcodeFromMachineByte(125)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(126)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(127)); ASSERT_EQ(OPCODE_CASE, Module::opcodeFromMachineByte(128)); ASSERT_EQ(OPCODE_CASE, Module::opcodeFromMachineByte(129)); ASSERT_EQ(OPCODE_CASE, Module::opcodeFromMachineByte(130)); ASSERT_EQ(OPCODE_CASE, Module::opcodeFromMachineByte(131)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(132)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(133)); ASSERT_EQ(OPCODE_VARARGS, Module::opcodeFromMachineByte(134)); ASSERT_EQ(OPCODE_VARARGS, Module::opcodeFromMachineByte(135)); ASSERT_EQ(OPCODE_VARARGS, Module::opcodeFromMachineByte(136)); ASSERT_EQ(OPCODE_VARARGS, Module::opcodeFromMachineByte(137)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(138)); ASSERT_EQ(OPCODE_OPTIONAL, Module::opcodeFromMachineByte(139)); ASSERT_EQ(OPCODE_OPTIONAL, Module::opcodeFromMachineByte(140)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(141)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(142)); ASSERT_EQ(OPCODE_reserved, 
Module::opcodeFromMachineByte(143)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(144)); ASSERT_EQ(OPCODE_REQUIRED, Module::opcodeFromMachineByte(145)); ASSERT_EQ(OPCODE_REQUIRED, Module::opcodeFromMachineByte(146)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(147)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(148)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(149)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(150)); ASSERT_EQ(OPCODE_ATTRIBUTE, Module::opcodeFromMachineByte(151)); ASSERT_EQ(OPCODE_ATTRIBUTE, Module::opcodeFromMachineByte(152)); ASSERT_EQ(OPCODE_ATTRIBUTE, Module::opcodeFromMachineByte(153)); ASSERT_EQ(OPCODE_ATTRIBUTE, Module::opcodeFromMachineByte(154)); ASSERT_EQ(OPCODE_ATTRIBUTE, Module::opcodeFromMachineByte(155)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(156)); ASSERT_EQ(OPCODE_BLOCK, Module::opcodeFromMachineByte(157)); ASSERT_EQ(OPCODE_BLOCK, Module::opcodeFromMachineByte(158)); ASSERT_EQ(OPCODE_BLOCK, Module::opcodeFromMachineByte(159)); ASSERT_EQ(OPCODE_BLOCK, Module::opcodeFromMachineByte(160)); ASSERT_EQ(OPCODE_BLOCK, Module::opcodeFromMachineByte(161)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(162)); ASSERT_EQ(OPCODE_CALL, Module::opcodeFromMachineByte(163)); ASSERT_EQ(OPCODE_CALL, Module::opcodeFromMachineByte(164)); ASSERT_EQ(OPCODE_CALL, Module::opcodeFromMachineByte(165)); ASSERT_EQ(OPCODE_CALL, Module::opcodeFromMachineByte(166)); ASSERT_EQ(OPCODE_CALL, Module::opcodeFromMachineByte(167)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(168)); ASSERT_EQ(OPCODE_CALLABLE, Module::opcodeFromMachineByte(169)); ASSERT_EQ(OPCODE_CALLABLE, Module::opcodeFromMachineByte(170)); ASSERT_EQ(OPCODE_CALLABLE, Module::opcodeFromMachineByte(171)); ASSERT_EQ(OPCODE_CALLABLE, Module::opcodeFromMachineByte(172)); ASSERT_EQ(OPCODE_CALLABLE, Module::opcodeFromMachineByte(173)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(174)); 
ASSERT_EQ(OPCODE_CHOICE, Module::opcodeFromMachineByte(175)); ASSERT_EQ(OPCODE_CHOICE, Module::opcodeFromMachineByte(176)); ASSERT_EQ(OPCODE_CHOICE, Module::opcodeFromMachineByte(177)); ASSERT_EQ(OPCODE_CHOICE, Module::opcodeFromMachineByte(178)); ASSERT_EQ(OPCODE_CHOICE, Module::opcodeFromMachineByte(179)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(180)); ASSERT_EQ(OPCODE_DEFAULT, Module::opcodeFromMachineByte(181)); ASSERT_EQ(OPCODE_DEFAULT, Module::opcodeFromMachineByte(182)); ASSERT_EQ(OPCODE_DEFAULT, Module::opcodeFromMachineByte(183)); ASSERT_EQ(OPCODE_DEFAULT, Module::opcodeFromMachineByte(184)); ASSERT_EQ(OPCODE_DEFAULT, Module::opcodeFromMachineByte(185)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(186)); ASSERT_EQ(OPCODE_EXTENSIBLE, Module::opcodeFromMachineByte(187)); ASSERT_EQ(OPCODE_EXTENSIBLE, Module::opcodeFromMachineByte(188)); ASSERT_EQ(OPCODE_EXTENSIBLE, Module::opcodeFromMachineByte(189)); ASSERT_EQ(OPCODE_EXTENSIBLE, Module::opcodeFromMachineByte(190)); ASSERT_EQ(OPCODE_EXTENSIBLE, Module::opcodeFromMachineByte(191)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(192)); ASSERT_EQ(OPCODE_LAMBDA, Module::opcodeFromMachineByte(193)); ASSERT_EQ(OPCODE_LAMBDA, Module::opcodeFromMachineByte(194)); ASSERT_EQ(OPCODE_LAMBDA, Module::opcodeFromMachineByte(195)); ASSERT_EQ(OPCODE_LAMBDA, Module::opcodeFromMachineByte(196)); ASSERT_EQ(OPCODE_LAMBDA, Module::opcodeFromMachineByte(197)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(198)); ASSERT_EQ(OPCODE_LENGTH, Module::opcodeFromMachineByte(199)); ASSERT_EQ(OPCODE_LENGTH, Module::opcodeFromMachineByte(200)); ASSERT_EQ(OPCODE_LENGTH, Module::opcodeFromMachineByte(201)); ASSERT_EQ(OPCODE_LENGTH, Module::opcodeFromMachineByte(202)); ASSERT_EQ(OPCODE_LENGTH, Module::opcodeFromMachineByte(203)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(204)); ASSERT_EQ(OPCODE_UNION, Module::opcodeFromMachineByte(205)); ASSERT_EQ(OPCODE_UNION, 
Module::opcodeFromMachineByte(206)); ASSERT_EQ(OPCODE_UNION, Module::opcodeFromMachineByte(207)); ASSERT_EQ(OPCODE_UNION, Module::opcodeFromMachineByte(208)); ASSERT_EQ(OPCODE_UNION, Module::opcodeFromMachineByte(209)); ASSERT_EQ(OPCODE_AVALUE, Module::opcodeFromMachineByte(210)); ASSERT_EQ(OPCODE_AVALUE, Module::opcodeFromMachineByte(211)); ASSERT_EQ(OPCODE_AVALUE, Module::opcodeFromMachineByte(212)); ASSERT_EQ(OPCODE_AVALUE, Module::opcodeFromMachineByte(213)); ASSERT_EQ(OPCODE_AVALUE, Module::opcodeFromMachineByte(214)); ASSERT_EQ(OPCODE_AVALUE, Module::opcodeFromMachineByte(215)); ASSERT_EQ(OPCODE_FLOAT, Module::opcodeFromMachineByte(216)); ASSERT_EQ(OPCODE_FLOAT, Module::opcodeFromMachineByte(217)); ASSERT_EQ(OPCODE_FLOAT, Module::opcodeFromMachineByte(218)); ASSERT_EQ(OPCODE_FLOAT, Module::opcodeFromMachineByte(219)); ASSERT_EQ(OPCODE_FLOAT, Module::opcodeFromMachineByte(220)); ASSERT_EQ(OPCODE_FLOAT, Module::opcodeFromMachineByte(221)); ASSERT_EQ(OPCODE_INT, Module::opcodeFromMachineByte(222)); ASSERT_EQ(OPCODE_INT, Module::opcodeFromMachineByte(223)); ASSERT_EQ(OPCODE_INT, Module::opcodeFromMachineByte(224)); ASSERT_EQ(OPCODE_INT, Module::opcodeFromMachineByte(225)); ASSERT_EQ(OPCODE_INT, Module::opcodeFromMachineByte(226)); ASSERT_EQ(OPCODE_INT, Module::opcodeFromMachineByte(227)); ASSERT_EQ(OPCODE_OBJECT, Module::opcodeFromMachineByte(228)); ASSERT_EQ(OPCODE_OBJECT, Module::opcodeFromMachineByte(229)); ASSERT_EQ(OPCODE_OBJECT, Module::opcodeFromMachineByte(230)); ASSERT_EQ(OPCODE_OBJECT, Module::opcodeFromMachineByte(231)); ASSERT_EQ(OPCODE_OBJECT, Module::opcodeFromMachineByte(232)); ASSERT_EQ(OPCODE_OBJECT, Module::opcodeFromMachineByte(233)); ASSERT_EQ(OPCODE_OVALUE, Module::opcodeFromMachineByte(234)); ASSERT_EQ(OPCODE_OVALUE, Module::opcodeFromMachineByte(235)); ASSERT_EQ(OPCODE_OVALUE, Module::opcodeFromMachineByte(236)); ASSERT_EQ(OPCODE_OVALUE, Module::opcodeFromMachineByte(237)); ASSERT_EQ(OPCODE_OVALUE, Module::opcodeFromMachineByte(238)); 
ASSERT_EQ(OPCODE_OVALUE, Module::opcodeFromMachineByte(239)); ASSERT_EQ(OPCODE_STRING, Module::opcodeFromMachineByte(240)); ASSERT_EQ(OPCODE_STRING, Module::opcodeFromMachineByte(241)); ASSERT_EQ(OPCODE_STRING, Module::opcodeFromMachineByte(242)); ASSERT_EQ(OPCODE_STRING, Module::opcodeFromMachineByte(243)); ASSERT_EQ(OPCODE_STRING, Module::opcodeFromMachineByte(244)); ASSERT_EQ(OPCODE_STRING, Module::opcodeFromMachineByte(245)); ASSERT_EQ(OPCODE_TYPE, Module::opcodeFromMachineByte(246)); ASSERT_EQ(OPCODE_TYPE, Module::opcodeFromMachineByte(247)); ASSERT_EQ(OPCODE_TYPE, Module::opcodeFromMachineByte(248)); ASSERT_EQ(OPCODE_TYPE, Module::opcodeFromMachineByte(249)); ASSERT_EQ(OPCODE_TYPE, Module::opcodeFromMachineByte(250)); ASSERT_EQ(OPCODE_TYPE, Module::opcodeFromMachineByte(251)); ASSERT_EQ(OPCODE_reserved, Module::opcodeFromMachineByte(252)); ASSERT_EQ(OPCODE_MODULE, Module::opcodeFromMachineByte(253)); ASSERT_EQ(OPCODE_MODULE, Module::opcodeFromMachineByte(254)); ASSERT_EQ(OPCODE_MODULE, Module::opcodeFromMachineByte(255)); } TEST(TestModule, OpcodeEncode0) { ASSERT_EQ(72, OpcodeProperties::from(OPCODE_NULL).encode(0)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_NULL).encode(1)); } TEST(TestModule, OpcodeEncode1) { ASSERT_EQ(0, OpcodeProperties::from(OPCODE_UNARY).encode(0)); ASSERT_EQ(1, OpcodeProperties::from(OPCODE_UNARY).encode(1)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_UNARY).encode(2)); } TEST(TestModule, OpcodeEncode2) { ASSERT_EQ(0, OpcodeProperties::from(OPCODE_BINARY).encode(0)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_BINARY).encode(1)); ASSERT_EQ(2, OpcodeProperties::from(OPCODE_BINARY).encode(2)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_BINARY).encode(3)); } TEST(TestModule, OpcodeEncode3) { ASSERT_EQ(0, OpcodeProperties::from(OPCODE_TERNARY).encode(0)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_TERNARY).encode(1)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_TERNARY).encode(2)); ASSERT_EQ(3, 
OpcodeProperties::from(OPCODE_TERNARY).encode(3)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_TERNARY).encode(4)); } TEST(TestModule, OpcodeEncode4) { ASSERT_EQ(0, OpcodeProperties::from(OPCODE_FOR).encode(0)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_FOR).encode(1)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_FOR).encode(2)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_FOR).encode(3)); ASSERT_EQ(28, OpcodeProperties::from(OPCODE_FOR).encode(4)); ASSERT_EQ(0, OpcodeProperties::from(OPCODE_FOR).encode(5)); } TEST(TestModule, OpcodeEncode5) { ASSERT_EQ(210, OpcodeProperties::from(OPCODE_AVALUE).encode(0)); ASSERT_EQ(211, OpcodeProperties::from(OPCODE_AVALUE).encode(1)); ASSERT_EQ(212, OpcodeProperties::from(OPCODE_AVALUE).encode(2)); ASSERT_EQ(213, OpcodeProperties::from(OPCODE_AVALUE).encode(3)); ASSERT_EQ(214, OpcodeProperties::from(OPCODE_AVALUE).encode(4)); ASSERT_EQ(215, OpcodeProperties::from(OPCODE_AVALUE).encode(5)); ASSERT_EQ(215, OpcodeProperties::from(OPCODE_AVALUE).encode(6)); ASSERT_EQ(215, OpcodeProperties::from(OPCODE_AVALUE).encode(7)); } TEST(TestModule, OperatorUnary) { ASSERT_STREQ("-", OperatorProperties::from(OPERATOR_NEG).name); ASSERT_EQ(OPCLASS_UNARY, OperatorProperties::from(OPERATOR_NEG).opclass); ASSERT_EQ(1u, OperatorProperties::from(OPERATOR_NEG).operands); } TEST(TestModule, OperatorBinary) { ASSERT_STREQ("-", OperatorProperties::from(OPERATOR_SUB).name); ASSERT_EQ(OPCLASS_BINARY, OperatorProperties::from(OPERATOR_SUB).opclass); ASSERT_EQ(2u, OperatorProperties::from(OPERATOR_SUB).operands); } TEST(TestModule, OperatorTernary) { ASSERT_STREQ("?:", OperatorProperties::from(OPERATOR_TERNARY).name); ASSERT_EQ(OPCLASS_TERNARY, OperatorProperties::from(OPERATOR_TERNARY).opclass); ASSERT_EQ(3u, OperatorProperties::from(OPERATOR_TERNARY).operands); } TEST(TestModule, OperatorCompare) { ASSERT_STREQ("<", OperatorProperties::from(OPERATOR_LT).name); ASSERT_EQ(OPCLASS_COMPARE, OperatorProperties::from(OPERATOR_LT).opclass); ASSERT_EQ(2u, 
OperatorProperties::from(OPERATOR_LT).operands); } TEST(TestModule, Constants) { // Test that the magic header starts with a UTF-8 continuation byte const uint8_t magic[] = { MAGIC }; ASSERT_EQ(0x80, magic[0] & 0xC0); // Test that the "end" opcode is zero ASSERT_EQ(OPCODE_END, 0); // Test that well-known opcodes have implicit operands ASSERT_LT(OPCODE_IVALUE, EGG_VM_ISTART); ASSERT_LT(OPCODE_FVALUE, EGG_VM_ISTART); ASSERT_LT(OPCODE_SVALUE, EGG_VM_ISTART); ASSERT_LT(OPCODE_UNARY, EGG_VM_ISTART); ASSERT_LT(OPCODE_BINARY, EGG_VM_ISTART); ASSERT_LT(OPCODE_TERNARY, EGG_VM_ISTART); // Test that operator enums fit into [0..128] for operand fitting ASSERT_EQ(OPERATOR_TERNARY, 128); } TEST(TestModule, FromMemoryBad) { const uint8_t zero[] = { 0 }; expectFailureFromMemory(zero, sizeof(zero), "Invalid magic signature in binary module"); const uint8_t magic[] = { MAGIC 99 }; // This is an invalid section number expectFailureFromMemory(magic, 0, "Truncated section in binary module"); expectFailureFromMemory(magic, 1, "Truncated section in binary module"); expectFailureFromMemory(magic, sizeof(magic) - 1, "Missing code section in binary module"); expectFailureFromMemory(magic, sizeof(magic), "Unrecognized section in binary module"); } TEST(TestModule, FromMemoryMinimal) { egg::test::Allocator allocator; const uint8_t minimal[] = { MAGIC SECTION_CODE, OPCODE_MODULE, OPCODE_BLOCK, OPCODE_NOOP }; auto module = ModuleFactory::fromMemory(allocator, "<memory>", std::begin(minimal), std::end(minimal)); ASSERT_NE(nullptr, module); Node root{ &module->getRootNode() }; ASSERT_NE(nullptr, root); ASSERT_EQ(OPCODE_MODULE, root->getOpcode()); ASSERT_EQ(1u, root->getChildren()); Node child{ &root->getChild(0) }; ASSERT_EQ(OPCODE_BLOCK, child->getOpcode()); ASSERT_EQ(1u, child->getChildren()); Node grandchild{ &child->getChild(0) }; ASSERT_EQ(OPCODE_NOOP, grandchild->getOpcode()); ASSERT_EQ(0u, grandchild->getChildren()); } TEST(TestModule, ToBinaryStream) { egg::test::Allocator allocator; 
const uint8_t minimal[] = { MAGIC SECTION_CODE, OPCODE_MODULE, OPCODE_BLOCK, OPCODE_NOOP }; auto module = ModuleFactory::fromMemory(allocator, "<memory>", std::begin(minimal), std::end(minimal)); ASSERT_NE(nullptr, module); std::stringstream ss; ModuleFactory::toBinaryStream(*module, ss); auto binary = ss.str(); ASSERT_EQ(sizeof(minimal), binary.length()); ASSERT_EQ(0, std::memcmp(binary.data(), minimal, sizeof(minimal))); } TEST(TestModule, ToMemory) { egg::test::Allocator allocator; const uint8_t minimal[] = { MAGIC SECTION_CODE, OPCODE_MODULE, OPCODE_BLOCK, OPCODE_NOOP }; auto module = ModuleFactory::fromMemory(allocator, "<memory>", std::begin(minimal), std::end(minimal)); ASSERT_NE(nullptr, module); auto memory = ModuleFactory::toMemory(allocator, *module); ASSERT_NE(nullptr, memory); ASSERT_EQ(sizeof(minimal), memory->bytes()); ASSERT_EQ(0, std::memcmp(memory->begin(), minimal, sizeof(minimal))); } TEST(TestModule, ModuleBuilder) { egg::test::Allocator allocator; ModuleBuilder builder(allocator); auto noop = builder.createNode(OPCODE_NOOP); auto block = builder.createNode(OPCODE_BLOCK, std::move(noop)); auto original = builder.createModule(std::move(block)); auto module = ModuleFactory::fromRootNode(allocator, "<resource>", *original); ASSERT_NE(nullptr, module); Node root{ &module->getRootNode() }; ASSERT_EQ(original.get(), root.get()); ASSERT_EQ(OPCODE_MODULE, root->getOpcode()); ASSERT_EQ(1u, root->getChildren()); Node child{ &root->getChild(0) }; ASSERT_EQ(OPCODE_BLOCK, child->getOpcode()); ASSERT_EQ(1u, child->getChildren()); Node grandchild{ &child->getChild(0) }; ASSERT_EQ(OPCODE_NOOP, grandchild->getOpcode()); ASSERT_EQ(0u, grandchild->getChildren()); } TEST(TestModule, BuildConstantInt) { egg::test::Allocator allocator; ModuleBuilder builder(allocator); auto avalue = roundTripArray(builder, { builder.createValueInt(123456789), builder.createValueInt(-123456789) }); ASSERT_EQ(2u, avalue->getChildren()); Node value; value.set(&avalue->getChild(0)); 
ASSERT_EQ(OPCODE_IVALUE, value->getOpcode()); ASSERT_EQ(123456789, value->getInt()); ASSERT_EQ(0u, value->getChildren()); value.set(&avalue->getChild(1)); ASSERT_EQ(OPCODE_IVALUE, value->getOpcode()); ASSERT_EQ(-123456789, value->getInt()); ASSERT_EQ(0u, value->getChildren()); } TEST(TestModule, BuildConstantFloat) { egg::test::Allocator allocator; ModuleBuilder builder(allocator); auto avalue = roundTripArray(builder, { builder.createValueFloat(123456789), builder.createValueFloat(-123456789), builder.createValueFloat(-0.125), builder.createValueFloat(std::numeric_limits<double>::quiet_NaN()) }); ASSERT_EQ(4u, avalue->getChildren()); Node value; value.set(&avalue->getChild(0)); ASSERT_EQ(OPCODE_FVALUE, value->getOpcode()); ASSERT_EQ(123456789.0, value->getFloat()); ASSERT_EQ(0u, value->getChildren()); value.set(&avalue->getChild(1)); ASSERT_EQ(OPCODE_FVALUE, value->getOpcode()); ASSERT_EQ(-123456789.0, value->getFloat()); ASSERT_EQ(0u, value->getChildren()); value.set(&avalue->getChild(2)); ASSERT_EQ(OPCODE_FVALUE, value->getOpcode()); ASSERT_EQ(-0.125, value->getFloat()); ASSERT_EQ(0u, value->getChildren()); value.set(&avalue->getChild(3)); ASSERT_EQ(OPCODE_FVALUE, value->getOpcode()); ASSERT_TRUE(std::isnan(value->getFloat())); ASSERT_EQ(0u, value->getChildren()); } TEST(TestModule, BuildConstantString) { egg::test::Allocator allocator; ModuleBuilder builder(allocator); auto avalue = roundTripArray(builder, { builder.createValueString(""), builder.createValueString("hello") }); ASSERT_EQ(2u, avalue->getChildren()); Node value; value.set(&avalue->getChild(0)); ASSERT_EQ(OPCODE_SVALUE, value->getOpcode()); ASSERT_STRING("", value->getString()); ASSERT_EQ(0u, value->getChildren()); value.set(&avalue->getChild(1)); ASSERT_EQ(OPCODE_SVALUE, value->getOpcode()); ASSERT_STRING("hello", value->getString()); ASSERT_EQ(0u, value->getChildren()); } TEST(TestModule, BuildOperator) { egg::test::Allocator allocator; ModuleBuilder builder(allocator); auto avalue = 
roundTripArray(builder, { builder.createOperator(OPCODE_UNARY, OPERATOR_REF, { builder.createNode(OPCODE_NULL) }) }); ASSERT_EQ(1u, avalue->getChildren()); Node value; value.set(&avalue->getChild(0)); ASSERT_EQ(OPCODE_UNARY, value->getOpcode()); ASSERT_EQ(OPERATOR_REF, value->getInt()); // the integer operator code ASSERT_EQ(1u, value->getChildren()); value.set(&value->getChild(0)); ASSERT_EQ(OPCODE_NULL, value->getOpcode()); ASSERT_EQ(0u, value->getChildren()); } TEST(TestModule, BuildWithAttribute) { egg::test::Allocator allocator; ModuleBuilder builder(allocator); auto avalue = roundTripArray(builder, { builder.withAttribute("a", String("alpha")).withAttribute("b", 123).createOperator(OPCODE_UNARY, OPERATOR_REF,{ builder.createNode(OPCODE_NULL) }) }); ASSERT_EQ(1u, avalue->getChildren()); Node value; value.set(&avalue->getChild(0)); ASSERT_EQ(OPCODE_UNARY, value->getOpcode()); ASSERT_EQ(OPERATOR_REF, value->getInt()); // the integer operator code ASSERT_EQ(1u, value->getChildren()); ASSERT_EQ(2u, value->getAttributes()); value.set(&value->getAttribute(0)); ASSERT_EQ(OPCODE_ATTRIBUTE, value->getOpcode()); ASSERT_EQ(2u, value->getChildren()); value.set(&value->getChild(1)); ASSERT_EQ(OPCODE_SVALUE, value->getOpcode()); ASSERT_EQ("alpha", value->getString().toUTF8()); }
cpp
# tests/test-samples.py
# Copyright 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

"""Render sample views (RGB/depth/semantic/instance cube maps) from random
locations inside selected room types of House3D houses.

Houses are loaded by a pool of loader threads feeding a queue that a (smaller)
pool of renderer threads consumes; a tqdm thread tracks overall progress.
"""

import tqdm
import cv2
import numpy as np
import os
import queue
import time
import argparse

from House3D import objrender, Environment, MultiHouseEnv, load_config, House
from House3D.objrender import RenderMode
from threading import Thread, Lock

RANDOM_SEED = 0
MAX_QSIZE = 20           # soft cap on loaded-house queue size (loaders back off above it)
LOADING_THREADS = 10
RENDER_THREADS = 1
SAMPLES_PER_ROOM = 3     # random camera locations sampled per valid room
ROOM_TYPES = set(['living_room'])
ROBOT_RAD = 1.0
RENDER_MODES = [RenderMode.RGB, RenderMode.DEPTH, RenderMode.SEMANTIC, RenderMode.INSTANCE]
RENDER_NAMES = ['rgb', 'depth', 'semantic', 'instance']


class RestrictedHouse(House):
    """House subclass whose regions of interest are limited to ROOM_TYPES."""

    def __init__(self, **kwargs):
        super(RestrictedHouse, self).__init__(**kwargs)

    def _getRegionsOfInterest(self):
        # Collect the bounding boxes of every room whose type is in ROOM_TYPES.
        result = []
        for roomTp in ROOM_TYPES:
            rooms = self._getRooms(roomTp)
            for room in rooms:
                result.append(self._getRoomBounds(room))
        return result


def create_house(houseID, config, robotRadius=ROBOT_RAD):
    """Load one house from `config['prefix']/<houseID>`; raises if the
    .obj/.json files are missing. Uses a cached occupancy map when present."""
    print('Loading house {}'.format(houseID))
    objFile = os.path.join(config['prefix'], houseID, 'house.obj')
    jsonFile = os.path.join(config['prefix'], houseID, 'house.json')
    assert (os.path.isfile(objFile) and os.path.isfile(jsonFile)), '[Environment] house objects not found! objFile=<{}>'.format(objFile)
    cachefile = os.path.join(config['prefix'], houseID, 'cachedmap1k.pkl')
    if not os.path.isfile(cachefile):
        cachefile = None
    house = RestrictedHouse(
        JsonFile=jsonFile,
        ObjFile=objFile,
        MetaDataFile=config["modelCategoryFile"],
        CachedFile=cachefile,
        RobotRadius=robotRadius,
        SetTarget=False)
    return house


def get_house_dir(houseID):
    # NOTE(review): reads the module-level `args` defined in __main__ —
    # this function only works when the script is run as a program.
    return os.path.join(args.output, houseID)


def gen_rand_house(cfg):
    """Generator yielding house IDs in random order, skipping houses whose
    output directory already exists (i.e. already rendered)."""
    all_house_ids = os.listdir(cfg['prefix'])
    np.random.shuffle(all_house_ids)
    house = None  # NOTE(review): unused leftover
    for houseID in all_house_ids:
        house_dir = get_house_dir(houseID)
        if os.path.exists(house_dir):
            print('{} already exists, skipping'.format(house_dir))
            continue
        yield houseID


def reset_random(env, house, room):
    """Move the agent to a random valid location in `room`.

    Returns False when the house cannot provide a location for the room.
    """
    location = house.getRandomLocationForRoom(room)
    if not location:
        return False
    env.reset(*location)
    return True


def render_current_location(env, houseID, room_type, index):
    """Render the current agent location in every RENDER_MODES mode and write
    each as a PNG under the house's output directory."""
    output_dir = get_house_dir(houseID)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
        print('Created directory {}'.format(output_dir))
    for mode_idx in range(len(RENDER_MODES)):
        render_mode = RENDER_MODES[mode_idx]
        render_name = RENDER_NAMES[mode_idx]  # NOTE(review): unused
        env.set_render_mode(RENDER_MODES[mode_idx])
        img = env.render_cube_map(copy=True)
        if render_mode == RenderMode.DEPTH:
            # Depth comes back multi-channel; keep only the first channel.
            img = img[:, :, 0]
        else:
            # Renderer output is RGB; cv2.imwrite expects BGR ordering.
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        output_filename = '{}-room_{}-loc_{}-render_{}.png'.format(
            houseID, room_type, index, RENDER_NAMES[mode_idx])
        cv2.imwrite(os.path.join(output_dir, output_filename), img)


def get_valid_rooms(house):
    """Return the rooms of `house` having at least one type in ROOM_TYPES."""
    result = []
    for room in house.all_rooms:
        for tp in room['roomTypes']:
            if tp.lower() in ROOM_TYPES:
                result.append(room)
                break
    print('Valid rooms: {}'.format(len(result)))
    return result


def house_loader(house_gen, cfg, house_queue, gen_lock):
    """Loader-thread body: pull house IDs from the shared generator (guarded
    by `gen_lock`), load each house, and push it onto `house_queue`."""
    while True:
        while house_queue.qsize() > MAX_QSIZE:
            # Wait until we clear up the queue
            time.sleep(0)
        houseID = None
        with gen_lock:
            try:
                houseID = next(house_gen)
            except StopIteration:
                print('Done processing houses, stopping loading thread...')
                return
        house = None
        try:
            house = create_house(houseID, cfg)
        except Exception as e:
            # Best effort: a broken house is logged and skipped, not fatal.
            print('!! Error loading house {}: {}'.format(houseID, e))
            continue
        house_queue.put((houseID, house))
        print('Put house {} in queue, total: {}'.format(houseID, house_queue.qsize()))


def house_renderer(cfg, house_queue, progress_queue):
    """Renderer-thread body: consume loaded houses and render
    SAMPLES_PER_ROOM random locations per valid room."""
    while True:
        houseID, house = house_queue.get()
        api = objrender.RenderAPIThread(w=args.width, h=args.height, device=0)
        env = Environment(api, house, cfg)
        cam = api.getCamera()  # NOTE(review): unused — confirm no required side effect
        loc_idx = 0
        valid_rooms = get_valid_rooms(house)
        for room in valid_rooms:
            for i in range(SAMPLES_PER_ROOM):
                if not reset_random(env, house, room):
                    print('Unable to sample location for house {}'.format(houseID))
                    break
                render_current_location(env, houseID, room['id'], loc_idx)
                loc_idx += 1
        house_queue.task_done()
        progress_queue.put(1)
        print('Rendered house {}'.format(houseID))


def progress_tracker(total, progress_queue):
    """Progress-thread body: advance a tqdm bar by counts received on
    `progress_queue`."""
    tracker = tqdm.trange(total)
    while True:
        count = progress_queue.get()
        tracker.update(count)
        progress_queue.task_done()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--output', help='output directory', default='./')
    parser.add_argument('--width', type=int, default=1024)
    parser.add_argument('--height', type=int, default=1024)
    args = parser.parse_args()
    assert os.path.isdir(args.output)

    np.random.seed(RANDOM_SEED)
    cfg = load_config('config.json')
    total = len(os.listdir(cfg['prefix']))
    house_gen = gen_rand_house(cfg)
    gen_lock = Lock()

    house_queue = queue.Queue()
    progress_queue = queue.Queue()

    loader_threads = []
    for i in range(LOADING_THREADS):
        t = Thread(target=house_loader, args=(house_gen, cfg, house_queue, gen_lock))
        t.start()
        loader_threads.append(t)

    render_threads = []
    for i in range(RENDER_THREADS):
        t = Thread(target=house_renderer, args=(cfg, house_queue, progress_queue))
        t.daemon = True
        t.start()
        render_threads.append(t)

    progress_thread = Thread(target=progress_tracker, args=(total, progress_queue))
    progress_thread.daemon = True
    progress_thread.start()

    # Wait for queue to be fully populated
    for t in loader_threads:
        t.join()
    # Wait for queue to be fully processed
    house_queue.join()
    print('Done processing!')
python
EXCLUSIVE: The Republican National Committee is launching a new campaign to focus on maximizing pre-Election Day voting to build on absentee returns and early in-person voting ahead of the 2024 presidential election. Fox News Digital has learned the RNC is rolling out a Bank Your Vote nationwide campaign, which is expected to "encourage, educate and activate Republican voters on when, where and how to lock in their votes as early as possible" through in-person early voting, absentee voting and ballot harvesting where legal. "To beat Joe Biden and the Democrats in 2024, we must ensure that Republicans bank as many pre-Election Day votes as possible," RNC Chair Ronna McDaniel told Fox News Digital. "The RNC is proud to build on our historic efforts from last cycle and work with the entire Republican ecosystem to reach every state. " McDaniel added that "banking votes early needs to be the focus of every single Republican campaign in the country, and the Republican National Committee will lead the charge. " NRCC Chairman Richard Hudson appointed Rep. Byron Donalds, R-Fla. , and NRSC Chairman Steve Daines tapped Sen. Bill Hagerty, R-Tenn. , to co-chair the effort. "I am proud to co-chair the RNC’s efforts to activate Republicans to 'Bank Your Vote’ before Election Day," Donalds told Fox News Digital. "To take back the White House and Senate and strengthen our House majority in 2024, Republicans must play the game by today’s rules, which means maximizing our efforts to bank votes before Election Day," Hagerty told Fox News Digital. "We cannot afford to sacrifice most of the opportunities to bank votes in key states while Democrats run up the score. " Hagerty said encouraging Republicans to "securely ‘Bank Your Vote’ is the only way to protect the vote and reclaim our out-of-control government. " The campaign comes as Republicans seek to build on early voting gains from the 2022 election cycle. 
Republicans, at a higher rate than in 2020, opted to cast their ballots before Election Day. Nevertheless, the GOP still lagged behind Democrats. According to statistics compiled by the U.S. Election Project in November, 33.3% of early votes came from registered Republicans in states that report such information. That was up from 30.5% during the 2020 presidential election; whereas Democrats voted early at about the same rate — 40.6% in 2022 and 40.8% in 2020. The Bank Your Vote campaign is set to build on the RNC’s Election Integrity operation, which has more than 80,000 team members to "protect" the vote. "In the courts, we will continue to fight against bad ballot harvesting laws while also ensuring that it’s easy to vote and hard to cheat in American elections," an RNC official said. The RNC says a critical part of getting voters to become pre-Election Day voters will be "ensuring voter confidence in elections" through its "Protect Your Vote efforts." The RNC is expected to have staff and lawyers on the ground training poll watchers to observe every step of the election process. The campaign’s website, BankYourVote.com, is encouraging voters to pledge to "bank" their votes, which will activate digital reminders from the RNC about all applicable pre-Election Day voting options. The campaign will have the RNC partner with state parties and campaigns to create pages outlining pre-Election Day voting processes for the 56 states and territories with links to state government sites where voters can request their ballots directly. The RNC also plans to "aggressively target young voters on social media platforms and minority voters at our RNC Community Centers." The community centers were created in an effort to promote minority engagement in Asian-Pacific American, Black and Hispanic communities in key states across the nation. 
An RNC official told Fox News Digital that the RNC’s field operation made more than 300 million volunteer door knocks and phone calls during the 2020 and 2022 election cycles.
english
21 (A)“You shall not eat anything that has died naturally. You may give it to the sojourner who is within your towns, that he may eat it, or you may sell it to a foreigner. For (B)you are a people holy to the Lord your God. English Standard Version (ESV) The Holy Bible, English Standard Version. ESV® Text Edition: 2016. Copyright © 2001 by Crossway Bibles, a publishing ministry of Good News Publishers.
english
<reponame>samibadra/Red-Black-Tree <NAME> CS 310, Spring 2014 Assignment #3 FILE: README Copyright 2015 <NAME>. All Rights Reserved ## Description This assignment required an implementation of my own Red-Black (balanced) binary search tree to store GENERIC data types. Furthermore, the "Word" class is the datatype to be stored/searched for in the tree. The TimingTests class shall read from a text file, and store all of the words from the file, into the binary search tree, while still maintaining a complexity of O(log N) for adding/searching. It will analyze and print the total time elapsed (millisec) to add 'N' words to the tree, as well as the average time (nanosec) to search for a word in the tree. NOTE BEFORE RUNNING: In 'TimingTests.java', replace the strings being used to construct Words a-j. Change these to strings that are actually in the text file that it is reading the words from, so that it searches for words that are actually in the tree. ## Lessons Learned - After completing this project, I am much more experienced in the concepts of data structures - especially (of course) with Binary Search Trees. - I became MUCH more efficient with my debugging skills, as I was tasked with solving numerous bugs in the code since I started it. - I also learned a lot about how to work with Generic data types, and how to use the compareTo() method to implement generics.
markdown
Major-General Sanaullah, General Officer Commanding 17 Division, Swat, was on his way back after visiting troops deployed at the Pakistan-Afghanistan border when an improvised explosive device (IED) blew up his vehicle in Upper Dir, Khyber Pakhtunkhwa. Lieutenant Colonel Tauseef Ahmad and Sepoy Irfan Sattar also died in the attack. Sunday was marked by other acts of terrorism in the Federally Administered Tribal Areas (FATA) near Bannu and Miranshah, killing Frontier Corps personnel. These incidents come after the recent All Parties Meeting called by Prime Minister Nawaz Sharif where a dialogue process with Pakistani Taliban was stressed. Reportedly, even the army was on the same page as the government on this. However, sources familiar with the area said that though the Tehreek-e-Taliban Pakistan (TTP) has taken responsibility, it does not have a presence in that region, mostly under the control of Mullah Fazlullah, who ran a parallel administration in Swat before the army was called in. Lieutenant General (retired) Talat Masood, chief coordinator of Pugwash, said it will not be so easy after the army chief’s statement for the government to initiate dialogue. He felt that even if it was not the work of TTP, there were many groups which owe allegiance to it. While the government has strongly condemned the incident, there are those who feel the talks should go on. Lieutenant-Colonel Shafqat Saeed (retired), a defence analyst, said that though TTP acted irresponsibly, the government should not pull back from talks. Instead, it should force the TTP to exercise restraint. Mr. Sharif is away on a three-day trip to Turkey and while the incident has been condemned, little is being said of the future of talks. Rustom Shah Mohmand, former Pakistan ambassador to Afghanistan said the government, after deciding on talks, should have appointed a point person to take it forward.
english
//! [GET /_matrix/client/r0/login/sso/redirect](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-login-sso-redirect) use ruma_api::ruma_api; ruma_api! { metadata: { description: "", method: GET, name: "sso_login", path: "/_matrix/client/r0/login/sso/redirect", rate_limited: false, authentication: None, } request: { /// URL to which the homeserver should return the user after completing /// authentication with the SSO identity provider. #[ruma_api(query)] #[serde(rename = "redirectUrl")] pub redirect_url: &'a str, } response: { /// Redirect URL to the SSO identity provider. #[ruma_api(header = LOCATION)] pub location: String, } error: crate::Error } impl<'a> Request<'a> { /// Creates a new `Request` with the given redirect URL. pub fn new(redirect_url: &'a str) -> Self { Self { redirect_url } } } impl Response { /// Creates a new `Response` with the given SSO URL. pub fn new(location: String) -> Self { Self { location } } } #[cfg(test)] mod tests { use ruma_api::OutgoingRequest; use super::Request; #[test] fn serialize_sso_login_request_uri() { let req: http::Request<Vec<u8>> = Request { redirect_url: "https://example.com/sso" } .try_into_http_request("https://homeserver.tld", None) .unwrap(); assert_eq!( req.uri().to_string(), "https://homeserver.tld/_matrix/client/r0/login/sso/redirect?redirectUrl=https%3A%2F%2Fexample.com%2Fsso" ); } }
rust
// repo: jthelin/cause_crash_dump
#include <iostream>

// Demo program whose sole purpose is to crash: it counts down from 3 and
// then performs an integer division by zero, which on typical platforms
// raises SIGFPE and produces a core dump.
int main() {
  std::cout << "This program will core dump in ...." << std::endl;
  int countdown = 3;
  while (countdown >= 0) {
    if (countdown == 0) {
      // Flush the final message so it is not lost when the process dies.
      std::cout << "... Boom!" << std::endl;
      // Deliberate division by zero -> undefined behavior / core dump.
      int result = 10 / countdown;
      // [[NEVER_REACHED]]
      std::cout << "x = " << result;
    } else {
      std::cout << "... " << countdown << " " << std::flush;
    }
    --countdown;
  }
  // [[NEVER_REACHED]]
  return 0;
}
cpp
<reponame>esw0116/Super-SloMo #[Super SloMo] ##High Quality Estimation of Multiple Intermediate Frames for Video Interpolation import argparse import torch import torchvision import torchvision.transforms as transforms import torch.optim as optim import torch.nn as nn from torch.utils.tensorboard import SummaryWriter from model import superslomo_half as superslomo from model.extraction import center, ends from data import gopro_blur_half as gopro_blur import dataloader from utils import quantize, eval_metrics, meanshift from copy import deepcopy from math import log10 import datetime import os, sys import numpy as np import pandas as pd import tqdm # For parsing commandline arguments parser = argparse.ArgumentParser() parser.add_argument("--dataset_root", type=str, required=True, help='path to dataset folder containing train-test-validation folders') parser.add_argument("--checkpoint_dir", type=str, required=True, help='path to folder for saving checkpoints') parser.add_argument("--checkpoint", type=str, help='path of checkpoint for pretrained model') parser.add_argument("--train_continue", action='store_true', help='If resuming from checkpoint, set to True and set `checkpoint` path. Default: False.') parser.add_argument("--test_only", action='store_true', help='If resuming from checkpoint, set to True and set `checkpoint` path. Default: False.') parser.add_argument("--add_blur", action='store_true', help='Add blurry image') parser.add_argument("--extract11", action='store_true', help='Add blurry image') parser.add_argument("--epochs", type=int, default=40, help='number of epochs to train. Default: 200.') parser.add_argument("--seq_len", type=int, default=11, help='number of frames that composes a sequence.') parser.add_argument("--train_batch_size", type=int, default=8, help='batch size for training. Default: 6.') parser.add_argument("--validation_batch_size", type=int, default=4, help='batch size for validation. 
Default: 10.') parser.add_argument("--init_learning_rate", type=float, default=0.0001, help='set initial learning rate. Default: 0.0001.') parser.add_argument("--milestones", type=list, default=[20, 30], help='Set to epoch values where you want to decrease learning rate by a factor of 0.1. Default: [100, 150]') parser.add_argument("--progress_iter", type=int, default=1000, help='frequency of reporting progress and validation. N: after every N iterations. Default: 100.') parser.add_argument("--checkpoint_epoch", type=int, default=1, help='checkpoint saving frequency. N: after every N epochs. Each checkpoint is roughly of size 151 MB.Default: 5.') parser.add_argument("--amp", action='store_true', help='If True, use mixed precision.') args = parser.parse_args() ### For visualizing loss and interpolated frames writer = SummaryWriter('log') if not os.path.exists(args.checkpoint_dir): os.makedirs(args.checkpoint_dir) scaler = torch.cuda.amp.GradScaler() ###Initialize flow computation and arbitrary-time flow interpolation CNNs. 
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") flowComp = superslomo.UNet(6, 4) flowComp.to(device) if args.add_blur: ArbTimeFlowIntrp = superslomo.UNet(23, 5) else: ArbTimeFlowIntrp = superslomo.UNet(20, 5) ArbTimeFlowIntrp.to(device) ###Initialze backward warpers for train and validation datasets trainFlowBackWarp = superslomo.backWarp(352, 352, device) trainFlowBackWarp = trainFlowBackWarp.to(device) validationFlowBackWarp = superslomo.backWarp(1280, 704, device) validationFlowBackWarp = validationFlowBackWarp.to(device) ### Load Pretrained Extraction Models center_estimation = center.Center() border_estimation = ends.Ends() if not args.extract11: print('Estimation Network: best_gopro07.ckpt') pretrained_weight = torch.load('pretrained_models/best_gopro07.ckpt')['state_dict'] else: print('Estimation Network: best_gopro11.ckpt') pretrained_weight = torch.load('pretrained_models/best_gopro11.ckpt')['state_dict'] center_state_dict = {} ends_state_dict = {} for key, value in pretrained_weight.items(): if key.startswith('center_est.'): center_state_dict[key[11:]] = value elif key.startswith('gen.'): ends_state_dict[key[4:]] = value # print(center_state_dict.keys()) center_estimation.load_state_dict(center_state_dict) border_estimation.load_state_dict(ends_state_dict) center_estimation = center_estimation.to(device) border_estimation = border_estimation.to(device) ### Load Datasets # Channel wise mean calculated on adobe240-fps training dataset mean = [0.429, 0.431, 0.397] std = [1, 1, 1] normalize = transforms.Normalize(mean=mean, std=std) transform = transforms.Compose([transforms.ToTensor(), normalize]) # GoPro trainset = gopro_blur.GoPro(root=args.dataset_root, transform=transform, seq_len=args.seq_len, train=True) trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.train_batch_size, shuffle=True) validationset = gopro_blur.GoPro(root=args.dataset_root, transform=transform, randomCropSize=(1280, 704), seq_len=args.seq_len, 
train=False) validationloader = torch.utils.data.DataLoader(validationset, batch_size=args.validation_batch_size, shuffle=False) print(trainset, validationset) ###Create transform to display image from tensor negmean = [x * -1 for x in mean] revNormalize = transforms.Normalize(mean=negmean, std=std) TP = transforms.Compose([revNormalize, transforms.ToPILImage()]) ###Utils def get_lr(optimizer): for param_group in optimizer.param_groups: return param_group['lr'] ###Loss and Optimizer seq_len = args.seq_len ctr_idx = seq_len // 2 L1_lossFn = nn.L1Loss() MSE_LossFn = nn.MSELoss() compare_ftn = nn.L1Loss(reduction='none') params = list(ArbTimeFlowIntrp.parameters()) + list(flowComp.parameters()) optimizer = optim.Adam(params, lr=args.init_learning_rate) # scheduler to decrease learning rate by a factor of 10 at milestones. # scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=args.milestones, gamma=0.1) scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'max', factor=0.1, patience=3, verbose=True) ###Initializing VGG16 model for perceptual loss vgg16 = torchvision.models.vgg16(pretrained=True) vgg16_conv_4_3 = nn.Sequential(*list(vgg16.children())[0][:22]) vgg16_conv_4_3.to(device) for param in vgg16_conv_4_3.parameters(): param.requires_grad = False ### Validation function def validate(): # For details see training. 
psnr = 0 tloss = 0 flag = 1 with torch.no_grad(): for validationIndex, (validationData, validationFrameIndex, _) in enumerate(validationloader, 0): # frame0, frameT, frame1 = validationData # I0 = frame0.to(device) # I1 = frame1.to(device) # IFrame = frameT.to(device) blurred_img = torch.zeros_like(validationData[0]) for image in validationData: blurred_img += image blurred_img /= len(validationData) blurred_img = blurred_img.to(device) blurred_img = meanshift(blurred_img, mean, std, device, False) center = center_estimation(blurred_img) start, end = border_estimation(blurred_img, center) start = meanshift(start, mean, std, device, True) end = meanshift(end, mean, std, device, True) center = meanshift(center, mean, std, device, True) blurred_img = meanshift(blurred_img, mean, std, device, True) frame0 = validationData[0].to(device) frame1 = validationData[-1].to(device) batch_size = blurred_img.shape[0] parallel = torch.mean(compare_ftn(start, frame0) + compare_ftn(end, frame1), dim=(1,2,3)) cross = torch.mean(compare_ftn(start, frame1) + compare_ftn(end, frame0), dim=(1,2,3)) # I0 = torch.zeros_like(blurred_img) IFrame = torch.zeros_like(blurred_img) choose_start = torch.zeros(batch_size).to(device) for b in range(batch_size): if (validationFrameIndex[b] < (ctr_idx - 1) and parallel[b] <= cross[b]) or (validationFrameIndex[b] > (ctr_idx - 1) and parallel[b] > cross[b]): # I0[b] = start[b] choose_start[b] = 1 else: # I0[b] = end[b] choose_start[b] = 0 IFrame[b] = validationData[validationFrameIndex[b]+1][b] if validationFrameIndex[b] > (ctr_idx - 1): validationFrameIndex[b] = seq_len - 3 - validationFrameIndex[b] choose_start = choose_start.reshape((-1, 1, 1, 1)) I0 = choose_start * start + (1-choose_start) * end I1 = center if args.amp: with torch.cuda.amp.autocast(): flowOut = flowComp(torch.cat((I0, I1), dim=1)) F_0_1 = flowOut[:,:2,:,:] F_1_0 = flowOut[:,2:,:,:] fCoeff = superslomo.getFlowCoeff(validationFrameIndex, device, seq_len) F_t_0 = fCoeff[0] * F_0_1 + 
fCoeff[1] * F_1_0 F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0 g_I0_F_t_0 = validationFlowBackWarp(I0, F_t_0) g_I1_F_t_1 = validationFlowBackWarp(I1, F_t_1) if args.add_blur: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0, blurred_img), dim=1)) else: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1)) F_t_0_f = intrpOut[:, :2, :, :] + F_t_0 F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1 V_t_0 = torch.sigmoid(intrpOut[:, 4:5, :, :]) V_t_1 = 1 - V_t_0 g_I0_F_t_0_f = validationFlowBackWarp(I0, F_t_0_f) g_I1_F_t_1_f = validationFlowBackWarp(I1, F_t_1_f) wCoeff = superslomo.getWarpCoeff(validationFrameIndex, device, seq_len) Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1) #loss recnLoss = L1_lossFn(Ft_p, IFrame) prcpLoss = MSE_LossFn(vgg16_conv_4_3(Ft_p), vgg16_conv_4_3(IFrame)) warpLoss = L1_lossFn(g_I0_F_t_0, IFrame) + L1_lossFn(g_I1_F_t_1, IFrame) + L1_lossFn(validationFlowBackWarp(I0, F_1_0), I1) + L1_lossFn(validationFlowBackWarp(I1, F_0_1), I0) loss_smooth_1_0 = torch.mean(torch.abs(F_1_0[:, :, :, :-1] - F_1_0[:, :, :, 1:])) + torch.mean(torch.abs(F_1_0[:, :, :-1, :] - F_1_0[:, :, 1:, :])) loss_smooth_0_1 = torch.mean(torch.abs(F_0_1[:, :, :, :-1] - F_0_1[:, :, :, 1:])) + torch.mean(torch.abs(F_0_1[:, :, :-1, :] - F_0_1[:, :, 1:, :])) loss_smooth = loss_smooth_1_0 + loss_smooth_0_1 loss = 204 * recnLoss + 102 * warpLoss + 0.005 * prcpLoss + loss_smooth else: flowOut = flowComp(torch.cat((I0, I1), dim=1)) F_0_1 = flowOut[:,:2,:,:] F_1_0 = flowOut[:,2:,:,:] fCoeff = superslomo.getFlowCoeff(validationFrameIndex, device, seq_len) F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0 F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0 g_I0_F_t_0 = validationFlowBackWarp(I0, F_t_0) g_I1_F_t_1 = validationFlowBackWarp(I1, F_t_1) if args.add_blur: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, 
F_t_0, g_I1_F_t_1, g_I0_F_t_0, blurred_img), dim=1)) else: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1)) F_t_0_f = intrpOut[:, :2, :, :] + F_t_0 F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1 V_t_0 = torch.sigmoid(intrpOut[:, 4:5, :, :]) V_t_1 = 1 - V_t_0 g_I0_F_t_0_f = validationFlowBackWarp(I0, F_t_0_f) g_I1_F_t_1_f = validationFlowBackWarp(I1, F_t_1_f) wCoeff = superslomo.getWarpCoeff(validationFrameIndex, device, seq_len) Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1) #loss recnLoss = L1_lossFn(Ft_p, IFrame) prcpLoss = MSE_LossFn(vgg16_conv_4_3(Ft_p), vgg16_conv_4_3(IFrame)) warpLoss = L1_lossFn(g_I0_F_t_0, IFrame) + L1_lossFn(g_I1_F_t_1, IFrame) + L1_lossFn(validationFlowBackWarp(I0, F_1_0), I1) + L1_lossFn(validationFlowBackWarp(I1, F_0_1), I0) loss_smooth_1_0 = torch.mean(torch.abs(F_1_0[:, :, :, :-1] - F_1_0[:, :, :, 1:])) + torch.mean(torch.abs(F_1_0[:, :, :-1, :] - F_1_0[:, :, 1:, :])) loss_smooth_0_1 = torch.mean(torch.abs(F_0_1[:, :, :, :-1] - F_0_1[:, :, :, 1:])) + torch.mean(torch.abs(F_0_1[:, :, :-1, :] - F_0_1[:, :, 1:, :])) loss_smooth = loss_smooth_1_0 + loss_smooth_0_1 loss = 204 * recnLoss + 102 * warpLoss + 0.005 * prcpLoss + loss_smooth tloss += loss.item() # For tensorboard if flag: retImg = torchvision.utils.make_grid([revNormalize(frame0.cpu()[0]), revNormalize(IFrame.cpu()[0]), revNormalize(Ft_p.cpu()[0]), revNormalize(frame1.cpu()[0])], padding=10) flag = 0 #psnr MSE_val = MSE_LossFn(Ft_p, IFrame) psnr += (10 * log10(1 / MSE_val.item())) # Make benchmark csv file return (psnr / len(validationloader)), (tloss / len(validationloader)), retImg def test(): df_column = ['Name'] df_column.extend([str(i) for i in range(1, seq_len + 1)]) df = pd.DataFrame(columns=df_column) psnr_array = np.zeros((0, seq_len)) ssim_array = np.zeros((0, seq_len)) tqdm_loader = tqdm.tqdm(validationloader, ncols=80) imgsave_folder = 
os.path.join(args.checkpoint_dir, 'Saved_imgs') if not os.path.exists(imgsave_folder): os.mkdir(imgsave_folder) with torch.no_grad(): for validationIdx, (validationData, _, validationFile) in enumerate(tqdm_loader): blurred_img = torch.zeros_like(validationData[0]) for image in validationData: blurred_img += image blurred_img /= len(validationData) blurred_img = blurred_img.to(device) batch_size = blurred_img.shape[0] blurred_img = meanshift(blurred_img, mean, std, device, False) center = center_estimation(blurred_img) start, end = border_estimation(blurred_img, center) # start, end, center = quantize(start, rgb_range=255), quantize(end, rgb_range=255), quantize(center, rgb_range=255) start = meanshift(start, mean, std, device, True) end = meanshift(end, mean, std, device, True) center = meanshift(center, mean, std, device, True) blurred_img = meanshift(blurred_img, mean, std, device, True) frame0 = validationData[0].to(device) frame1 = validationData[-1].to(device) batch_size = blurred_img.shape[0] parallel = torch.mean(compare_ftn(start, frame0) + compare_ftn(end, frame1), dim=(1,2,3)) cross = torch.mean(compare_ftn(start, frame1) + compare_ftn(end, frame0), dim=(1,2,3)) I0 = torch.zeros_like(blurred_img) I1 = center psnrs = np.zeros((batch_size, seq_len)) ssims = np.zeros((batch_size, seq_len)) for vindex in range(seq_len): frameT = validationData[vindex] IFrame = frameT.to(device) if vindex == 0: Ft_p = torch.zeros_like(blurred_img) for b in range(batch_size): if parallel[b] <= cross[b]: Ft_p[b] = start[b].clone() else: Ft_p[b] = end[b].clone() elif vindex == seq_len-1: Ft_p = torch.zeros_like(blurred_img) for b in range(batch_size): if parallel[b] <= cross[b]: Ft_p[b] = end[b].clone() else: Ft_p[b] = start[b].clone() elif vindex == ctr_idx: Ft_p = center.clone() else: validationIndex = torch.ones(batch_size) * (vindex - 1) validationIndex = validationIndex.long() if vindex > ctr_idx: validationIndex = seq_len - 3 - validationIndex for b in range(batch_size): 
if (vindex < ctr_idx and parallel[b] <= cross[b]) or (vindex > ctr_idx and parallel[b] > cross[b]): I0[b] = start[b] else: I0[b] = end[b] flowOut = flowComp(torch.cat((I0, I1), dim=1)) F_0_1 = flowOut[:,:2,:,:] F_1_0 = flowOut[:,2:,:,:] fCoeff = superslomo.getFlowCoeff(validationIndex, device, seq_len) F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0 F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0 g_I0_F_t_0 = validationFlowBackWarp(I0, F_t_0) g_I1_F_t_1 = validationFlowBackWarp(I1, F_t_1) if args.add_blur: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0, blurred_img), dim=1)) else: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1)) F_t_0_f = intrpOut[:, :2, :, :] + F_t_0 F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1 V_t_0 = torch.sigmoid(intrpOut[:, 4:5, :, :]) V_t_1 = 1 - V_t_0 g_I0_F_t_0_f = validationFlowBackWarp(I0, F_t_0_f) g_I1_F_t_1_f = validationFlowBackWarp(I1, F_t_1_f) wCoeff = superslomo.getWarpCoeff(validationIndex, device, seq_len) Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1) Ft_p = meanshift(Ft_p, mean, std, device, False) IFrame = meanshift(IFrame, mean, std, device, False) out = quantize(Ft_p[b]) foldername = os.path.basename(os.path.dirname(validationFile[ctr_idx][b])) if not os.path.exists(os.path.join(imgsave_folder, foldername)): os.makedirs(os.path.join(imgsave_folder, foldername)) for b in range(batch_size): filename = os.path.splitext(os.path.basename(validationFile[vindex][b]))[0] out_fname = foldername + '_' + filename + '_out.png' # gt_fname = foldername + '_' + filename + '.png' # Comment two lines below if you want to save images torchvision.utils.save_image(out, os.path.join(imgsave_folder, foldername, out_fname), normalize=True, range=(0,255)) # torchvision.utils.save_image(gt, os.path.join(imgsave_folder, gt_fname), normalize=True, range=(0,255)) psnr, ssim = 
eval_metrics(Ft_p, IFrame) psnrs[:, vindex] = psnr.cpu().numpy() ssims[:, vindex] = ssim.cpu().numpy() for b in range(batch_size): rows = [validationFile[ctr_idx][b]] rows.extend(list(psnrs[b])) df = df.append(pd.Series(rows, index=df.columns), ignore_index=True) df.to_csv('{}/results_PSNR.csv'.format(args.checkpoint_dir)) ### Initialization if args.train_continue or args.test_only: dict1 = torch.load(args.checkpoint) checkpoint_counter = dict1['epoch'] + 1 ArbTimeFlowIntrp.load_state_dict(dict1['state_dictAT']) flowComp.load_state_dict(dict1['state_dictFC']) print() else: dict1 = {'loss': [], 'valLoss': [], 'valPSNR': [], 'epoch': -1} if args.test_only: print("Test Start") test() print("Test End") sys.exit(0) ### Training import time starttime = time.time() cLoss = dict1['loss'] valLoss = dict1['valLoss'] valPSNR = dict1['valPSNR'] checkpoint_counter = 0 ### Main training loop for epoch in range(dict1['epoch'] + 1, args.epochs): print("Epoch: ", epoch) # Append and reset cLoss.append([]) valLoss.append([]) valPSNR.append([]) iLoss = 0 tqdm_trainloader = tqdm.tqdm(trainloader, ncols=80) for trainIndex, (trainData, trainFrameIndex, _) in enumerate(tqdm_trainloader): ## Getting the input and the target from the training set # frame0, frameT, frame1 = trainData blurred_img = torch.zeros_like(trainData[0]) for image in trainData: blurred_img += image blurred_img /= len(trainData) blurred_img = blurred_img.to(device) with torch.no_grad(): blurred_img = meanshift(blurred_img, mean, std, device, False) center = center_estimation(blurred_img) start, end = border_estimation(blurred_img, center) start = meanshift(start, mean, std, device, True) end = meanshift(end, mean, std, device, True) center = meanshift(center, mean, std, device, True) blurred_img = meanshift(blurred_img, mean, std, device, True) frame0 = trainData[0].to(device) frame1 = trainData[-1].to(device) batch_size = blurred_img.shape[0] parallel = torch.mean(compare_ftn(start, frame0) + compare_ftn(end, 
frame1), dim=(1,2,3)) cross = torch.mean(compare_ftn(start, frame1) + compare_ftn(end, frame0), dim=(1,2,3)) # I0 = torch.zeros_like(blurred_img) IFrame = torch.zeros_like(blurred_img) choose_start = torch.zeros(batch_size).to(device) for b in range(batch_size): if (trainFrameIndex[b] < (ctr_idx - 1) and parallel[b] <= cross[b]) or (trainFrameIndex[b] > (ctr_idx - 1) and parallel[b] > cross[b]): # I0[b] = start[b] choose_start[b] = 1 else: # I0[b] = end[b] choose_start[b] = 0 IFrame[b] = trainData[trainFrameIndex[b]+1][b] if trainFrameIndex[b] > (ctr_idx - 1): trainFrameIndex[b] = seq_len - 3 - trainFrameIndex[b] choose_start = choose_start.reshape((-1, 1, 1, 1)) I0 = choose_start * start + (1-choose_start) * end I1 = center optimizer.zero_grad() if args.amp: with torch.cuda.amp.autocast(): # Calculate flow between reference frames I0 and I1 flowOut = flowComp(torch.cat((I0, I1), dim=1)) # Extracting flows between I0 and I1 - F_0_1 and F_1_0 F_0_1 = flowOut[:,:2,:,:] F_1_0 = flowOut[:,2:,:,:] fCoeff = superslomo.getFlowCoeff(trainFrameIndex, device, seq_len) # Calculate intermediate flows F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0 F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0 # Get intermediate frames from the intermediate flows g_I0_F_t_0 = trainFlowBackWarp(I0, F_t_0) g_I1_F_t_1 = trainFlowBackWarp(I1, F_t_1) # Calculate optical flow residuals and visibility maps if args.add_blur: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0, blurred_img), dim=1)) else: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0), dim=1)) # Extract optical flow residuals and visibility maps F_t_0_f = intrpOut[:, :2, :, :] + F_t_0 F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1 V_t_0 = torch.sigmoid(intrpOut[:, 4:5, :, :]) V_t_1 = 1 - V_t_0 # Get intermediate frames from the intermediate flows g_I0_F_t_0_f = trainFlowBackWarp(I0, F_t_0_f) g_I1_F_t_1_f = trainFlowBackWarp(I1, F_t_1_f) wCoeff = 
superslomo.getWarpCoeff(trainFrameIndex, device, seq_len) # Calculate final intermediate frame Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1) # Loss recnLoss = L1_lossFn(Ft_p, IFrame) prcpLoss = MSE_LossFn(vgg16_conv_4_3(Ft_p), vgg16_conv_4_3(IFrame)) warpLoss = L1_lossFn(g_I0_F_t_0, IFrame) + L1_lossFn(g_I1_F_t_1, IFrame) + L1_lossFn(trainFlowBackWarp(I0, F_1_0), I1) + L1_lossFn(trainFlowBackWarp(I1, F_0_1), I0) loss_smooth_1_0 = torch.mean(torch.abs(F_1_0[:, :, :, :-1] - F_1_0[:, :, :, 1:])) + torch.mean(torch.abs(F_1_0[:, :, :-1, :] - F_1_0[:, :, 1:, :])) loss_smooth_0_1 = torch.mean(torch.abs(F_0_1[:, :, :, :-1] - F_0_1[:, :, :, 1:])) + torch.mean(torch.abs(F_0_1[:, :, :-1, :] - F_0_1[:, :, 1:, :])) loss_smooth = loss_smooth_1_0 + loss_smooth_0_1 # Total Loss - Coefficients 204 and 102 are used instead of 0.8 and 0.4 # since the loss in paper is calculated for input pixels in range 0-255 # and the input to our network is in range 0-1 loss = 204 * recnLoss + 102 * warpLoss + 0.005 * prcpLoss + loss_smooth # Backpropagate scaler.scale(loss).backward() scaler.step(optimizer) scaler.update() else: # Calculate flow between reference frames I0 and I1 flowOut = flowComp(torch.cat((I0, I1), dim=1)) # Extracting flows between I0 and I1 - F_0_1 and F_1_0 F_0_1 = flowOut[:,:2,:,:] F_1_0 = flowOut[:,2:,:,:] fCoeff = superslomo.getFlowCoeff(trainFrameIndex, device, seq_len) # Calculate intermediate flows F_t_0 = fCoeff[0] * F_0_1 + fCoeff[1] * F_1_0 F_t_1 = fCoeff[2] * F_0_1 + fCoeff[3] * F_1_0 # Get intermediate frames from the intermediate flows g_I0_F_t_0 = trainFlowBackWarp(I0, F_t_0) g_I1_F_t_1 = trainFlowBackWarp(I1, F_t_1) # Calculate optical flow residuals and visibility maps if args.add_blur: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, g_I1_F_t_1, g_I0_F_t_0, blurred_img), dim=1)) else: intrpOut = ArbTimeFlowIntrp(torch.cat((I0, I1, F_0_1, F_1_0, F_t_1, F_t_0, 
g_I1_F_t_1, g_I0_F_t_0), dim=1)) # Extract optical flow residuals and visibility maps F_t_0_f = intrpOut[:, :2, :, :] + F_t_0 F_t_1_f = intrpOut[:, 2:4, :, :] + F_t_1 V_t_0 = torch.sigmoid(intrpOut[:, 4:5, :, :]) V_t_1 = 1 - V_t_0 # Get intermediate frames from the intermediate flows g_I0_F_t_0_f = trainFlowBackWarp(I0, F_t_0_f) g_I1_F_t_1_f = trainFlowBackWarp(I1, F_t_1_f) wCoeff = superslomo.getWarpCoeff(trainFrameIndex, device, seq_len) # Calculate final intermediate frame Ft_p = (wCoeff[0] * V_t_0 * g_I0_F_t_0_f + wCoeff[1] * V_t_1 * g_I1_F_t_1_f) / (wCoeff[0] * V_t_0 + wCoeff[1] * V_t_1) # Loss recnLoss = L1_lossFn(Ft_p, IFrame) prcpLoss = MSE_LossFn(vgg16_conv_4_3(Ft_p), vgg16_conv_4_3(IFrame)) warpLoss = L1_lossFn(g_I0_F_t_0, IFrame) + L1_lossFn(g_I1_F_t_1, IFrame) + L1_lossFn(trainFlowBackWarp(I0, F_1_0), I1) + L1_lossFn(trainFlowBackWarp(I1, F_0_1), I0) loss_smooth_1_0 = torch.mean(torch.abs(F_1_0[:, :, :, :-1] - F_1_0[:, :, :, 1:])) + torch.mean(torch.abs(F_1_0[:, :, :-1, :] - F_1_0[:, :, 1:, :])) loss_smooth_0_1 = torch.mean(torch.abs(F_0_1[:, :, :, :-1] - F_0_1[:, :, :, 1:])) + torch.mean(torch.abs(F_0_1[:, :, :-1, :] - F_0_1[:, :, 1:, :])) loss_smooth = loss_smooth_1_0 + loss_smooth_0_1 # Total Loss - Coefficients 204 and 102 are used instead of 0.8 and 0.4 # since the loss in paper is calculated for input pixels in range 0-255 # and the input to our network is in range 0-1 loss = 204 * recnLoss + 102 * warpLoss + 0.005 * prcpLoss + loss_smooth # Backpropagate loss.backward() optimizer.step() iLoss += loss.item() # Validation and progress every `args.progress_iter` iterations # if (trainIndex % args.progress_iter) == args.progress_iter - 1: endtime = time.time() psnr, vLoss, valImg = validate() valPSNR[epoch].append(psnr) valLoss[epoch].append(vLoss) #Tensorboard itr = trainIndex + epoch * len(trainloader) writer.add_scalars('Loss', {'trainLoss': iLoss/len(trainloader), 'validationLoss': vLoss}, itr) writer.add_scalar('PSNR', psnr, itr) 
writer.add_image('Validation', valImg, itr) ##### endVal = time.time() print("Loss: %0.6f TrainExecTime: %0.1f ValLoss:%0.6f ValPSNR: %0.4f ValEvalTime: %0.2f LR: %f" % (iLoss / len(trainloader), endtime - starttime, vLoss, psnr, endVal - endtime, get_lr(optimizer))) # Increment scheduler count scheduler.step(psnr) # Create checkpoint after every `args.checkpoint_epoch` epochs if ((epoch % args.checkpoint_epoch) == args.checkpoint_epoch - 1): dict1 = { 'Detail':"End to end Super SloMo.", 'epoch':epoch, 'timestamp':datetime.datetime.now(), 'trainBatchSz':args.train_batch_size, 'validationBatchSz':args.validation_batch_size, 'learningRate':get_lr(optimizer), 'loss':cLoss, 'valLoss':valLoss, 'valPSNR':valPSNR, 'state_dictFC': flowComp.state_dict(), 'state_dictAT': ArbTimeFlowIntrp.state_dict(), } torch.save(dict1, args.checkpoint_dir + "/SuperSloMo" + str(checkpoint_counter) + ".ckpt") checkpoint_counter += 1
python
<reponame>alexboii/Discover-Poetry { "author": "<NAME>", "classification": "", "keywords": [], "period": "", "reference": "http://www.poetryfoundation.org/poetrymagazine/poem/178241", "region": "", "text": [ "It seems to head from its last stop too fast,", "my transbay train\u2019s strungout", ", deep", "inside the tunnel, and starts to bleed", "into the baritone wail of that guy", "at platform\u2019s end, a sort of lullaby", "rubbed against the wall then caught in a squall", "of wind darkening toward us, his whippy voice", "skinning its tired song off the tiled dome:", "he\u2019s determined, the silky lyric says,", ", while we all", "wait to be chuted to car lot or home,", "closer to love, or farther, and sooner to loss,", "our bashful shoes and arms like lives crossed,", "every plural presence now some thing alone,", "thanks to our singer-man. We wait for the train,", "patient with hope, a hope that\u2019s like complaint." ], "title": "Solo R&B Vocal Underground", "year": "" }
json
The most common abdominal pain in the abdomen during pregnancy is a consequence of changes occurring at this time in the uterus. Most often it is associated with the growth of the uterus itself, and directly its muscle layer. However, sharp stabbing pain in the lower abdomen during pregnancy can be a symptom and a disease, for example, cystitis, which during pregnancy is not uncommon. Let's try to understand what can be indicated by sharp cramping pains in the abdomen in pregnancy, depending on their location. What can a sharp pain in the lower abdomen on the left side of pregnancy mean? This kind of symptomatology may indicate the presence of such a disorder as diverticulitis (inflammation of saccular protrusion that appears in any part of the gastrointestinal tract). Moreover, in addition to pain, nausea, vomiting, fever, chills, and stool disorders (constipation) are observed. Also, the pain on the left can be caused by the infringement of the hernia. In this case, most often it has a sharp, paroxysmal character. However, probably the most frequent violation, accompanied by the appearance of pain in the lower abdomen on the left side of pregnancy, is cystitis. Diagnosing this disease is not difficult, because pain accompanied by frequent and painful urination. Often in the urine can detect impurities of blood. If you have this symptomatology, you should contact your doctor as soon as possible. What is the evidence of a sharp pain in the lower abdomen on the right side of pregnancy? First of all, this symptomatology indicates the presence of lesions of organs located directly in the right iliac region. So, in the first place, it is necessary to exclude, the so-called inflammation of the appendix, known in the people as "appendicitis". In addition, a sharp short-term pain in the abdomen during pregnancy can be caused by a right-sided lesion of the ovaries, appendages or fallopian tubes. 
At the same time, if these signs are related to gynecological disorders, the pain often radiates to the rectum or sacrum.
english
<reponame>JulianJorgensen/rise-public
// Time entry list: bottom spacing on the container, pointer cursor on each entry.
.times {
  margin-bottom: rem(20);

  // Nested descendant selector compiles to `.times .time`.
  .time {
    cursor: pointer;
  }
}
css
<reponame>jlongster/octoshot { "name": "binarypack", "description": "BinaryPack is a JSON-like binary serialization format", "version": "0.0.2", "homepage": "https://github.com/binaryjs/node-binarypack", "author": { "name": "<NAME>" }, "repository": { "type": "git", "url": "https://github.com/binaryjs/node-binarypack" }, "main": "./lib/binarypack", "engines": { "node": ">=0.6.0" }, "dependencies": { "buffercursor": ">=0.0.3" }, "_id": "binarypack@0.0.2", "readme": "ERROR: No README.md file found!", "dist": { "shasum": "9cf28ae5158f1568e035fbbea777db365f08ed8d" }, "_from": "binarypack@>=0.0.0" }
json
{ "id": "subspace_boulder_verdant_peak", "originalId": 372103, "rarity": 3, "name": "Roca dimensional: Cima verde", "description": "Una de las rocas que forman parte del paisaje básico de las moradas de los Adeptus. Esta parece realmente estable.\\nSería bueno preguntarle a cierto Arconte Anemo su opinión sobre cómo convertirla en una torre alta. Después de todo, tiene bastante experiencia en evitar diseños deplorables.", "load": 25, "energy": 20, "exp": 40, "category": [ { "id": 10001, "category": "Montañas", "type": "Exterior" } ], "recipe": [ { "id": "pine_wood", "name": "Madera de pino", "amount": 20 } ] }
json
2 Guns movie Based on a cult graphic novel by Steven Grant, the Hollywood film 2 Guns is an intriguing, action packed buddy film, complete with greed, corruption, deception and double crossing. Two buddies - Bobby Trench (Denzel Washington) and Marcus Stigman (Mark Wahlberg) after being cheated by a Mexican drug lord (Edward James Olmos) decide to rob a bank that holds his ill-gotten wealth. Unwittingly, instead of picking the drug lord's three million dollars, they land up stacking a little over 43 million dollars belonging to an even more ruthless organisation, the CIA. Once the heist is done and when their plans go awry, they dwell into their partner's past and trustworthiness. They realise that they haven't been truthful to each other; Bobby is an undercover DEA officer, while Stigman is an ex-Navy officer, each of whom seems to be putting up a charade. That's not all, there are other go-getters trying to claim a stake in the heist. Apart from the Mexican drug lord and the ruthless CIA agent (Bill Paxton), the duo find themselves pitted against their own colleagues with divided loyalties. They are the seductive DEA agent (Paula Patton) and a slimy Navy official (James Marsden).
english
<reponame>ErGonario/website --- title: "发布尔格主网3.0.4版本" date: 2019-07-19 draft: false subtitle: "此次发布在候选区块生成算法中引入了一个带有时间标记的缺陷修复。向各位矿工强烈推荐" link: "https://github.com/ergoplatform/ergo/releases/tag/v3.0.4" ---
markdown
<reponame>LeetCode-in-Java/LeetCode-in-Java
package g1901_2000.s1998_gcd_sort_of_an_array;

// #Hard #Array #Math #Sorting #Union_Find #2022_05_16_Time_51_ms_(100.00%)_Space_71.9_MB_(67.50%)

import java.util.Arrays;

public class Solution {
    /**
     * LeetCode 1998 "GCD Sort of an Array": decides whether {@code nums} can be
     * sorted by repeatedly swapping pairs of elements that share a common
     * factor greater than 1.
     *
     * <p>Strategy: a prime sieve doubling as a union-find. All values that share
     * a prime factor are merged into one component; the array is sortable
     * exactly when every element lies in the same component as the value that
     * occupies its slot in the sorted array.
     *
     * @param nums the values to test; each value is also used as an index into
     *             the sieve, so values are assumed positive (per the problem
     *             constraints)
     * @return true if the gcd-swap sort can succeed, false otherwise
     */
    public boolean gcdSort(int[] nums) {
        int[] sorted = nums.clone();
        Arrays.sort(sorted);
        int len = nums.length;
        // The largest value bounds the sieve size.
        int max = sorted[len - 1];
        // grouping tree child(index)->parent(value), index==value is root
        int[] nodes = new int[max + 1];
        // Mark every value present in nums as an unsieved leaf (-1).
        for (int j : nums) {
            nodes[j] = -1;
        }
        // value: <=0 not sieved, <0 leaf node, 0 or 1 not in nums, >1 grouped
        for (int p = 2; p <= max / 2; p++) {
            if (nodes[p] > 0) {
                // sieved so not a prime number.
                continue;
            }
            // p is now a prime number, set self as root.
            nodes[p] = p;
            // Root of the component currently being grown around p.
            int group = p;
            int num = p + p;
            // Walk the multiples of p, unioning those present in nums into
            // p's component and marking the rest as sieved composites.
            while (num <= max) {
                int existing = nodes[num];
                if (existing < 0) {
                    // 1st hit, set group
                    nodes[num] = group;
                } else if (existing <= 1) {
                    // value doesn't exist in nums; set to 1 so the outer loop
                    // still skips this composite when it reaches it.
                    nodes[num] = 1;
                } else if ((existing = root(nodes, existing)) < group) {
                    // num already belongs to a component with a smaller root:
                    // reparent the current component under that root.
                    nodes[group] = existing;
                    group = existing;
                } else {
                    // Otherwise absorb num's component under the current root.
                    nodes[existing] = group;
                }
                num += p;
            }
        }
        // Sortable iff each element shares a component with the value destined
        // for its position after sorting.
        for (int i = 0; i < len; i++) {
            if (root(nodes, nums[i]) != root(nodes, (sorted[i]))) {
                return false;
            }
        }
        return true;
    }

    /** Follows parent links upward and returns the root of num's component. */
    private static int root(final int[] nodes, int num) {
        int group;
        while ((group = nodes[num]) > 0 && group != num) {
            num = group;
        }
        return num;
    }
}
java
Topics for Today’s questions: Question 1) Q.1 Ocean deoxygenation is one of the most detrimental, yet under-reported side-effects of human- induced climate change. Identify the causes behind it. Also, mention its socio-economic and environmental implications for the world. (15 Marks) Question 2) Q.2 What are the issues with the provisions of Armed Forces Special Powers Act? Do you agree with the view that repealing it would strengthen the Constitution? (10 Marks) Question 3) Q.3 What changes were made by the RBI in its monetary policy in the wake of Covid? Examine the steps taken by the RBI towards policy normalisation without causing disruption. (10 Marks) Question 4) Q.4 How the interests of private sphere and public sphere become conflictual? What should be done in the event of such conflict? Explain. (10 Marks) HOW TO ATTEMPT ANSWERS IN DAILY ANSWER WRITING ENHANCEMENT(AWE)? Daily 4 questions from General studies 1, 2, 3, and 4 will be provided to you. A Mentor’s Comment will be available for all answers. This can be used as a guidance tool but we encourage you to write original answers. You can write your answer on an A4 sheet and scan/click pictures of the same. Upload the scanned answer in the comment section of the same question. Along with the scanned answer, please share your Razor payment ID, so that paid members are given priority. If you are writing answers late, for example, 11th October is uploaded on 13th October, then these answers will be evaluated as per the mentor’s schedule. We encourage you to write answers on the same day. However, if you are uploading an answer late then tag the mentor like @Staff so that the mentor is notified about your answer. *In case your answer is not reviewed, reply to your answer saying *NOT CHECKED*. For the philosophy of AWE and payment:
english
<reponame>YUChoe/Large-screen-slideshare-Chrome-extension { "browser_action": { "default_icon": "slideshare.png" }, "content_scripts": [ { "all_frames": true, "js": [ "large_screen_slideshare.js" ], "matches": [ "http://www.slideshare.net/*" ] } ], "description": "This plugin will enlarge slides for slideshare.com", "icons": { "128": "slideshare.png", "16": "slideshare16.png", "48": "slideshare48.png" }, "key": "<KEY>", "manifest_version": 2, "name": "Large screen slideshare Plugin", "permissions": [ "activeTab" ], "update_url": "https://clients2.google.com/service/update2/crx", "version": "0.9", "web_accessible_resources": [ "large_screen_slideshare.css" ] }
json
Russia has warned that members of the Takfiri Hayat Tahrir al-Sham (HTS) terrorist outfit together with the so-called civil defense group White Helmets are planning to carry out chemical attacks across Syria in a bid to incriminate Syrian government forces and invent pretexts for possible acts of aggression on army troops. “Via several channels, we received confirmation of reports that militants from the group of field commander Abu Malek, which is a part of Hayat Tahrir al-Sham, jointly with the White Helmets organization, plan to carry out provocations involving staged air raids and use of chemical weapons in populated areas of the Idlib de-escalation zone,” Russia's Defense Ministry said in a statement released on Tuesday. The statement further noted that the residents of the town of Sarmada in the extreme northwest of Syria had seen a group of unidentified people and three trucks transporting various containers with unknown chemicals arriving in the area earlier this month. “One of the vehicles carried professional video equipment and fragments of air and artillery shells with Soviet and Russian identification marks,” the statement pointed out. The Russian Foreign Ministry also highlighted that the militants are planning to make falsified footage of civilian facilities allegedly destroyed in airstrikes, artillery shelling and chemical attacks in Idlib province, and then publish the videos on social media to blame the Syrian government and Russia for their actions against civilians.
english
// Teaching example: demonstrates (via MSVC class-layout dumps, preserved in
// the comments below) how objects are laid out under multiple inheritance
// with virtual functions, and how the layout changes when one base is
// inherited virtually.
#pragma vtordisp(off) // MSVC: suppress vtordisp fields for virtual bases
#include <iostream>
using std::cout;
using std::endl;

// A simple polymorphic base: on MSVC/32-bit, one vfptr followed by one int.
class Base1
{
public:
    Base1() : _iBase1(10) {}

    virtual void f() { cout << "Base1::f()" << endl; }
    virtual void g() { cout << "Base1::g()" << endl; }
    virtual void h() { cout << "Base1::h()" << endl; }

private:
    int _iBase1;
};

// Same shape as Base1, with a different member value for identification.
class Base2
{
public:
    Base2() : _iBase2(100) {}

    virtual void f() { cout << "Base2::f()" << endl; }
    virtual void g() { cout << "Base2::g()" << endl; }
    virtual void h() { cout << "Base2::h()" << endl; }

private:
    int _iBase2;
};

// Same shape as Base1/Base2, third identification value.
class Base3
{
public:
    Base3() : _iBase3(1000) {}

    virtual void f() { cout << "Base3::f()" << endl; }
    virtual void g() { cout << "Base3::g()" << endl; }
    virtual void h() { cout << "Base3::h()" << endl; }

private:
    int _iBase3;
};

/* On a 32-bit system.
   Test 3: multiple inheritance (with virtual functions)
   1. Each base class keeps its own virtual function table.
   2. If the derived class declares virtual functions of its own, they are
      appended to the FIRST vtable.
   3. In the memory layout, the base-class subobjects are arranged in the
      order in which the bases were declared.
   4. The derived class overrides the bases' virtual functions, but only the
      first vtable stores the real address of the overriding function; the
      other vtables do not store the real address of the corresponding
      virtual function — they store just a jump instruction (a thunk).

1>class Derived size(28):
1> +---
1> 0 | +--- (base class Base1)
1> 0 | | {vfptr}
1> 4 | | _iBase1
1> | +---
1> 8 | +--- (base class Base2)
1> 8 | | {vfptr}
1>12 | | _iBase2
1> | +---
1>16 | +--- (base class Base3)
1>16 | | {vfptr}
1>20 | | _iBase3
1> | +---
1>24 | _iDerived
1> +---
1>Derived::$vftable@Base1@:
1> | &Derived_meta
1> | 0
1> 0 | &Derived::f
1> 1 | &Base1::g
1> 2 | &Base1::h
1> 3 | &Derived::g1
1>Derived::$vftable@Base2@:
1> | -8
1> 0 | &thunk: this-=8; goto Derived::f    <- jump instruction (thunk)
1> 1 | &Base2::g
1> 2 | &Base2::h
1>
1>Derived::$vftable@Base3@:
1> | -16
1> 0 | &thunk: this-=16; goto Derived::f   <- jump instruction (thunk)
1> 1 | &Base3::g
1> 2 | &Base3::h
*/

/* With Base1 inherited virtually:
1>class Derived size(32):
1> +---
1> 0 | +--- (base class Base2)
1> 0 | | {vfptr}
1> 4 | | _iBase2
1> | +---
1> 8 | +--- (base class Base3)
1> 8 | | {vfptr}
1>12 | | _iBase3
1> | +---
1>16 | {vbptr} ==> the vbptr goes with whichever class does the virtual inheritance
1>20 | _iDerived
1> +---
1> +--- (virtual base Base1)
1>24 | {vfptr}
1>28 | _iBase1
1> +---
*/
class Derived : virtual public Base1, public Base2, public Base3
{
public:
    Derived() : _iDerived(10000) {}

    // Overrides f() from all three bases (reached via thunks for Base2/Base3).
    void f() { cout << "Derived::f()" << endl; }

    // A new virtual of Derived's own; appended to the first vtable.
    virtual void g1() { cout << "Derived::g1()" << endl; }

private:
    int _iDerived;
};

int main(void)
{
    Derived d;
    Base2* pBase2 = &d;
    Base3* pBase3 = &d;
    // NOTE(review): pDerived is declared but unused below.
    Derived* pDerived = &d;

    // Dispatches through Base2's vtable slot (a thunk) to Derived::f.
    pBase2->f();

    cout << "sizeof(d) = " << sizeof(d) << endl;
    cout << "&Derived = " << &d << endl; // these three address values differ
    cout << "pBase2 = " << pBase2 << endl; //
    cout << "pBase3 = " << pBase3 << endl; //

    return 0;
}
cpp
- News Who is Monk Amogh Lila Das and Why Has ISKCON Banned Him? - Travel Have you been to Vishveshwaraya Falls in Mandya yet? If not, this is the best time! Today, for some zodiac signs there will be happiness and for others there will be challenges, so it is best to know what the stars have in store for you. Read your daily horoscope to find out what the stars of your fate have to say about you. Your marital life will be happy. You will be very happy to get support and love from your spouse. Today, the soul of the spouse will be much better. Things seem to be trending in your favor in romantic life. If your partner is angry with you, then today the differences between you can end, so that your proximity will increase once again. Financials will improve as the day progresses. Financial benefit from mother or father is possible. If you do business then you can get some big profit today. It is possible that some of your business plans may be completed today. Talking about health, today will be a good day for you. You will feel quite energetic. You will be very strong physically and will be able to complete many tasks fast. Today will be a wonderful day in terms of love and love. Today you will be in a more romantic mood. Although you will not be able to meet today, you will be connected with your partner through other means. Money will be in good condition. If you have recently borrowed from someone you will be able to return it today. On the other hand, if you do business, then it is possible for you to have a conflict with your partner today. It will be better if you are not very fierce otherwise it can have negative effects on your work. Today will be a wonderful day with your friends. Relations with family will be good. Having some influential people will increase your enthusiasm. Health matters will be good today. Do not neglect your health, but take care of yourself. Excessive stress is not good for your health at this time. 
On the family front it will be a good day. You will feel happy after spending a good time with your family. At the same time, mutual love and unity will remain among all. Today you will be able to dominate your enemies. Today there is a chance of getting a big success for the employed people. Your honest and hard work will give you more sweet fruit than expected. The day will also be very profitable for businessmen. Today, your stuck work can start again. Talking about married life, today will be a bit boring. Your spouse will be very busy with his work. Today will be a good day on the economic front. Today, there is a possibility of getting some big financial benefit suddenly, although only after tough struggle, you will get this money. It is better that you use it correctly. Anyway, you have to avoid negligence in financial matters at this time. Today some domestic matters can bother you. In such a situation, you must talk to your family in peace. The burden of work today can increase your stress. You must settle your work according to a better plan. It will be good to avoid any of your work for tomorrow. If you do business then today you can take a big and important decision. Take care of your health as well. Control your emotions otherwise you can take some wrong decisions today that will not only have a negative effect on you but your entire family. A relationship with a spouse can cause problems. It is possible that they do not agree with your words today. In such a situation, if you keep calm by not arguing, it will be better. Financial benefits are possible on the economic front. For some time, the efforts you were engaged in to strengthen your financial aspect, it is possible that today they will be successful. If you are working on a new project in the office, then you can also take the advice of your elders. His advice for you will prove to be the key to success. Today is not a good day for loving couples. 
The rift between you is weakening your relationship. You can see some positive changes at your workplace today. Your hard work that has been going on for a long time will bring color today and any of your wishes will be fulfilled today. Today some of your important projects will be completed successfully. The day will be full of ups and downs for businessmen. If you work related to property, today some loss is possible. On the economic front, the day will be normal. Spend it thoughtfully and keep your budget in mind. There will be tension in the house. The health of a household member can increase your anxiety. Do not be careless and consult a good doctor as soon as possible. Do not expect too much from others, you will feel disappointed. Today there is every possibility of improvement in your financial situation. You may get a new source of income today. Today will be one of the special days of your married life. Today some of your old memories will be refreshed once again. On the work front, the day is auspicious. If you do a job, today, along with keeping the workload light, the mood of your boss will also be good. Maybe today they will be more impressed by your work today. Time is favorable for students. You just keep working sincerely. There will be compatibility in your personal life. Your relationship with relatives will be good. Your health will be good. Today you will feel better mentally as well as you will be quite agile. You will be mentally disturbed today. Many negative thoughts can also come to the mind. In such a situation, you will feel a lot of pressure and cumbersome today. You have to understand, not every day is the same. If today you are not getting the results as expected then tomorrow's day will bring a new ray of hope for you, so do not waste your time in despair and sadness, but continue your efforts. Work hard with positive thinking. Today is not a good day on the economic front. Avoid spending too much without thinking. 
To maintain peace at home, today you have to take a lot of control over your speech. Avoid doing something that worsens the home environment. Today is not good for you in terms of health. If things are not going according to you, then you do not have to be disappointed and desperate. You must keep trying and at the same time you will have to work patiently, soon you will improve the situation. On the work front, if you are thinking of doing something new, time is not favorable for it. You must wait a bit, especially for employed people, it is advised that you must avoid any kind of change now. Businessmen also have to refrain from doing any new work today. It is good for you to be careful in terms of money, especially do not take loans. Apart from this, pay more attention to savings, it will be better not to waste money. You will get support from family members, which will reduce your stress significantly. You must not be careless about your health. Pay more attention to rest today, it will be better for you, as well as eat on time. On the economic front you may get good results. You can get back the money lent today. At this time, it is advisable to avoid making your financial decisions randomly. Today will be challenging on the work front, you will have to work very patiently. Avoid getting entangled with your superiors. your personal life will be happy. Relations with family will be good. On the other hand, today the life-partner's mood will not be right. Today they may behave quite rudely. In such a situation, you must try to convince them with love. There will be stability in romantic life. The mutual understanding will be good, which will deepen the love between you two. On the work front, your hard work you be successful. If you do business then today you can get good profit and once again your work will move fast. If you work then today you will have some important responsibilities. You will try to complete them honestly. 
Your financial condition will be good. Money is the sum of profit. Today, any stuck work related to money can also be completed. Any problem going on in married life will be solved. If you are worried about your spouse's health then you will see improvement in their health today but you have to avoid any carelessness while looking after them. Today will be a normal day if you are in love. Today your mood will be very good. You will pay more attention to the decoration of your home. It is possible that you will make some changes in the house today. In the second part of the day, a friend may suddenly come home. Today you will have a good time. Today you will feel mental peace and you will be refreshed. Talking about the work, where the day of businessmen will be normal, the work of employed people will go on smoothly. There will be happiness and peace in family life, today you will enjoy the time with your family. The relationship with your spouse will be good. Talking about your health, you will not face any kind of problem today. It will be better if you take care of your health in this way.
english
<gh_stars>0 import { Entity, Column, PrimaryGeneratedColumn, BeforeUpdate, BeforeInsert, ManyToOne, JoinColumn, OneToMany } from 'typeorm'; import { Rol, Suscription, City, Order } from '.'; @Entity('User') export class User { @PrimaryGeneratedColumn({ type: 'bigint' }) id: number; @Column('bigint') rolId: number; @Column('bigint') suscriptionId: number; @Column('bigint', { default: 2 }) cityId: number; @Column('varchar', { length: 255, nullable: true }) resetPasswordUuid: string; @Column('varchar', { length: 200 }) name: string; @Column('varchar', { length: 200, nullable: true }) lastName: string; @Column('varchar', { length: 200 }) email: string; @Column('varchar', { length: 200 }) password: string; @Column('varchar', { length: 100, default: '+34' }) indicator: string; @Column('varchar', { length: 200 }) phone: string; @Column('varchar', { length: 200, nullable: true }) gender: string; @Column('varchar', { length: 200, nullable: true }) latitude: string; @Column('varchar', { length: 200, nullable: true }) longitude: string; @Column('varchar', { length: 200, nullable: true }) ocupation: string; @Column('timestamptz', { nullable: true }) birthdayAt: Date; @Column('timestamptz', { nullable: true }) createdAt: Date; @Column('timestamptz', { nullable: true }) updatedAt: Date; @Column('timestamptz', { nullable: true }) deletedAt: Date; @ManyToOne(() => Rol, (rol: Rol) => rol.users, {}) @JoinColumn({ name: 'rolId' }) rol: Rol | null; @ManyToOne(() => Suscription, (suscription: Suscription) => suscription.users, {}) @JoinColumn({ name: 'suscriptionId' }) suscription: Suscription | null; @ManyToOne(() => City, (city: City) => city.users, {}) @JoinColumn({ name: 'cityId' }) city: City | null; @OneToMany(() => Order, (order: Order) => order.user) orders: Order[]; @BeforeUpdate() beforeUpdate() { this.updatedAt = new Date(); } @BeforeInsert() beforeSave() { this.createdAt = new Date(); this.updatedAt = new Date(); } }
typescript
@media ( min-width: 1200px ) { .toolbar-mode .col-md-5 { width: 43.666667%; padding-right: 5px; margin: 7px 0px; } .toolbar-mode .col-md-3 { width: 23%; } .col-md-7.product-shop { width: 55.5%;} .col-md-5.product-img-box { width: 44.5%;} } @media (max-width: 1199px) and (min-width: 1025px) { .nav-tabs > li > a { padding: 0 10px; } } @media (max-width: 1199px) { .block-title h2 { font-size: 14px; } .home-social { padding: 0 15px 30px; text-align: center; } .newsletter { padding: 30px 0; } #block-home-icon .static-block { display: block; padding-left: 0; text-align: center; padding-top: 10px; } .product-featured-content .tabs-content .owl-item { padding: 0 5px; } .flex-direction-nav{top:45%;margin-top:0 !important} .product-quickview .product-content{width:70%} .slideshow .content-link .link {margin-top: 45px;} .ajax-success-cbox .content{width:600px} .ajax-success-cbox .content .mesage-cart{padding-top:7px} } @media (min-width: 768px) and (max-width: 900px) { body .product-list-item .short-description { display: none; } .header-top-right i, .top-currency .dropdown-icon i{ font-size: 22px;width:20px} .menu-action .site-nav > li > a{padding-left:2px !important;padding-right:2px} .flex-direction-nav{top:50%;margin-top:-15px} .flex-direction-nav li a{width:30pxheight:30px} .main-slideshow .flexslider .flex-direction-nav a:before{font-size: 24px; position: relative; bottom: 10px;} } @media (min-width: 768px) and (max-width: 800px) { body #popup-newsletter .popupnewsletter{width:700px;} } @media (min-width: 768px) and (max-width: 990px) { body .banner-slide .feature-wrap-content .features-item-2{height: 352px;margin-bottom: 0;} .main-content .block-top iframe {height:172px!important;} .main-blog .owl-theme .owl-controls{margin-bottom:15px;} .main-blog .owl-theme .owl-controls .owl-buttons div{margin:0 6px;} } @media (max-width: 767px){ .grid-item .add-to-link,.quickview-button { display: none; } #header-content .header-top .text-right > div.wishlist { padding: 0 3px; } 
#header-content .header-top .text-right > div.lang-block { padding-right: 5px; } #header-content .header-top .col-md-6.text-right {text-align: center;} .footer-logo-top .link-list-footer { margin-top: 15px; } .footer-logo-top .row > div { text-align: center; } .footer-logo-top .footer-icon-share { text-align: center; } body .product-list-item .short-description { display: none; } .col-fillter-product.col-md-3{width:300px} .ajax-success-cbox .content{width: 320px; padding-left: 15px; padding-right: 15px;} .ajax-success-cbox .content .mesage-cart{ width: 100%; font-size: 15px; padding-top: 0px;} .ajax-success-cbox .button-link-cart{ float: left; text-align: center; width: 100%;} .ajax-success-cbox .content .button-link-cart .view-cart{float: left; margin-bottom: 10px; margin-right: 10px;} .ajax-success-cbox .content .btn{ float: left;} } /* width: 970px */ @media (min-width: 992px){ .col-sidebar-right .filter-collection-left{display:none;} .row.col-sidebar-right .col-fillter-product,.row.col-sidebar-left .col-fillter-product{ position: relative;left:0} .row.col-sidebar-right .col-fillter-product #filter-sidebar{overflow-y:unset;} } @media (min-width: 992px) and (max-width: 1199px) { .col-sidebar-right .col-main{padding:0;} body #block-top .block-item .block-icon{width:50px;} body #block-top .block-item .block-icon i{font-size:30px;} body #block-top .block-item{min-height:120px;padding-bottom:5px} body #block-top .block-item h3{letter-spacing:0;font-size:16px;} body .site-nav li.level-top{margin-right:20px} body .footer-top .social ul li a{margin-right:5px;} body .newsletter .input-group-field{width:100%;} body .main-slideshow .slide-des,body .allinone_bannerRotator_texts{top:15%} body .main-slideshow .slide-des .title,body .slideshow .content-slideshow h2,.breadcrumb h2{font-size: 50px; line-height: 1.2;} .banner-slide .slideshow .sbu-tile-slide {margin-top: 20px;} body .static-collection .content-slider h2,.block-top .static-collection p.price{font-size:18px;} body 
.main-slideshow .slide-des .slide-2{left:10%;} body .main-slideshow .slide-des .slide-1{left:40%;} /*body .main-slideshow .caption{width:520px;}*/ .newsletter .input-group-field { width: 245px; } body .load-more-product .load-product .product-item{width:33.3%;} body .static-collection.item-2 .content-slider p, body .static-collection.item-3 .content-slider p{display:none;} .toolbar .btn-group { margin-left: 15px; } body #header-inner-right{position:absolute;right:0;z-index:10;width:20%;padding:0;} .static-collection .block-content{padding:20px 30px 20px;} div#carousel-top .newsletter{padding: 40px 30px 30px; border: 15px solid #f2f2f2;} div#carousel-top .newsletter .title-top { margin-bottom: 25px;} .site-nav a{font-size: 12px;} body .product-list .grid-item .product-image{width:36%} body .product-list .grid-item .product-content{width:61%} .product-content h3 a { min-height: 40px;/* display: inline-block;*/vertical-align: top; line-height: 20px; } .product-quickview .product-tabs .nav.nav-tabs > li > a { margin: 0 17px;} .product-quickview .product-content { width: 835px;height:576px;} #header-content .menu-top .site-nav li.level-top.active:hover ul a, #header-content .menu-top .site-nav li.level-top.active:focus ul a, #header-content .menu-top .site-nav .dropdown-sub ul li a{font-size:11px;line-height:26px} #header-content .menu-top .site-nav li.level-top { padding: 0 12px;} .grid-item .product-image .add-to-link div.add-to-cart span:before, span.add-cart:before{margin-left:-67px;} .aboutus .about-our-team h2 {font-size: 60px;} .about-brand .content-brand .about-content p:last-child {max-height: 145px;} .about-brand .content-brand .about-content p:first-child {max-height: 120px;} } @media (min-width: 992px) and (max-width: 999px) { .grid-item .product-image .add-to-link div.add-to-cart span:before, span.add-cart:before { margin-left: -60px;} .grid-item.product-item.col-md-4.col-sm-4.col-xs-12 span.add-cart:before{margin-left:63px;} } @media (min-width: 1025px){ 
.block-megamenu-content:hover #verticalmenu,.template-index .header.is-ticky .block-megamenu-content:hover #verticalmenu { opacity: 1; margin-top: 0; visibility: visible; animation-delay: 100ms; animation-duration: 340ms; animation-fill-mode: both; animation-name: animation_vertial_categories; backface-visibility: hidden; display: block; } .template-index .header.is-ticky .block-megamenu-content #verticalmenu {opacity: 0;visibility: hidden;} .is-ticky{ background: #fff none repeat scroll 0 0; position: fixed;left:0; top: 0;z-index: 9999;box-shadow: none; width: 100%; } #menu-vertical #verticalmenu .navbar-nav > li .dropdown-menu { backface-visibility: hidden; left: 100% !important; list-style: outside none none; margin-left: 0; padding-left: 15px; margin-top: 0; min-height: 70px; border: 0; border-top: 2px solid; box-shadow: none; opacity: 0; right: auto !important; top: 0; min-width: 230px; transform-origin: 0 100% 0; visibility: hidden; border: 0; border-radius: 0; z-index:999; height: 470px; } #menu-vertical #verticalmenu .navbar-nav > li.dropdown:hover .dropdown-menu { visibility: visible; top: -1px; opacity: 1; transition: all 0.3s ease-in-out 0s; animation-delay: 100ms; animation-duration: 340ms; animation-fill-mode: both; animation-name: animation_vertical_menu; } .template-index .header.is-ticky .block-megamenu-content #verticalmenu { opacity: 0; visibility: hidden; } .template-index .header.is-ticky .block-megamenu-content:hover #verticalmenu { opacity: 1; visibility: visible; } .header.is-ticky .header-body { display: none; } .tabs-product .nav-tabs { display: block !important; overflow: visible !important; } .tabs-product .show-tabs { display: none; } .toggle-menu { display: none; } .search-none {display: none;} .icon-search,div#search-top .search-close { display: none; } div#search-top .search-form { display: block !important; position: inherit; border: 0; width: 100%; background: transparent; } } @media (max-width: 1024px){ .header-cart #dropdown-cart 
{display: none;} .header.is-ticky {display: static;} #verticalmenu { position: inherit;} #menu-vertical #verticalmenu .navbar-nav > li.open .dropdown-menu { left: 0 !important; top: 1px; position: relative; height: auto; opacity: 1; visibility: visible; width: 100% !important; } #menu-vertical #verticalmenu .navbar-nav > li .dropdown-menu { backface-visibility: hidden; left: 0 !important; list-style: outside none none; margin-left: 0; margin-top: 0; border-top: 2px solid; box-shadow: none; opacity: 0; right: auto !important; top: 0; min-width: 230px; transform-origin: 0 100% 0; visibility: hidden; border: 0; height: 0; border-radius: 0; z-index: 999; position: relative; transition: all 0s; transform: translateX(0); -ms-transform: translateX(0); -webkit-transform: translateX(0); -moz-transform: translateX(0); } .block-megamenu { width: 100%; padding: 0 15px; } .block-megamenu .nav-verticalmenu i.fa-angle-right { font-size: 24px; } .nav .open>a, .nav .open>a:focus, .nav .open>a:hover { background-color: transparent; border-color: transparent; } #menu-vertical #verticalmenu img {display: none;} #verticalmenu { position: initial; margin-bottom: 20px; } .banner-slide { padding-left: 15px; margin-left: 0; width: 100%; } div#search-top .search-close { position: absolute; right: 40px; top: 61px; cursor: pointer; z-index: 99999; } #header-content .header-body {position: relative;} div#search-top .search-form { background: #fff none repeat scroll 0 0; display: block; height: 50px; position: absolute; right: 0; top: 45px; z-index: 8880; width: 100%; padding: 0 15px; } #search-top .input-group { width: 100%; } #search-top .input-group .input-group-field { transition: all 0.3s ease-in-out 0s; min-width: 100%; font-size: 14px; letter-spacing: 0.4px; width: 100%; margin: 0; padding: 0 10px 0 40px; height: 46px; border: 1px solid #ebebeb; color: #bcbbb9; padding-right: 60px; position: absolute; top: -12px; left: 0; z-index: 8; display: block; background: transparent; } #search-top 
.input-group .input-group-btn {display: none;} #search-top.active .search-form,#search-top.active .search-close {display: block !important;} #search-top .search-close{display: none;} #search-top.active .icon-search {opacity: 0;} #header-content .header-bottom {height: 0;} .toggle-menu { display: block; } .newsletter { padding: 30px 0; } .home-social { padding: 0 15px 30px; text-align: center; } .icon-bar { font-size: 24px; line-height: 50px; text-align: right; padding-right: 15px; cursor: pointer; } .tabs-product-home .nav.nav-tabs { padding: 10px 0; position: absolute; min-width: 250px; max-width: 300px; width: 100%; text-align: left; height: auto; background: #fff; z-index: 9999; right: 30px; top: 51px; box-shadow: 0 0 6px 0 rgba(1,1,1,.3); } .tabs-product-home .nav-tabs > li { display: block; border: 0 !important; background: transparent !important; line-height: 35px; } .header-body .header-top-right i {line-height: 0;padding-top: 20px;} .header-body a#cartToggle .top-icon-cart { background: transparent; border: 0; width: auto; height: auto; border-radius: 0; } div#search-top i { font-size: 20px; color: #252525; cursor: pointer; } .search-right { text-align: right; float: right; position: static; margin-top: 5px; } #search-top.search-none { display: inline-block; vertical-align: middle; padding-top: 10px; } .header-top-right { float: none; display: inline-block; vertical-align: middle; } .search-none {display: block;} .header-tag,.menu-none,.header-body .text-cart-left { display: none; } .menu-action{display:none;} body .navbar-toggle{display:block;margin:0 auto;float:left;font-size:26px;padding: 5px 0;} #header-menu .menu-top{text-align:center;} .header-top-right .top-currency .ion-navicon:before {content: "\f43c";} .main-slideshow .slide-des .title::before, body .slideshow .content-slideshow h2::before {border-bottom: 3px solid #ff66cc;bottom: -5px;} } @media (max-width: 991px){ #header-content .header-top .header-top-text {display: none;} .tabs-product-home 
.owl-theme .owl-controls,.home-blog .owl-theme .owl-controls { display: block !important; } .home-blog .owl-theme .owl-pagination {display: none;} .color-block-icons { margin-bottom: 20px; } .block-megamenu .nav-verticalmenu { margin: 0 0 15px; } .block-megamenu,.banner-slide {padding: 0 15px;} .col-md-4.total-page { text-align: left; clear: both;display:none} .show-fillter .col-fillter-product{padding:0} } @media (min-width: 768px) and (max-width: 810px) { home #owl_tabnew.owl-carousel { padding: 0 70px;} .tabs-product-home .owl-theme .owl-controls .owl-next { right: 38px;} .tabs-product-home .owl-theme .owl-controls .owl-prev { left: 35px;} } /* width: 750px */ @media (min-width: 768px) and (max-width: 991px) { .block-bottom-sidebar .content-block-left{bottom:13px} a.let{font-size:13px;line-height:20px;padding:4px 0} .col-main.col-md-9 .related-products .add-to-link{display:none} .product-quickview .product-content {width: 630px;height:550px} .product-quickview .product-tabs .nav.nav-tabs > li > a { margin: 0 8px; font-size: 13px;letter-spacing: 0;} .product-quickview .product-shop .prices { margin-top: 8px; margin-bottom: 0;} .product-quickview .product-tabs .nav-tabs { margin-top: 8px;} .product-quickview .product-shop .quantity{height:44px;width:44px; font-size: 18px;} .related-products .add-to-link .quickview-button{display:none} body#products .grid-item.product-item.col-md-4 .variants .btn,body#products .grid-item.product-item.col-md-4 .variants .btn:hover{ padding: 0 7px 0 13px; font-size: 9px;} body#products .grid-item.product-item.col-md-4.col-sm-6 .variants .btn,body#products .grid-item.product-item.col-md-4.col-sm-6 .variants .btn:hover{ font-size: 10px;} #products .grid-item.product-item.col-md-4.col-sm-6 .product-image .add-to-link div.add-to-cart span:before{ margin-left: -58px;} #products .grid-item .product-image .add-to-link div.add-to-cart span:before{ font-size: 15px; top: 16px; margin-left: -48px;} #products .grid-item .add-to-link .wishlist 
span i { font-size: 13px;} #products .quickview-button a,#products .add-to-link .quickview-button a:hover{font-size:17px} #products .quickview-button a i,#products .add-to-link .quickview-button a:hover i{ position: relative; top: 2px;} .logo h1 { margin-bottom: 10px; margin-top: 9px;} .page-error .main-content .icon{margin-top:200px} .page-error .main-content{margin-bottom:170px} body .product-list .grid-item .product-image, body .product-list .grid-item .product-content{width:49%} body .owl-theme .owl-controls .owl-prev{left:8px;} body .owl-theme .owl-controls .owl-next{right:8px;} .collection-title h3.h5 { font-size: 18px; } .product-item .btn{padding:0 8px;} .refined-widgets > a { float: left; } .toolbar .view-mode label { display: none; } .toolbar .view-mode { margin: 16px 8px 16px 16px; } /* cart page */ .cart-row .btn { font-size: 12px; padding: 0 10px; } /* contact page */ #get-rates-submit { padding: 0 10px; } body .site-nav a{font-size: 12px; letter-spacing: 0;} .block-top .static-collection .block-content{padding:20px;line-height:18px} } @media (max-width: 1024px) { .typo-slider .rev_slider_0 .typo-title { padding: 15px 8px !important; } #offcanvas .navbar-nav li a { text-transform: uppercase; font-weight: 400; line-height: 26px;} #offcanvas .navbar-nav li ul a{ font-weight: 400;font-size: 12px;} } @media (max-width: 999px) and (min-width: 768px){ } @media (max-width: 999px){ } @media (max-width: 991px) and (min-width: 768px){ .block-top .static-collection .block-content .content-slider { font-size: 12px; line-height: 25px; } .block-top .item-3 .static-collection .block-content { bottom: 0; right: 15px; } .block-top .static-collection .block-content h3.block-title { font-size: 16px; line-height: 20px; } } @media (min-width: 768px) { } .zoomWrapper { display: block; height: 100% !important; width: 100% !important; } /* width: 100% */ @media (max-width: 767px) { .footer-container .footer-bottom address,.paypal-footer {text-align: center;} .home-social 
.footer-icon-share a { width: 35px; height: 35px;; line-height: 35px; } .home-social .footer-icon-share {padding-left: 0;} .newsletter .content { width: 100%; } .newsletter h3 { float: none; display: block; text-align: center; } .typo-slider .rev_slider_0 .typo-title { font-size: 30px!important; line-height: 46px!important; border: 0 !important; padding: 0 !important; } .typo-slider .rev_slider_1 .typo-subtitle { font-size: 24px!important; line-height: 26px!important; } .typo-slider .rev_slider_1 .typo-title { font-size: 33px!important; line-height: 26px!important; } .typo-slider .rev_slider_1 .typo-content { font-size: 24px!important; line-height: 26px!important; } .typo-slider .rev_slider_2 .typo-title { font-size: 30px!important; line-height: 26px!important; } body #popup-newsletter{display:none!important;} .zoomContainer {display: none !important;visibility: hidden!important;} a#placeholder{display: inline-block; width: 100%;position: relative;} a#placeholder:before { content: "";height: 100%;width: 100%; position: absolute;top: 0;left: 0; z-index: 1;} #offcanvas .navbar-nav li a{ text-transform: uppercase; font-weight: 400;} #offcanvas .navbar-nav li ul a{font-weight:400;font-size:12px;} #cart table tr td, #cart table th{padding:5px;} #cart table tr td img{max-width:80%;} #cart table .tc.item-quantity{width:30px;padding:0;text-align:center;} .toolbar-mode .col-md-4.hidden-sm.hidden-xs{width:100%} .toolbar-mode .hidden-xs { margin: 5px 0; display: block !important; width: 50%; overflow: hidden; float: left;} .toolbar .browse-tags { text-align: left;display:none} .collection-view { line-height: 39px; text-align: right;float: right;} .toolbar-mode .col-md-4.hidden-sm.hidden-xs{text-align:left;} } @media (max-width: 730px) { } @media (max-width: 567px) { } @media (max-width: 530px) { .product-deal .timeline > i,.product-deal .timeline .timeline-block > div { width: 52px; } .typo-slider .rev_slider_1 .typo-subtitle { font-size: 18px!important; line-height: 
20px!important; } .typo-slider .rev_slider_1 .typo-title { font-size: 22px!important; line-height: 24px!important; } .typo-slider .rev_slider_1 .typo-content { font-size: 18px!important; line-height: 26px!important; } .typo-slider .rev_slider_2 .typo-subtitle { font-size: 18px!important; line-height: 20px!important; letter-spacing: 0 !important; } .typo-slider .rev_slider_2 .typo-title { font-size: 22px!important; line-height: 24px!important; } .typo-slider .rev_slider_2 .typo-content { font-size: 18px!important; line-height: 20px!important; } .typo-slider .rev_slider_0 .typo-title { font-size: 22px!important; line-height: 40px!important; } .typo-slider .rev_slider_0 .typo-subtitle { font-size: 18px!important; line-height: 22px!important; letter-spacing: 0 !important; text-transform: capitalize; font-weight: normal; } .typo-slider .rev_slider_0 .typo-content { font-size: 18px!important; line-height: 22px!important; } .typo-slider .rev_slider_0 > div { left: 10% !important; right: auto !important; } } @media (max-width: 499px) { } @media (max-width: 479px) { .product-deal .timeline { float: left; margin-bottom: 10px; } #header-content .header-top .text-right .currency-title {display: none;} } @media (max-width: 360px) { .logo-container {padding: 0;} .icon-search { padding-top: 5px; } #search-top.search-none {padding-top: 5px;} .product-deal .timeline > i {display: none;} .is-ticky .logo-sticky {margin-top: 8px;} .header-body .header-top-right i {font-size: 18px;} .logo { margin-top: 5px; } } @media (min-width: 600px) and (max-width: 767px) { .products-grid .grid-item.product-item.col-xs-12 { width: 50%;} .products-grid .grid-item.product-item.col-md-6.col-sm-6.col-xs-12 { width: 50%;} .products-grid .grid-item.product-item.col-md-3.col-sm-3.col-xs-12 { width: 50%;} } @media (min-width: 480px) and (max-width: 767px) {body .product-list-item .short-description{display:none}} @media (min-width: 481px) and (max-width: 767px) { .bestseller-homepage 
.col-lg-3.col-md-4.col-sm-6.col-xs-12 { width: 50%;} } .product-grid-isotope.product-list-item{height:auto!important;} .rte img{display:none;}
css
It is now more or less certain that Kamal Haasan is going to enter the political arena sooner rather than later. Thirunavukkarasar and Nagma from the Congress party and S. Ve Shekar from the BJP have recently met the Ulaganayagan fuelling speculations that both the national parties are wooing him. Political analysts are of the opinion that Kamal is drawn towards communism, which is evident in his outlook and speeches. The celebrated actor recently had a lunch meeting with Kerala Chief Minister Pinarayi Vijayan and unconfirmed sources claim that his future political strategy was discussed. A news started doing the rounds that Kamal will now take part and deliver a speech in a Communist Conference on Religious Intolerance to be held in Kozhikode, Kerala which will be presided over by Pinarayi Vijayan and the Marxist Communist Party of Kerala, leader Kodiyeri Balakrishnan on the 16th of September. However Kamal has denied being asked to take part in the conference and he has stated " Embarrassing. Was not asked for Calicut meeting with Kerala CM. I am at Bigg boss all saturdays till Oct. Best wishes for the function. " Follow us on Google News and stay updated with the latest!
english
<reponame>emcleod/OG-Platform /** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.datasets; import org.threeten.bp.LocalDate; import com.opengamma.financial.convention.calendar.MondayToFridayCalendar; /** * */ public class CalendarGBP extends MondayToFridayCalendar { /** * Calendar for Target non-good business days. Only for test purposes, is not accurate enough for production. */ private static final long serialVersionUID = 1L; /** * Constructor * @param name The name */ public CalendarGBP(final String name) { super(name); final int startYear = 2013; final int endYear = 2063; for (int loopy = startYear; loopy <= endYear; loopy++) { addNonWorkingDay(LocalDate.of(loopy, 1, 1)); addNonWorkingDay(LocalDate.of(loopy, 12, 25)); addNonWorkingDay(LocalDate.of(loopy, 12, 26)); } final LocalDate easter[] = new LocalDate[] {LocalDate.of(2013, 3, 31), LocalDate.of(2014, 4, 20), LocalDate.of(2015, 4, 5), LocalDate.of(2016, 3, 27), LocalDate.of(2017, 4, 16), LocalDate.of(2018, 4, 1), LocalDate.of(2019, 4, 21), LocalDate.of(2020, 4, 12), LocalDate.of(2021, 4, 4), LocalDate.of(2022, 4, 17), LocalDate.of(2023, 4, 9), LocalDate.of(2024, 3, 31), LocalDate.of(2025, 4, 20), LocalDate.of(2026, 4, 5), LocalDate.of(2027, 3, 28), LocalDate.of(2028, 4, 16), LocalDate.of(2029, 4, 1), LocalDate.of(2030, 4, 21), LocalDate.of(2031, 4, 13), LocalDate.of(2032, 3, 28), LocalDate.of(2033, 4, 17), LocalDate.of(2034, 4, 9), LocalDate.of(2035, 3, 25), LocalDate.of(2036, 4, 13), LocalDate.of(2037, 4, 5), LocalDate.of(2038, 4, 25), LocalDate.of(2039, 4, 10), LocalDate.of(2040, 4, 1), LocalDate.of(2041, 4, 21), LocalDate.of(2042, 4, 6), LocalDate.of(2043, 3, 29), LocalDate.of(2044, 4, 17), LocalDate.of(2045, 4, 9), LocalDate.of(2046, 3, 25), LocalDate.of(2047, 4, 14), LocalDate.of(2048, 4, 5), LocalDate.of(2049, 4, 18) }; for (final LocalDate element : easter) { 
addNonWorkingDay(element.minusDays(2)); // Easter Friday addNonWorkingDay(element.plusDays(1)); // Easter Monday } addNonWorkingDay(LocalDate.of(2014, 4, 18)); addNonWorkingDay(LocalDate.of(2014, 4, 21)); addNonWorkingDay(LocalDate.of(2014, 5, 5)); addNonWorkingDay(LocalDate.of(2014, 5, 26)); addNonWorkingDay(LocalDate.of(2014, 8, 25)); addNonWorkingDay(LocalDate.of(2015, 4, 3)); addNonWorkingDay(LocalDate.of(2015, 4, 6)); addNonWorkingDay(LocalDate.of(2015, 5, 4)); addNonWorkingDay(LocalDate.of(2015, 5, 25)); addNonWorkingDay(LocalDate.of(2015, 8, 31)); addNonWorkingDay(LocalDate.of(2015, 12, 28)); addNonWorkingDay(LocalDate.of(2016, 3, 25)); addNonWorkingDay(LocalDate.of(2016, 3, 28)); addNonWorkingDay(LocalDate.of(2016, 5, 2)); addNonWorkingDay(LocalDate.of(2016, 5, 30)); addNonWorkingDay(LocalDate.of(2016, 8, 29)); addNonWorkingDay(LocalDate.of(2016, 12, 27)); addNonWorkingDay(LocalDate.of(2017, 1, 2)); addNonWorkingDay(LocalDate.of(2017, 4, 14)); addNonWorkingDay(LocalDate.of(2017, 4, 17)); addNonWorkingDay(LocalDate.of(2017, 5, 1)); addNonWorkingDay(LocalDate.of(2017, 5, 29)); addNonWorkingDay(LocalDate.of(2017, 8, 28)); addNonWorkingDay(LocalDate.of(2018, 3, 30)); addNonWorkingDay(LocalDate.of(2018, 4, 2)); addNonWorkingDay(LocalDate.of(2018, 5, 7)); addNonWorkingDay(LocalDate.of(2018, 5, 28)); addNonWorkingDay(LocalDate.of(2018, 8, 27)); addNonWorkingDay(LocalDate.of(2019, 4, 19)); addNonWorkingDay(LocalDate.of(2019, 4, 22)); addNonWorkingDay(LocalDate.of(2019, 5, 6)); addNonWorkingDay(LocalDate.of(2019, 5, 27)); addNonWorkingDay(LocalDate.of(2019, 8, 26)); } }
java
Asian stocks edged lower on Tuesday as the prospect of the U. S. central bank having to stay on its hawkish path weighed on sentiment, with investors looking to the minutes of the latest Federal Reserve meeting for further monetary policy clues. MSCI's broadest index of Asia-Pacific shares outside Japan eased 0. 34% to 531. 85, hovering around six-week lows of 529. 30 it touched last week. The index is down nearly 3% this month after jumping 8. 6% in January as a slew of robust U. S. economic data reinforced fears that interest rates may need to rise further and stay higher for longer. The market is now pricing U. S. interest rates to peak at 5. 30% in July and remain above 5% by the end of the year, moving away from expectations of deeper rate cuts this year. Japan's Nikkei was 0. 01% higher, while Australia's S&P/ASX 200 index fell 0. 52%. China's shares were set to start flat while Hong Kong's Hang Seng Index opened 0. 1% lower. "The backdrop of inflation concerns in the U. S. is still keeping risks of a tighter than expected monetary policy, and yields remain a key focus as U. S. markets return later today," strategists at Saxo Markets said. U. S. markets were closed on Monday due to President's Day holiday. The yield on 10-year Treasury notes was up 3. 5 basis points to 3. 863%, after touching a three-month high of 3. 929% on Friday. The yield on the 30-year Treasury bond was up 1 basis points to 3. 899%, while that of the two-year U. S. Treasury paper, which typically moves in step with interest rate expectations, was up 5. 4 basis points at 4. 677%. Investor focus is firmly on the release on Wednesday of the minutes of the Fed's latest meeting when it raised interest rates by 25 basis points. DBS currency strategist Philip Wee said the market is bracing for another surprise in the PCE data after the strong U. S. nonfarm payrolls and CPI readings this month. The dollar index, which measures the U. S. currency against six other rivals, was last at 104. 
01, just below a six-week high of 104. 67 it touched on Friday. The euro fell 0. 12% to $1. 0669, and is set to snap four straight months of gains and end February lower. The yen weakened 0. 12% to 134. 40 per dollar, while sterling was last at $1. 2022, down 0. 13%.
english
def grayscale(image):
    """Convert an RGB image to grayscale in place.

    Each pixel's three channels are replaced by the integer mean
    (floor division by 3) of the original channel values.  Mutates
    ``image`` and returns ``None``.
    """
    height = image.shape[0]
    width = image.shape[1]
    for y in range(height):
        for x in range(width):
            pixel = image[y][x]
            # Convert to Python ints before summing so uint8 channels
            # cannot wrap around.
            mean = (int(pixel[0]) + int(pixel[1]) + int(pixel[2])) // 3
            image[y][x] = [mean, mean, mean]
python
# examples/reactors/ic_engine.py
# -*- coding: utf-8 -*-
"""
Simulation of a (gaseous) Diesel-type internal combustion engine.

The use of pure propane as fuel requires an unrealistically high
compression ratio.
"""

import cantera as ct
import numpy as np

#######################################################################
# Input Parameters
#######################################################################

f = 3000. / 60.          # engine speed [1/s] (3000 rpm)
V_H = .5e-3              # displaced volume [m**3]
epsilon = 50.            # compression ratio [-]
d_piston = 0.083         # piston diameter [m]

# turbocharger temperature, pressure, and composition
T_inlet = 300.           # K
p_inlet = 1.3e5          # Pa
comp_inlet = 'O2:1, N2:3.76'

# outlet pressure
p_outlet = 1.2e5         # Pa

# fuel properties (gaseous!)
T_injector = 300.        # K
p_injector = 1600e5      # Pa
comp_injector = 'C3H8:1'

# ambient properties
T_ambient = 300.         # K
p_ambient = 1e5          # Pa
comp_ambient = 'O2:1, N2:3.76'

# Reaction mechanism name
reaction_mechanism = 'gri30.xml'

# Inlet valve friction coefficient, open and close timings
inlet_valve_coeff = 1.e-6
inlet_open = -18. / 180. * np.pi
inlet_close = 198. / 180. * np.pi

# Outlet valve friction coefficient, open and close timings
outlet_valve_coeff = 1.e-6
outlet_open = 522. / 180. * np.pi
outlet_close = 18. / 180. * np.pi

# Fuel mass, injector open and close timings
injector_open = 350. / 180. * np.pi
injector_close = 365. / 180. * np.pi
injector_mass = 3.2e-5   # kg
injector_t_open = (injector_close - injector_open) / 2. / np.pi / f

# Simulation time and resolution
sim_n_revolutions = 8.
sim_n_timesteps = 100000.

###################################################################

# load reaction mechanism
gas = ct.Solution(reaction_mechanism)

# define initial state
gas.TPX = T_inlet, p_inlet, comp_inlet
r = ct.IdealGasReactor(gas)

# define inlet state
gas.TPX = T_inlet, p_inlet, comp_inlet
inlet = ct.Reservoir(gas)

# define injector state (gaseous!)
gas.TPX = T_injector, p_injector, comp_injector
injector = ct.Reservoir(gas)

# define outlet pressure (temperature and composition don't matter)
gas.TPX = T_ambient, p_outlet, comp_ambient
outlet = ct.Reservoir(gas)

# define ambient pressure (temperature and composition don't matter)
gas.TPX = T_ambient, p_ambient, comp_ambient
ambient_air = ct.Reservoir(gas)

# set up connecting devices
inlet_valve = ct.Valve(inlet, r)
injector_mfc = ct.MassFlowController(injector, r)
outlet_valve = ct.Valve(r, outlet)
piston = ct.Wall(ambient_air, r)


def crank_angle(t):
    """Convert simulation time to crank angle in [0, 4*pi)
    (one full four-stroke cycle spans two revolutions)."""
    return np.remainder(2 * np.pi * f * t, 4 * np.pi)


# set up IC engine parameters
V_oT = V_H / (epsilon - 1.)          # clearance (top dead center) volume
A_piston = .25 * np.pi * d_piston ** 2
stroke = V_H / A_piston
r.volume = V_oT
piston.area = A_piston


def piston_speed(t):
    """Piston velocity [m/s] for an idealized sinusoidal crank motion."""
    return - stroke / 2 * 2 * np.pi * f * np.sin(crank_angle(t))


piston.set_velocity(piston_speed)

# create a reactor network containing the cylinder
sim = ct.ReactorNet([r])

# set up output data arrays
states = ct.SolutionArray(r.thermo)
t_sim = sim_n_revolutions / f
t = (np.arange(sim_n_timesteps) + 1) / sim_n_timesteps * t_sim
V = np.zeros_like(t)
m = np.zeros_like(t)
test = np.zeros_like(t)              # 1 while the inlet valve is open
mdot_in = np.zeros_like(t)
mdot_out = np.zeros_like(t)
d_W_v_d_t = np.zeros_like(t)
heat_release_rate = np.zeros_like(t)

# set parameters for the automatic time step refinement
n_last_refinement = -np.inf  # for initialization only
n_wait_coarsening = 10

# do simulation
for n1, t_i in enumerate(t):
    # define opening and closing of valves and injector
    if (np.mod(crank_angle(t_i) - inlet_open, 4 * np.pi) <
            np.mod(inlet_close - inlet_open, 4 * np.pi)):
        inlet_valve.set_valve_coeff(inlet_valve_coeff)
        test[n1] = 1
    else:
        inlet_valve.set_valve_coeff(0)
    if (np.mod(crank_angle(t_i) - outlet_open, 4 * np.pi) <
            np.mod(outlet_close - outlet_open, 4 * np.pi)):
        outlet_valve.set_valve_coeff(outlet_valve_coeff)
    else:
        outlet_valve.set_valve_coeff(0)
    if (np.mod(crank_angle(t_i) - injector_open, 4 * np.pi) <
            np.mod(injector_close - injector_open, 4 * np.pi)):
        injector_mfc.set_mass_flow_rate(injector_mass / injector_t_open)
    else:
        injector_mfc.set_mass_flow_rate(0)

    # perform time integration, refine time step if necessary.
    # BUG FIX: the original guard `if n2 is 4` could never fire
    # (range(4) stops at 3) and `raise '...'` raised a plain string,
    # which is a TypeError in Python 3.  Use for/else with a proper
    # exception instead, and stop retrying once advance() succeeds.
    for n2 in range(4):
        try:
            sim.advance(t_i)
            break
        except Exception:
            sim.set_max_time_step(1e-6 * 10. ** -n2)
            n_last_refinement = n1
    else:
        raise RuntimeError('Error: Refinement limit reached')

    # coarsen time step if the last refinement was long enough ago
    # (== instead of the original identity check `is` on integers)
    if n1 - n_last_refinement == n_wait_coarsening:
        sim.set_max_time_step(1e-5)

    # write output data
    states.append(r.thermo.state)
    V[n1] = r.volume
    m[n1] = r.mass
    mdot_in[n1] = inlet_valve.mdot(0)
    mdot_out[n1] = outlet_valve.mdot(0)
    d_W_v_d_t[n1] = - (r.thermo.P - ambient_air.thermo.P) * A_piston * \
        piston_speed(t_i)
    heat_release_rate[n1] = - r.volume * ct.gas_constant * r.T * \
        np.sum(gas.standard_enthalpies_RT * r.thermo.net_production_rates, 0)

#####################################################################
# Plot Results in matplotlib
#####################################################################

import matplotlib.pyplot as plt

# pressure and temperature
plt.clf()
plt.subplot(211)
plt.plot(t, states.P / 1.e5)
plt.ylabel(r'$p$ [bar]')
plt.xlabel(r'$\phi$ [deg]')
plt.xticks(plt.xticks()[0], [])
plt.subplot(212)
plt.plot(t, states.T)
plt.ylabel(r'$T$ [K]')
plt.xlabel(r'$\phi$ [deg]')
plt.xticks(plt.xticks()[0], crank_angle(plt.xticks()[0]) * 180 / np.pi,
           rotation=17)
plt.show()
plt.savefig('ic_engine_t_p_T.png')

# p-V diagram
plt.clf()
plt.plot(V[t > 0.04] * 1000, states.P[t > 0.04] / 1.e5)
plt.xlabel(r'$V$ [l]')
plt.ylabel(r'$p$ [bar]')
plt.show()
plt.savefig('ic_engine_p_V.png')

# T-S diagram
plt.clf()
plt.plot(m[t > 0.04] * states.s[t > 0.04], states.T[t > 0.04])
plt.xlabel(r'$S$ [J/K]')
plt.ylabel(r'$T$ [K]')
plt.show()
plt.savefig('ic_engine_T_S.png')

# heat of reaction and expansion work
plt.clf()
plt.plot(t, heat_release_rate, label=r'$\dot{Q}$')
plt.plot(t, d_W_v_d_t, label=r'$\dot{W}_v$')
plt.ylim(-1e5, 1e6)
plt.legend(loc=0)
plt.ylabel('[W]')
plt.xlabel(r'$\phi$ [deg]')
plt.xticks(plt.xticks()[0], crank_angle(plt.xticks()[0]) * 180 / np.pi,
           rotation=17)
plt.show()
plt.savefig('ic_engine_Q_W.png')

# gas composition
plt.clf()
plt.plot(t, states('O2').X, label='O2')
plt.plot(t, states('CO2').X, label='CO2')
plt.plot(t, states('CO').X, label='CO')
plt.plot(t, states('C3H8').X * 10, label='C3H8 x10')
plt.legend(loc=0)
plt.ylabel(r'$X_i$ [-]')
plt.xlabel(r'$\phi$ [deg]')
plt.xticks(plt.xticks()[0], crank_angle(plt.xticks()[0]) * 180 / np.pi,
           rotation=17)
plt.show()
plt.savefig('ic_engine_t_X.png')

#####################################################################
# Integral Results
#####################################################################

from scipy.integrate import trapz
Q = trapz(heat_release_rate, t)
W = trapz(d_W_v_d_t, t)
eta = W / Q
MW = states.mean_molecular_weight
CO_emission = trapz(MW * mdot_out * states('CO').X[:, 0], t) / \
    trapz(MW * mdot_out, t)
print('Heat release rate per cylinder (estimate):\t' +
      format(Q / t_sim / 1000., ' 2.1f') + ' kW')
print('Expansion power per cylinder (estimate):\t' +
      format(W / t_sim / 1000., ' 2.1f') + ' kW')
print('Efficiency (estimate):\t\t\t' + format(eta * 100., ' 2.1f') + ' %')
print('CO emission (estimate):\t\t' +
      format(CO_emission * 1.e6, ' 2.1f') + ' ppm')
python
{ "app": { "description": "Turn Docker Hub repository comments into Zendesk tickets", "name": "Docker Hub Repository Comments", "long_description": "Integrate comments from any Docker Hub repository into your Zendesk turning them into tickets. Then simply address them taking advantage of Zendesk's views, macros, and triggers.\n\nFeatures:\n\n* All comments for a repository: This integration creates tickets for all comments for a specified Docker Hub repository.\n* Clickthrough support: Links to Docker Hub comments on their source website are added to every ticket created by the integration.\n* Up to 100 tickets every 2 minutes: Integration is capable of turning 100 Docker Hub comments into tickets every 2 minutes (per configured repository).\n* Multiple repositories supported: You can enable this integration to work with any number of Docker Hub repositories at a time.\n* Easy installation: Just install the application from Zendesk Marketplace to your instance, add contact information and website/tag - that's it!\n\nMore information in [FAQ](https://zendesk.mvink.me/faq.htm)\n\nPlease submit bug reports to [<EMAIL>](<EMAIL>)", "installation_instructions": "To use the Docker Hub Repository Comments Integration, you'll need to install the app from Zendesk's App Marketplace.\n\nTo install the Docker Hub Repository Comments channel integration:\n\n* Click the Admin icon, then select Apps > Marketplace. Locate and click the Docker Hub icon.\n* Read the Overview, then click the Install app button in the upper right.\n* Edit the installation info if needed, and click the link to view the Zendesk Marketplace Terms of Use.\n* Click the Install button.\n* The integration is added to your Channel Integrations page. 
Click Channels > Channel Integrations to access it.\n\nConnecting your Docker Hub repository to the Integration:\n\n* Make sure you've navigated to Channels > Channel Integrations\n* Click \"Add Account\"\n* Provide all the necessary data\n* Click \"Save\"\n* Enjoy!\n\nAfter the integration is installed and configured, you can easily use it in triggers / views / etc." }, "loading": "Welcome to the Docker Hub Repository Comments App", "fetch": { "done": "Good", "fail": "Failed to fetch information from the server" }, "id": "ID", "email": "Email", "name": "Name", "role": "Role", "groups": "Groups" }
json
/*
 * @Author: <EMAIL>
 * @Last Modified time: 2016-08-31 21:35:25
 */
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <sys/msg.h>

/* Message layout shared with the sender: a type tag plus a text payload. */
struct msg_st {
    long int msg_type;
    char text[BUFSIZ];
};

int main()
{
    struct msg_st data;
    long int msgtype = 2;   /* only receive messages with type 2 */
    int msgid = -1;

    /* Create (or attach to) the message queue keyed 1234. */
    msgid = msgget((key_t)1234, 0666 | IPC_CREAT);
    if (msgid == -1) {
        fprintf(stderr, "Msgget failed with error: %d\n", errno);
        exit(EXIT_FAILURE);
    }

    /* Drain messages from the queue until an "end" message arrives. */
    for (;;) {
        if (msgrcv(msgid, (void*)&data, BUFSIZ, msgtype, 0) == -1) {
            fprintf(stderr, "Msgrcv failed with errno: %d\n", errno);
            exit(EXIT_FAILURE);
        }
        printf("You wrote: %s\n", data.text);
        /* "end" terminates the receive loop */
        if (strncmp(data.text, "end", 3) == 0) {
            break;
        }
    }

    /* Remove the queue now that we are done with it. */
    if (msgctl(msgid, IPC_RMID, 0) == -1) {
        fprintf(stderr, "Msgctl(IPC_RMID) failed\n");
        exit(EXIT_FAILURE);
    }
    exit(EXIT_SUCCESS);
}
cpp
Henry Cejudo will make his much anticipated return to the octagon in May at UFC 288, three years after relinquishing his double-champ status. 'Triple C' appears to have suffered a cut under his left eye, which was evident in the latest video on his YouTube channel. The cut has some swelling around it, but the wound seems to be stitched up and en route to recovery. It looks like Cejudo is well on his way to fighting Aljamain Sterling at UFC 288, on May 6th, at the Prudential Center in Newark, New Jersey. Henry Cejudo gave his thoughts on the bantamweight division in the latest episode of his YouTube podcast/analysis channel. Here's what 'Triple C' said: "Is he just going to wait for a title shot? Personally, I want [to fight] Sean O'Malley, and you know why? There's a bunch of pay-per-view points and I'm here to make money, ladies and gentlemen." 'Triple C' beat Dominick Cruz to retain his bantamweight title at UFC 249, in May 2020, and announced his retirement in his post-fight speech. He will now face Aljamain Sterling for the bantamweight title in his comeback fight. The event will be co-headlined by Beneil Dariush and Charles Oliveira, who will possibly fight for title contention at lightweight. The event will also feature the return of Kron Gracie, son of the famed Rickson Gracie, as he faces Charles Jourdain in his third promotional fight. There is no love lost between former two-division champion Henry Cejudo and current bantamweight champion Aljamain Sterling. 'Triple C' and 'Funkmaster' went head-to-head in an interview with former two-division champ Daniel Cormier. Sterling and Cejudo went at each other, constantly talking over each other, with Cormier unable to effectively moderate the interview. Henry Cejudo didn't hold back when it came to shaming the Jamaican-American champion. Here's what Cejudo said: "He won the bout [and the title against Petr Yan] via Academy Award. He won the second fight [against Yan] via, I don't know what it was, judges' guilt. 
And then he couldn't beat a guy that I beat in 32 seconds on EPO. And it took him freaking almost two rounds to get the little one [TJ Dillashaw] with one arm! "DC, I'm a different cat. I beat Demetrious Johnson, I beat the greatest bantamweight of all time in Dominick Cruz. I beat a dude on EPO. I got rid of Marlon Moraes [and] knocked him the h*ll out." Skip to 9:20 for Henry Cejudo thrashing Aljamain Sterling:
english
// Copyright (c) 2007-2014 <NAME>
// Copyright (c) 2015-2016 <NAME>
// Copyright (c) 2007 <NAME>
// Copyright (c) 2007 Alexandre (aka Alex) TABBAL
// Copyright (c) 2011 <NAME>
//
// SPDX-License-Identifier: BSL-1.0
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#if !defined(HPX_PARCELSET_PARCEL_MAR_26_2008_1051AM)
#define HPX_PARCELSET_PARCEL_MAR_26_2008_1051AM

#include <hpx/config.hpp>

// The whole header is compiled out when HPX is built without networking.
#if defined(HPX_HAVE_NETWORKING)
#include <hpx/runtime/actions_fwd.hpp>
#include <hpx/runtime/naming_fwd.hpp>
#include <hpx/runtime/naming/address.hpp>
#include <hpx/runtime/naming/name.hpp>
#include <hpx/runtime/parcelset_fwd.hpp>
#include <hpx/serialization/serialization_fwd.hpp>
#include <hpx/serialization/traits/is_bitwise_serializable.hpp>

#include <cstddef>
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <utility>

#include <hpx/config/warnings_prefix.hpp>

// Forward declarations so this header need not pull in the full
// serialization / parcelset policy headers.
namespace hpx { namespace serialization {
    struct binary_filter;
}}
namespace hpx { namespace parcelset { namespace policies {
    struct message_handler;
}}}

namespace hpx { namespace parcelset {
    namespace detail {
        // Plain-data part of a parcel: destination, resolved address and
        // bookkeeping fields.  Declared bitwise-serializable below.
        struct parcel_data
        {
        public:
            parcel_data();
            parcel_data(naming::gid_type&& dest, naming::address&& addr,
                bool has_continuation);
            parcel_data(parcel_data && rhs);
            parcel_data& operator=(parcel_data && rhs);

            // Serialization entry point (archive-type agnostic).
            template <typename Archive>
            void serialize(Archive &ar, unsigned);

#if defined(HPX_HAVE_PARCEL_PROFILING)
            // Extra per-parcel profiling data (id and timestamps); only
            // present when parcel profiling is compiled in.
            naming::gid_type parcel_id_;
            double start_time_;
            double creation_time_;
#endif

            naming::gid_type source_id_;      // locality/component the parcel came from
            naming::gid_type dest_;           // destination gid
            naming::address addr_;            // resolved destination address
            bool has_continuation_;           // whether the action carries a continuation
        };
    }

    // A parcel bundles an action with its destination and is the unit of
    // data shipped between localities by the parcel layer.
    class HPX_EXPORT parcel
    {
    private:
        // Maps original gids to the gids that were split off during
        // serialization (credit splitting).
        using split_gids_type =
            std::map<naming::gid_type const*, naming::gid_type>;

#if defined(HPX_DEBUG)
        bool is_valid() const;
#else
        // Only used in debug mode.
        bool is_valid() const
        {
            return true;
        }
#endif

    public:
        parcel();
        ~parcel();

    private:
        // Only detail::create_parcel may construct a fully-populated parcel.
        parcel(
            naming::gid_type&& dest,
            naming::address&& addr,
            std::unique_ptr<actions::base_action> act
        );

        friend struct detail::create_parcel;

    public:
        // Move-only: parcels own their action and cannot be copied.
        parcel(parcel && other);
        parcel &operator=(parcel && other);

        void reset();

        actions::base_action *get_action() const;

        naming::id_type source_id() const;
        void set_source_id(naming::id_type const & source_id);
        void set_destination_id(naming::gid_type&& dest);

        naming::gid_type const& destination() const;

        naming::address const& addr() const;
        naming::address& addr();

        std::uint32_t destination_locality_id() const;
        naming::gid_type const& destination_locality() const;

        double start_time() const;
        void set_start_time(double time);
        double creation_time() const;

        threads::thread_priority get_thread_priority() const;

#if defined(HPX_HAVE_PARCEL_PROFILING)
        naming::gid_type const parcel_id() const;
        naming::gid_type & parcel_id();
#endif

        serialization::binary_filter* get_serialization_filter() const;

        policies::message_handler* get_message_handler(
            parcelset::parcelhandler* ph, locality const& loc) const;

        bool does_termination_detection() const;

        split_gids_type move_split_gids() const;
        void set_split_gids(split_gids_type&& split_gids);

        // Cached serialization metadata (chunk count and payload size).
        std::size_t const& num_chunks() const;
        std::size_t & num_chunks();

        std::size_t const& size() const;
        std::size_t & size();

        void schedule_action(std::size_t num_thread = std::size_t(-1));

        // returns true if parcel was migrated, false if scheduled locally
        bool load_schedule(serialization::input_archive & ar,
            std::size_t num_thread, bool& deferred_schedule);

        // generate unique parcel id
        static naming::gid_type generate_unique_id(
            std::uint32_t locality_id = naming::invalid_locality_id);

    private:
        friend std::ostream& operator<< (std::ostream& os, parcel const& req);

        // serialization support
        friend class hpx::serialization::access;

        void load_data(serialization::input_archive & ar);
        void serialize(serialization::input_archive & ar, unsigned);
        void serialize(serialization::output_archive & ar, unsigned);

        // Resolve the local virtual address / component type of the target.
        std::pair<naming::address_type, naming::component_type>
            determine_lva();

        detail::parcel_data data_;
        std::unique_ptr<actions::base_action> action_;
        mutable split_gids_type split_gids_;
        std::size_t size_;
        std::size_t num_chunks_;
    };

    // Render a human-readable description of the parcel (for logging).
    HPX_EXPORT std::string dump_parcel(parcel const& p);
}}

HPX_IS_BITWISE_SERIALIZABLE(hpx::parcelset::detail::parcel_data)

#include <hpx/config/warnings_suffix.hpp>

#endif
#endif
cpp
The Station is your central hub for all past, present and future means of moving people and packages from Point A to Point B. Autonomous vehicle startup Pony.ai has been awarded a permit in Guangzhou to operate 100 robotaxis as traditional taxis. The license, which allows Pony to charge for rides in its autonomous vehicles, Hello and welcome back to TechCrunch’s China roundup, a digest of recent events shaping the Chinese tech landscape and what they mean to people in the rest of the world. Welcome back to The Station, your central hub for all past, present and future means of moving people and packages from Point A to Point B.
english
We bring to you the five best stories of the Indian Express you must read before beginning your day. 1. Congress allies on the offensive: Trying to bring along what it calls like-minded secular parties to take on Narendra Modi, the ruling Congress finds itself faced with allies that are for once assertive after having seemed, until recently, reconciled to the Congress’s big-brotherly attitude. 2. The man behind ‘The Hindu’ ban: Dina Nath Batra has not been on a mission to get one book banned. A key petitioner in the case that led to publisher Penguin deciding to withdraw all copies of Wendy Doniger’s book The Hindus: An Alternative History, Batra is now in the middle of reading On Hinduism by the same author. It is next on his hit-list. 3. Kejriwal’s Janlokpal caught in legal tangle: The Union law and justice ministry told Delhi Lieutenant-Governor Najeeb Jung that the controversial Delhi Janlokpal Bill, 2014 is a financial Bill and it is “mandatory” for the government to get his approval before it is introduced in the Assembly. 4. Modi’s chai pe charcha: It was Narendra Modi in charcha, largely solitary. A glass of tea in his hand and seated on the road outside Iskcon Ganthia in Ahmedabad Wednesday, Modi turned chai pe charcha into a mostly one-way event. 5. Opinion – Kejriwal TV: It’s a question we ought to ask ourselves: how long will Kejriwal be the main feature while Narendra Modi and Rahul Gandhi, forget all the other politicians in the country, act as fillers?
english
Shall you take me to the world of music, where birds manufacture colorful sounds? Rainbows lit up the hasty clouds with their hip-dance, the cosmic rays embellish the sky! The moon showers a silver aroma! The stars dance, twinkle, and celebrate Christmas. Nimble rivers turn their blue page to unravel the kaleidoscopic array of coltish fishes. The dewy evenings smell of nightingale’s melody, and there is the charm the roses create! Love is diffused and laden all-around in red saffron balloons, the hanging flutes dance in the air! In this world, sound and music sways multi-chrome and speak a language of melodious musical love! Shall you escort me to a euphonious paradise, where time has lost to everything timeless?
english
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.cdn.implementation; import java.util.List; import com.microsoft.azure.management.cdn.QueryStringCachingBehavior; import com.microsoft.azure.management.cdn.OptimizationType; import com.microsoft.azure.management.cdn.GeoFilter; import com.microsoft.azure.management.cdn.ResourceReference; import com.microsoft.azure.management.cdn.UrlSigningKey; import com.microsoft.azure.management.cdn.EndpointPropertiesUpdateParametersDeliveryPolicy; import com.microsoft.azure.management.cdn.EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink; import com.microsoft.azure.management.cdn.DeepCreatedOrigin; import com.microsoft.azure.management.cdn.DeepCreatedOriginGroup; import com.microsoft.azure.management.cdn.EndpointResourceState; import com.fasterxml.jackson.annotation.JsonProperty; import com.microsoft.rest.serializer.JsonFlatten; import com.microsoft.azure.Resource; /** * CDN endpoint is the entity within a CDN profile containing configuration * information such as origin, protocol, content caching and delivery behavior. * The CDN endpoint uses the URL format &lt;endpointname&gt;.azureedge.net. */ @JsonFlatten public class EndpointInner extends Resource { /** * A directory path on the origin that CDN can use to retrieve content * from, e.g. contoso.cloudapp.net/originpath. */ @JsonProperty(value = "properties.originPath") private String originPath; /** * List of content types on which compression applies. The value should be * a valid MIME type. */ @JsonProperty(value = "properties.contentTypesToCompress") private List<String> contentTypesToCompress; /** * The host header value sent to the origin with each request. 
This * property at Endpoint is only allowed when endpoint uses single origin * and can be overridden by the same property specified at origin.If you * leave this blank, the request hostname determines this value. Azure CDN * origins, such as Web Apps, Blob Storage, and Cloud Services require this * host header value to match the origin hostname by default. */ @JsonProperty(value = "properties.originHostHeader") private String originHostHeader; /** * Indicates whether content compression is enabled on CDN. Default value * is false. If compression is enabled, content will be served as * compressed if user requests for a compressed version. Content won't be * compressed on CDN when requested content is smaller than 1 byte or * larger than 1 MB. */ @JsonProperty(value = "properties.isCompressionEnabled") private Boolean isCompressionEnabled; /** * Indicates whether HTTP traffic is allowed on the endpoint. Default value * is true. At least one protocol (HTTP or HTTPS) must be allowed. */ @JsonProperty(value = "properties.isHttpAllowed") private Boolean isHttpAllowed; /** * Indicates whether HTTPS traffic is allowed on the endpoint. Default * value is true. At least one protocol (HTTP or HTTPS) must be allowed. */ @JsonProperty(value = "properties.isHttpsAllowed") private Boolean isHttpsAllowed; /** * Defines how CDN caches requests that include query strings. You can * ignore any query strings when caching, bypass caching to prevent * requests that contain query strings from being cached, or cache every * request with a unique URL. Possible values include: 'IgnoreQueryString', * 'BypassCaching', 'UseQueryString', 'NotSet'. */ @JsonProperty(value = "properties.queryStringCachingBehavior") private QueryStringCachingBehavior queryStringCachingBehavior; /** * Specifies what scenario the customer wants this CDN endpoint to optimize * for, e.g. Download, Media services. With this information, CDN can apply * scenario driven optimization. 
Possible values include: * 'GeneralWebDelivery', 'GeneralMediaStreaming', * 'VideoOnDemandMediaStreaming', 'LargeFileDownload', * 'DynamicSiteAcceleration'. */ @JsonProperty(value = "properties.optimizationType") private OptimizationType optimizationType; /** * Path to a file hosted on the origin which helps accelerate delivery of * the dynamic content and calculate the most optimal routes for the CDN. * This is relative to the origin path. This property is only relevant when * using a single origin. */ @JsonProperty(value = "properties.probePath") private String probePath; /** * List of rules defining the user's geo access within a CDN endpoint. Each * geo filter defines an access rule to a specified path or content, e.g. * block APAC for path /pictures/. */ @JsonProperty(value = "properties.geoFilters") private List<GeoFilter> geoFilters; /** * A reference to the origin group. */ @JsonProperty(value = "properties.defaultOriginGroup") private ResourceReference defaultOriginGroup; /** * List of keys used to validate the signed URL hashes. */ @JsonProperty(value = "properties.urlSigningKeys") private List<UrlSigningKey> urlSigningKeys; /** * A policy that specifies the delivery rules to be used for an endpoint. */ @JsonProperty(value = "properties.deliveryPolicy") private EndpointPropertiesUpdateParametersDeliveryPolicy deliveryPolicy; /** * Defines the Web Application Firewall policy for the endpoint (if * applicable). */ @JsonProperty(value = "properties.webApplicationFirewallPolicyLink") private EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink webApplicationFirewallPolicyLink; /** * The host name of the endpoint structured as {endpointName}.{DNSZone}, * e.g. contoso.azureedge.net. */ @JsonProperty(value = "properties.hostName", access = JsonProperty.Access.WRITE_ONLY) private String hostName; /** * The source of the content being delivered via CDN. 
*/ @JsonProperty(value = "properties.origins", required = true) private List<DeepCreatedOrigin> origins; /** * The origin groups comprising of origins that are used for load balancing * the traffic based on availability. */ @JsonProperty(value = "properties.originGroups") private List<DeepCreatedOriginGroup> originGroups; /** * Resource status of the endpoint. Possible values include: 'Creating', * 'Deleting', 'Running', 'Starting', 'Stopped', 'Stopping'. */ @JsonProperty(value = "properties.resourceState", access = JsonProperty.Access.WRITE_ONLY) private EndpointResourceState resourceState; /** * Provisioning status of the endpoint. */ @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY) private String provisioningState; /** * Get a directory path on the origin that CDN can use to retrieve content from, e.g. contoso.cloudapp.net/originpath. * * @return the originPath value */ public String originPath() { return this.originPath; } /** * Set a directory path on the origin that CDN can use to retrieve content from, e.g. contoso.cloudapp.net/originpath. * * @param originPath the originPath value to set * @return the EndpointInner object itself. */ public EndpointInner withOriginPath(String originPath) { this.originPath = originPath; return this; } /** * Get list of content types on which compression applies. The value should be a valid MIME type. * * @return the contentTypesToCompress value */ public List<String> contentTypesToCompress() { return this.contentTypesToCompress; } /** * Set list of content types on which compression applies. The value should be a valid MIME type. * * @param contentTypesToCompress the contentTypesToCompress value to set * @return the EndpointInner object itself. */ public EndpointInner withContentTypesToCompress(List<String> contentTypesToCompress) { this.contentTypesToCompress = contentTypesToCompress; return this; } /** * Get the host header value sent to the origin with each request. 
This property at Endpoint is only allowed when endpoint uses single origin and can be overridden by the same property specified at origin. If you leave this blank, the request hostname determines this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this host header value to match the origin hostname by default. * * @return the originHostHeader value */ public String originHostHeader() { return this.originHostHeader; } /** * Set the host header value sent to the origin with each request. This property at Endpoint is only allowed when endpoint uses single origin and can be overridden by the same property specified at origin. If you leave this blank, the request hostname determines this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this host header value to match the origin hostname by default. * * @param originHostHeader the originHostHeader value to set * @return the EndpointInner object itself. */ public EndpointInner withOriginHostHeader(String originHostHeader) { this.originHostHeader = originHostHeader; return this; } /** * Get indicates whether content compression is enabled on CDN. Default value is false. If compression is enabled, content will be served as compressed if user requests for a compressed version. Content won't be compressed on CDN when requested content is smaller than 1 byte or larger than 1 MB. * * @return the isCompressionEnabled value */ public Boolean isCompressionEnabled() { return this.isCompressionEnabled; } /** * Set indicates whether content compression is enabled on CDN. Default value is false. If compression is enabled, content will be served as compressed if user requests for a compressed version. Content won't be compressed on CDN when requested content is smaller than 1 byte or larger than 1 MB. * * @param isCompressionEnabled the isCompressionEnabled value to set * @return the EndpointInner object itself. 
*/ public EndpointInner withIsCompressionEnabled(Boolean isCompressionEnabled) { this.isCompressionEnabled = isCompressionEnabled; return this; } /** * Get indicates whether HTTP traffic is allowed on the endpoint. Default value is true. At least one protocol (HTTP or HTTPS) must be allowed. * * @return the isHttpAllowed value */ public Boolean isHttpAllowed() { return this.isHttpAllowed; } /** * Set indicates whether HTTP traffic is allowed on the endpoint. Default value is true. At least one protocol (HTTP or HTTPS) must be allowed. * * @param isHttpAllowed the isHttpAllowed value to set * @return the EndpointInner object itself. */ public EndpointInner withIsHttpAllowed(Boolean isHttpAllowed) { this.isHttpAllowed = isHttpAllowed; return this; } /** * Get indicates whether HTTPS traffic is allowed on the endpoint. Default value is true. At least one protocol (HTTP or HTTPS) must be allowed. * * @return the isHttpsAllowed value */ public Boolean isHttpsAllowed() { return this.isHttpsAllowed; } /** * Set indicates whether HTTPS traffic is allowed on the endpoint. Default value is true. At least one protocol (HTTP or HTTPS) must be allowed. * * @param isHttpsAllowed the isHttpsAllowed value to set * @return the EndpointInner object itself. */ public EndpointInner withIsHttpsAllowed(Boolean isHttpsAllowed) { this.isHttpsAllowed = isHttpsAllowed; return this; } /** * Get defines how CDN caches requests that include query strings. You can ignore any query strings when caching, bypass caching to prevent requests that contain query strings from being cached, or cache every request with a unique URL. Possible values include: 'IgnoreQueryString', 'BypassCaching', 'UseQueryString', 'NotSet'. * * @return the queryStringCachingBehavior value */ public QueryStringCachingBehavior queryStringCachingBehavior() { return this.queryStringCachingBehavior; } /** * Set defines how CDN caches requests that include query strings. 
You can ignore any query strings when caching, bypass caching to prevent requests that contain query strings from being cached, or cache every request with a unique URL. Possible values include: 'IgnoreQueryString', 'BypassCaching', 'UseQueryString', 'NotSet'. * * @param queryStringCachingBehavior the queryStringCachingBehavior value to set * @return the EndpointInner object itself. */ public EndpointInner withQueryStringCachingBehavior(QueryStringCachingBehavior queryStringCachingBehavior) { this.queryStringCachingBehavior = queryStringCachingBehavior; return this; } /** * Get specifies what scenario the customer wants this CDN endpoint to optimize for, e.g. Download, Media services. With this information, CDN can apply scenario driven optimization. Possible values include: 'GeneralWebDelivery', 'GeneralMediaStreaming', 'VideoOnDemandMediaStreaming', 'LargeFileDownload', 'DynamicSiteAcceleration'. * * @return the optimizationType value */ public OptimizationType optimizationType() { return this.optimizationType; } /** * Set specifies what scenario the customer wants this CDN endpoint to optimize for, e.g. Download, Media services. With this information, CDN can apply scenario driven optimization. Possible values include: 'GeneralWebDelivery', 'GeneralMediaStreaming', 'VideoOnDemandMediaStreaming', 'LargeFileDownload', 'DynamicSiteAcceleration'. * * @param optimizationType the optimizationType value to set * @return the EndpointInner object itself. */ public EndpointInner withOptimizationType(OptimizationType optimizationType) { this.optimizationType = optimizationType; return this; } /** * Get path to a file hosted on the origin which helps accelerate delivery of the dynamic content and calculate the most optimal routes for the CDN. This is relative to the origin path. This property is only relevant when using a single origin. 
* * @return the probePath value */ public String probePath() { return this.probePath; } /** * Set path to a file hosted on the origin which helps accelerate delivery of the dynamic content and calculate the most optimal routes for the CDN. This is relative to the origin path. This property is only relevant when using a single origin. * * @param probePath the probePath value to set * @return the EndpointInner object itself. */ public EndpointInner withProbePath(String probePath) { this.probePath = probePath; return this; } /** * Get list of rules defining the user's geo access within a CDN endpoint. Each geo filter defines an access rule to a specified path or content, e.g. block APAC for path /pictures/. * * @return the geoFilters value */ public List<GeoFilter> geoFilters() { return this.geoFilters; } /** * Set list of rules defining the user's geo access within a CDN endpoint. Each geo filter defines an access rule to a specified path or content, e.g. block APAC for path /pictures/. * * @param geoFilters the geoFilters value to set * @return the EndpointInner object itself. */ public EndpointInner withGeoFilters(List<GeoFilter> geoFilters) { this.geoFilters = geoFilters; return this; } /** * Get a reference to the origin group. * * @return the defaultOriginGroup value */ public ResourceReference defaultOriginGroup() { return this.defaultOriginGroup; } /** * Set a reference to the origin group. * * @param defaultOriginGroup the defaultOriginGroup value to set * @return the EndpointInner object itself. */ public EndpointInner withDefaultOriginGroup(ResourceReference defaultOriginGroup) { this.defaultOriginGroup = defaultOriginGroup; return this; } /** * Get list of keys used to validate the signed URL hashes. * * @return the urlSigningKeys value */ public List<UrlSigningKey> urlSigningKeys() { return this.urlSigningKeys; } /** * Set list of keys used to validate the signed URL hashes. 
* * @param urlSigningKeys the urlSigningKeys value to set * @return the EndpointInner object itself. */ public EndpointInner withUrlSigningKeys(List<UrlSigningKey> urlSigningKeys) { this.urlSigningKeys = urlSigningKeys; return this; } /** * Get a policy that specifies the delivery rules to be used for an endpoint. * * @return the deliveryPolicy value */ public EndpointPropertiesUpdateParametersDeliveryPolicy deliveryPolicy() { return this.deliveryPolicy; } /** * Set a policy that specifies the delivery rules to be used for an endpoint. * * @param deliveryPolicy the deliveryPolicy value to set * @return the EndpointInner object itself. */ public EndpointInner withDeliveryPolicy(EndpointPropertiesUpdateParametersDeliveryPolicy deliveryPolicy) { this.deliveryPolicy = deliveryPolicy; return this; } /** * Get defines the Web Application Firewall policy for the endpoint (if applicable). * * @return the webApplicationFirewallPolicyLink value */ public EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink webApplicationFirewallPolicyLink() { return this.webApplicationFirewallPolicyLink; } /** * Set defines the Web Application Firewall policy for the endpoint (if applicable). * * @param webApplicationFirewallPolicyLink the webApplicationFirewallPolicyLink value to set * @return the EndpointInner object itself. */ public EndpointInner withWebApplicationFirewallPolicyLink(EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink webApplicationFirewallPolicyLink) { this.webApplicationFirewallPolicyLink = webApplicationFirewallPolicyLink; return this; } /** * Get the host name of the endpoint structured as {endpointName}.{DNSZone}, e.g. contoso.azureedge.net. * * @return the hostName value */ public String hostName() { return this.hostName; } /** * Get the source of the content being delivered via CDN. 
* * @return the origins value */ public List<DeepCreatedOrigin> origins() { return this.origins; } /** * Set the source of the content being delivered via CDN. * * @param origins the origins value to set * @return the EndpointInner object itself. */ public EndpointInner withOrigins(List<DeepCreatedOrigin> origins) { this.origins = origins; return this; } /** * Get the origin groups comprising of origins that are used for load balancing the traffic based on availability. * * @return the originGroups value */ public List<DeepCreatedOriginGroup> originGroups() { return this.originGroups; } /** * Set the origin groups comprising of origins that are used for load balancing the traffic based on availability. * * @param originGroups the originGroups value to set * @return the EndpointInner object itself. */ public EndpointInner withOriginGroups(List<DeepCreatedOriginGroup> originGroups) { this.originGroups = originGroups; return this; } /** * Get resource status of the endpoint. Possible values include: 'Creating', 'Deleting', 'Running', 'Starting', 'Stopped', 'Stopping'. * * @return the resourceState value */ public EndpointResourceState resourceState() { return this.resourceState; } /** * Get provisioning status of the endpoint. * * @return the provisioningState value */ public String provisioningState() { return this.provisioningState; } }
java
{"name":"<NAME>","id":"BY-7773","address":"Holzgraben 13 97350 Mainbernheim","school_type":"Grundschule","fax":"09323 6285","phone":"09323 1222","website":"http://www.vs-mainbernheim.de","state":"BY","programs":{"programs":[]},"full_time_school":false,"lon":10.219378,"lat":49.711803}
json
The Yamuna Expressway Industrial Development Authority (YEIDA) is in the news for its plan to extract a whopping Rs. 4500 crores from Builders as pending dues for their respective projects in the area. This move has raised concerns among builders and property buyers, who fear that the cost of real estate in the area will go up. However, Yeida has defended its decision, saying that the money will be used for the development of infrastructure in the area. The registries have been a burning topic since forever in the real estate world. Many a time, it is seen that it is the home buyers that suffer due to incomplete registries. Collection of the pending dues will open the door for the home buyers to have their dream home. Out of the total amount, Rs. 2,753 crores will be collected as land premium, Rs. 1,408 crores as additional compensation for farmers, and Rs. 296 crores as lease rent. The decision to extract this amount was taken after a recent board meeting of YEIDA. The development of infrastructure in the area is long overdue. The Yamuna Expressway is a critical transport link between Delhi and Agra, and its development has been on the agenda of the Uttar Pradesh government for a long time. The collection of Rs. 2,753 crores as land premium is also justified, as the land in the area is valuable and has appreciated significantly in recent years. This move will help the builders and their projects get their OC and CC and will most likely allow them to start with the registries of the properties. This has by far been the major thing of concern for the people who are keen on getting their dream homes. How Will YEIDA Collect the Dues? YEIDA has introduced a one-time payment scheme to collect the pending dues from the builders. The decision by the Yamuna Expressway Industrial Development Authority (YEIDA) to collect Rs. 4500 crores from builders has been met with mixed reactions. 
While some are concerned about the impact it may have on the real estate market, others believe that it could have some positive benefits. One such benefit is the savings that builders may make on interest payments. Typically, builders finance their projects through loans from banks or financial institutions. These loans come with an interest rate, which adds to the overall cost of the project. The interest payments can be a significant burden on the builder, especially if the project is delayed, and the loan repayment is extended. If they choose to go forward with the scheme, they do not have to pay the interests that they were supposed to pay earlier. This reduction in interest payments can be significant, and builders can save a considerable amount of money in the long run. These savings can be used for other purposes such as completing the project faster, enhancing the quality of construction, or even passing on the benefits to the customers in the form of lower prices. Some of the biggest defaulters in the list include: - Orris Group (Sec-22D) - Sunworld Infrastructure (Sec 22-D) - ATS Realty (Sec 22-D) - Supertech Township (Sec 22-D) - Greenbay Infrastructure (Sec 22-D) - Logix Buildestate (Sec 22-D) - SDS Infracon (Sec 26-A) - Supertech Ltd (Sec 26-A) In conclusion, the decision by YEIDA to extract Rs. 4500 crores from builders has both positive and negative implications. While the development of infrastructure in the area is essential, the collection of additional compensation for farmers and lease rent may have a negative impact on the real estate market. YEIDA needs to ensure that the money collected is used efficiently and transparently for the development of the area, and that the interests of all stakeholders are taken into consideration.
english
The government has granted loan waivers to 783 farmers in the district who were initially excluded from the Mahatma Phule Loan Waiver Scheme. These farmers have now received a total loan waiver amounting to Rs 4.17 crore. The district bank received an official order from the cooperative department on Wednesday afternoon, facilitating the implementation of this scheme. Among the beneficiaries, Jamner taluka has the highest number of farmers, with 677 individuals, followed by Chalisgaon taluka with 70 farmers. Dharangaon taluka, on the other hand, has only one farmer who has been granted the loan waiver. Under the Mahatma Jyotirao Phule Kisan Loan Waiver Scheme, beneficiary farmers are eligible for a loan waiver of up to Rs 2 lakh. This scheme extends its benefits to farmers engaged in cultivating cotton, banana, sugarcane, orchards, as well as those involved in traditional farming practices. Rural Development Minister Girish Mahajan played a crucial role in advocating for loan waivers, resulting in the maximum benefit being provided to 677 farmers.
english
Despite GTA Online fans being bored of playing the same game for almost a decade, its recently released spinoff, GTA RP, has yet to die out in terms of popularity. Roleplaying vogue continues to rise as fans tune in to watch their favorite streamers on Twitch. This article picks out the top 5 most-watched roleplayers on Twitch as of February 2022. Penta roleplays as Randy Wrangler in NoPixel, a police officer who is perhaps the best-known cop character on the server, and the most hated one. Fans love to hate his questionable antics and his less-than-acceptable methods of investigation. Penta was recently banned from NoPixel for reasons not fully explained, although this was only a matter of time. He has often been complained about, by RP fans and other roleplayers alike. Sykkuno has taken breaks from GTA RP from time to time, but this hasn't resulted in a waning of his popularity. Every time he begins streaming the game on the NoPixel server, fans fall in love with his character, Yuno Sykk. He plays the role of a genius robber and expert hacker, who, despite being a hardened criminal, is always polite. His real-life personality spills over to his RP character and as such, he has gained a large fanbase. Buddha has been one of the major roleplayers in the NoPixel community. His character, Lang Buddha, is one of the most powerful crime bosses in the city, and he has an equally intriguing personality to match his position. Buddha's roleplaying skills are exceptional, and have often been praised by fans. This may be why he has consistently managed to come out on top in terms of Twitch viewership, time and time again. Loud Coringa has been the most popular Brazilian RP streamer ever since he started streaming the game. In fact, he has often come out on top as the most popular RP streamer and has only recently been beaten by xQc. He plays on the Cidade Alta server, which is extremely popular in the Spanish/Portuguese-speaking world. 
xQc continues to be the biggest GTA RP channel even in 2022. He has one of the most dedicated fanbases among other roleplaying streamers, and they're willing to follow him to whichever server he joins. xQc has often stirred up controversy while roleplaying on NoPixel, and this has resulted in him getting banned on multiple occasions. He has roleplayed several characters, which are all very similar to each other. He is mostly known for pulling off insane heists in NoPixel, and his roleplaying isn't always serious. Note: This article reflects the writer's opinions. For The Biggest GTA 6 Map Leaks, Click Here.
english
{"nft":{"id":1059,"image":"ipfs://QmfMKpvy28x4aqXMBvaHAa5SpKMMA95yq4GK4FW1Qe6MfD","attributes":[{"trait_type":"Clothes","value":"Space Suit"},{"trait_type":"Earring","value":"Gold Stud"},{"trait_type":"Fur","value":"Pink"},{"trait_type":"Background","value":"Purple"},{"trait_type":"Mouth","value":"Phoneme Vuh"},{"trait_type":"Eyes","value":"Bored"},{"trait_type":"Trait Count","value":"6"},{"trait_type":"Hat","value":"[Missing]"}]},"attributeRarities":[{"trait_type":"Clothes","value":"Space Suit","count":105,"ratio":0.0105,"ratioScore":95.23809523809523},{"trait_type":"Earring","value":"Gold Stud","count":439,"ratio":0.0439,"ratioScore":22.779043280182233},{"trait_type":"Fur","value":"Pink","count":511,"ratio":0.0511,"ratioScore":19.569471624266146},{"trait_type":"Background","value":"Purple","count":1291,"ratio":0.1291,"ratioScore":7.74593338497289},{"trait_type":"Mouth","value":"Phoneme Vuh","count":333,"ratio":0.0333,"ratioScore":30.030030030030026},{"trait_type":"Eyes","value":"Bored","count":1714,"ratio":0.1714,"ratioScore":5.834305717619603},{"trait_type":"Trait Count","value":"6","count":5323,"ratio":0.5323,"ratioScore":1.8786398647379297},{"trait_type":"Hat","value":"[Missing]","count":2256,"ratio":0.2256,"ratioScore":4.432624113475177}],"rarityScore":187.50814325337922,"rank":3350}
json
import typing_extensions as te


class ASGIApp(te.Protocol):
    """Structural type for an ASGI application callable.

    Matches any object callable as ``app(scope, receive, send)``.
    """

    def __call__(self, scope, receive, send):
        raise NotImplementedError


class WSGIApp(te.Protocol):
    """Structural type for a WSGI application callable (PEP 3333 shape).

    Matches any object callable as ``app(environ, start_response)``.
    """

    def __call__(self, environ, start_response):
        raise NotImplementedError
python
<filename>package.json { "name": "react-arkit", "version": "0.1.8", "description": "AR features for React Native, based on ARKit", "main": "index.js", "homepage": "https://github.com/icanb/react-arkit/", "scripts": { "test": "echo \"Error: no test specified\" && exit 1" }, "keywords": [ "react-native", "arkit", "react-arkit", "ar", "augmented-reality" ], "devDependencies": { "react": "16.0.0-alpha.12" }, "repository": { "type": "git", "url": "<EMAIL>:icanb/react-arkit.git" }, "author": "<NAME> (@icanb)", "license": "MIT License" }
json