text
stringlengths
1
1.04M
language
stringclasses
25 values
<gh_stars>0 [{"title": "Fifty famous painters by <NAME> ", "author": "<NAME>, 1895-", "id": "000345530"}, {"title": "Fifty famous painters by <NAME> ", "author": "<NAME>, 1895-", "id": "000345530"}]
json
Cracking Civil Services in the very first attempt is no mean task! UPSC 2015 results are out – Guess who at CD made into the list? UPSC results 2016 out! http://zeenews.india.com/news/india/upsc-declares-final-result-of-civil-services-examination-2015-tina-dabi-secures-1st-rank_1883793.html Kiss kiss ka hua hai/ jaan pechaan/ or if there is someone in the platform over here – please... Continue reading UPSC results 2016 out! If I were a legislator, I would call this a bare act! If you go to a desert, you will hear this mysterious voice: Be wise, protect your forests! You shall soon enter the last lap of the Prelims preparation. It is time to take a stock, plan, execute and win! Can’t read news on sunday? any problem? Find the solution pdf below: https://drive.google.com/file/d/0BxdqUlW1zhMsMzBkbE4xQzVHNUE/view Maze 15 tomorrow.
english
# Module 3 practice: iterating a list of dictionaries.
# (Earlier commented-out counties / counties_dict drills removed.)

# Each element is one county's voter-registration record.
voting_data = [
    {"county": "Arapahoe", "registered_voters": 422829},
    {"county": "Denver", "registered_voters": 463353},
    {"county": "Jefferson", "registered_voters": 432438},
]

# Iterate the list directly: each item is a dict, so index it by key.
# (The original `for county, voters in voting_data` unpacked each dict
# into its two KEY strings, and the quoted f-string placeholders
# `{'county'}` / `{'voters'}` printed the literal words instead of values.)
for county_dict in voting_data:
    print(
        f"{county_dict['county']} county has "
        f"{county_dict['registered_voters']:,} registered voters."
    )
python
13 (A)Be pleased, O Lord, to deliver me; O Lord, make haste to help me! Who seek to destroy my [a]life; Who wish me evil. 15 Let them be (C)confounded because of their shame, 16 (D)Let all those who seek You rejoice and be glad in You; 16 (D)Let all those who seek You rejoice and be glad in You; Let such as love Your salvation (E)say continually, 17 (F)But I am poor and needy; (G)Yet the Lord thinks upon me. You are my help and my deliverer; Do not delay, O my God. New King James Version (NKJV) Scripture taken from the New King James Version®. Copyright © 1982 by Thomas Nelson. Used by permission. All rights reserved.
english
# propara/utils/prostruct_predicted_json_to_tsv_grid.py
#
# Converts the json format generated by ProparaPredictor into partial-grid
# TSV format.
#
# Input (one json object per line), e.g.:
#   "para_id": "1114",
#   "sentence_texts": ["Rainwater falls onto the soil.", "The rainwater seeps into the soil.", ...],
#   "participants": ["rainwater; water", "bedrock", "funnels", "caves"],
#   "states": [["?", "soil", "soil", "bedrock", "bedrock", "bedrock", "bedrock", "bedrock"], ...],
#   "predicted_actions": ["MOVE", "NONE", "NONE", "NONE", "MOVE", ..., "CREATE", "CREATE"]
#
# Output: paraid \t sentence_id \t participant \t action \t before_val \t after_val
import json
import sys

from processes.data.propara_dataset_reader import Action


def get_before_after_val(action: Action,
                         predicted_before_location: str,
                         predicted_after_location: str):
    """Map a predicted action to the (before_val, after_val) grid cells.

    CREATE/DESTROY use '-' for "does not exist" and '?' for "unknown
    location"; MOVE passes the predicted locations through; NONE is
    unknown on both sides.
    """
    if action == Action.CREATE:
        return '-', '?'
    if action == Action.DESTROY:
        return '?', '-'
    if action == Action.MOVE:
        return predicted_before_location, predicted_after_location
    if action == Action.NONE:
        return '?', '?'
    # Fail loudly instead of silently returning None for an unhandled action.
    raise ValueError("Unknown action: {}".format(action))


def convert_predicted_json_to_partial_grids(infile_path: str, outfile_path: str):
    """Read predictor json (one object per line) and write the TSV grid.

    Each output row is:
        para_id \t sentence_id (1-based) \t participant \t action \t before \t after
    """
    # Context managers guarantee both files are closed (the original
    # version leaked the input handle and never closed the output file).
    with open(infile_path) as in_file, open(outfile_path, "w") as out_file:
        for line in in_file:
            data = json.loads(line)
            para_id = data["para_id"]
            participants = data["participants"]
            actions_sentences_participants = data["top1_original"]
            sentence_texts = data["sentence_texts"]
            num_sentences = len(sentence_texts)
            num_participants = len(participants)
            # Fall back to all-unknown locations when predictions are absent.
            if "predicted_locations" in data and len(data["predicted_locations"]) > 0:
                predicted_after_locations = data["predicted_locations"]
            else:
                predicted_after_locations = [['?' for _ in range(num_participants)]
                                             for _ in range(num_sentences)]
            for sentence_id in range(num_sentences):
                for participant_id in range(num_participants):
                    # The before-location of sentence s is the after-location
                    # of sentence s-1 (unknown for the first sentence).
                    predicted_before_location = (
                        predicted_after_locations[sentence_id - 1][participant_id]
                        if sentence_id > 0 else '?')
                    predicted_after_location = \
                        predicted_after_locations[sentence_id][participant_id]
                    action = Action(
                        actions_sentences_participants[sentence_id][participant_id])
                    before_val, after_val = get_before_after_val(
                        action, predicted_before_location, predicted_after_location)
                    out_file.write("\t".join(
                        [para_id, str(sentence_id + 1),
                         participants[participant_id], action.name,
                         before_val, after_val]) + "\n")


if __name__ == '__main__':
    infile = sys.argv[1]
    outfile = sys.argv[2]
    convert_predicted_json_to_partial_grids(infile_path=infile, outfile_path=outfile)
python
import * as React from "react" import { Button, Form } from "react-bootstrap" const CsvExport = () => ( <Form.Group className="d-flex justify-content-end"> <Button type="submit">csv出力</Button> </Form.Group> ) export default CsvExport
typescript
Two charges of sexual assault and sexual interference were withdrawn by the Crown after finding “no medical evidence” to support them. Justice Kristin Ailsby accepted a joint submission for the seven-year sentence from the Crown prosecutors and defence lawyer. An agreed statement of facts filed with the court said the man and his partner, who has also pleaded guilty to failing to provide the necessaries of life, shared their home with a roommate. “They were not certain she would live,” said Ailsby, noting there was head trauma, spinal injuries and damaged blood vessels in her eyes. Ailsby said the girl spent several months in the hospital and was discharged to the care of a foster mother in April.Read more: Father who assaulted baby girl in southern Alberta sentenced to seven years in prisonThe 53\u002Dyear\u002Dold Lethbridge man had also been charged with sexual assault and sexual interference in January. Father who assaulted baby girl in southern Alberta sentenced to seven years in prisonThe 53\u002Dyear\u002Dold Lethbridge man had also been charged with sexual assault and sexual interference in January. Father who assaulted baby girl in southern Alberta sentenced to 7 years in prison | Globalnews.caA father from southern Alberta who pleaded guilty to aggravated assault and failing to provide his baby girl with the necessaries of life has been sentenced to seven years in prison. Controversial Calgary professor sues University of Lethbridge for nixing guest lectureA controversial Calgary professor is suing the University of Lethbridge over its decision to cancel a planned guest lecture in February. Controversial Calgary professor sues University of Lethbridge for cancelling guest lectureA controversial Calgary professor is suing the University of Lethbridge over its decision to cancel a planned guest lecture in February.
english
#!/usr/bin/env python
# trimap_module.py (repo: lnugraha/trimap_generator)
# Generates a trimap (foreground / unknown / background) from a binary mask.
import cv2, os, sys
import numpy as np


def extractImage(path):
    """Load the image at *path* as grayscale.

    NOTE(review): cv2.imread returns None for a missing/unreadable path;
    the intended error handler is still a TODO, so callers crash later.
    """
    image = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
    return image


def checkImage(image):
    """
    Args:
        image: input image to be checked
    Returns:
        True if the image is strictly binary (only 0 and 255 pixels)
    Exits the process on:
        RGB image, grayscale image, all-black, and all-white image
    """
    if len(image.shape) > 2:
        print("ERROR: non-binary image (RGB)")
        sys.exit()
    smallest = image.min(axis=0).min(axis=0)  # lowest pixel value: 0 (black)
    largest = image.max(axis=0).max(axis=0)   # highest pixel value: 255 (white)
    if smallest == 0 and largest == 0:
        print("ERROR: non-binary image (all black)")
        sys.exit()
    elif smallest == 255 and largest == 255:
        print("ERROR: non-binary image (all white)")
        sys.exit()
    elif smallest > 0 or largest < 255:
        print("ERROR: non-binary image (grayscale)")
        sys.exit()
    else:
        return True


class Toolbox:
    """Small helpers around a single grayscale image."""

    def __init__(self, image):
        self.image = image

    @property
    def printImage(self):
        """
        Print image into a file for checking purpose
        unitTest = Toolbox(image); unitTest.printImage;
        """
        # `with` guarantees the file is closed (the original leaked it on error).
        with open("image_results.dat", "w+") as f:
            for i in range(0, self.image.shape[0]):
                for j in range(0, self.image.shape[1]):
                    f.write("%d " % self.image[i, j])
                f.write("\n")

    @property
    def displayImage(self):
        """
        Display the image on a window.
        Press any key to exit.
        """
        cv2.imshow('Displayed Image', self.image)
        cv2.waitKey(0)
        cv2.destroyAllWindows()

    def saveImage(self, title, extension):
        """Save as a specific image format (bmp, png, or jpeg)."""
        cv2.imwrite("{}.{}".format(title, extension), self.image)

    def morph_open(self, image, kernel):
        """
        Remove all white noises or speckles outside images.
        Need to tune the kernel size.
        Instruction:
            unit01 = Toolbox(image);
            kernel = np.ones( (9,9), np.uint8 );
            morph  = unit01.morph_open(input_image, kernel);
        FIX: now operates on the *image* argument -- the original ignored the
        parameter and always used self.image.
        """
        bin_open = cv2.morphologyEx(image, cv2.MORPH_OPEN, kernel)
        return bin_open

    def morph_close(self, image, kernel):
        """
        Remove all black noises or speckles inside images.
        Need to tune the kernel size.
        Instruction:
            unit01 = Toolbox(image);
            kernel = np.ones( (11,11), np.uint8 );
            morph  = unit01.morph_close(input_image, kernel);
        FIX: now operates on the *image* argument -- the original ignored the
        parameter and always used self.image.
        """
        bin_close = cv2.morphologyEx(image, cv2.MORPH_CLOSE, kernel)
        return bin_close


def trimap(image, name, size, number, erosion=False):
    """
    This function creates a trimap based on simple dilation algorithm.
    Inputs [4]: a binary image (black & white only), name of the image,
                dilation pixels; the last argument is optional -- how many
                iterations the image gets eroded before dilation.
    Output    : a trimap written to ./images/results/
    """
    checkImage(image)
    row = image.shape[0]
    col = image.shape[1]
    pixels = 2 * size + 1                          ## Double and plus 1 to have an odd-sized kernel
    kernel = np.ones((pixels, pixels), np.uint8)   ## Pixel of extension I get

    if erosion is not False:
        erosion = int(erosion)
        erosion_kernel = np.ones((3, 3), np.uint8)  ## Design an odd-sized erosion kernel
        image = cv2.erode(image, erosion_kernel, iterations=erosion)  ## How many erosion do you expect
        image = np.where(image > 0, 255, image)     ## Any gray-colored pixel becomes white (smoothing)
        # Error-handler to prevent entire foreground annihilation
        if cv2.countNonZero(image) == 0:
            print("ERROR: foreground has been entirely eroded")
            sys.exit()

    dilation = cv2.dilate(image, kernel, iterations=1)
    dilation = np.where(dilation == 255, 127, dilation)  ## WHITE to GRAY
    # (A dead intermediate `remake = np.where(dilation != 127, 0, dilation)`
    #  was removed: its value was overwritten by the next line.)
    remake = np.where(image > 127, 200, dilation)        ## mark the tumor inside GRAY
    remake = np.where(remake < 127, 0, remake)           ## Embelishment
    remake = np.where(remake > 200, 0, remake)           ## Embelishment
    remake = np.where(remake == 200, 255, remake)        ## GRAY to WHITE

    #############################################
    # Ensures only three pixel values available #
    # TODO: Optimization with Cython            #
    #############################################
    for i in range(0, row):
        for j in range(0, col):
            if remake[i, j] != 0 and remake[i, j] != 255:
                remake[i, j] = 127

    path = "./images/results/"  ## Change the directory
    new_name = '{}px_'.format(size) + name + '_{}.png'.format(number)
    cv2.imwrite(os.path.join(path, new_name), remake)


#############################################
###             TESTING SECTION           ###
#############################################
if __name__ == '__main__':
    path = "./images/test_images/test_image_11.png"
    image = extractImage(path)
    size = 10
    number = path[-5]  # digit just before ".png" in the file name
    title = "test_image"
    unit01 = Toolbox(image)
    kernel1 = np.ones((11, 11), np.uint8)
    unit01.displayImage  # property access triggers the display window
    opening = unit01.morph_close(image, kernel1)
    trimap(opening, title, size, number, erosion=False)
    unit02 = Toolbox(opening)
    unit02.displayImage

########################################################
## Default instruction (no binary opening or closing  ##
## trimap(image, title, size, number, erosion=False); ##
########################################################
python
# repo: przebieglykaziu/best-test
import pymc3 as pm


class BayesianDifferenceTest:
    """Perform a Bayesian test for difference of means and standard deviations
    between two samples.

    Inspired by the classic BEST paper by Kruschke.
    """

    def __init__(self, param_a_name, param_b_name, mu_mean, mu_sd,
                 sd_lower, sd_upper, nu_mean):
        """Initialize the test.

        Parameters
        ----------
        param_a_name, param_b_name : str
            Names for the tested parameters.
        mu_mean, mu_sd : float
            Mean and standard deviation of the prior on mean.
        sd_upper, sd_lower : float
            Upper and lower bounds of the prior on standard deviation.
        nu_mean : float
            The mean of the prior on normality (aka 'degrees of freedom').
        """
        self.param_a_name = param_a_name
        self.param_b_name = param_b_name
        self.mu_mean = mu_mean
        self.mu_sd = mu_sd
        self.sd_lower = sd_lower
        self.sd_upper = sd_upper
        self.nu_mean = nu_mean
        # Human-readable trace/variable names keyed by role.
        self._varnames = {
            'mean_param_a': '{}_mean'.format(self.param_a_name),
            'mean_param_b': '{}_mean'.format(self.param_b_name),
            'sd_param_a': '{}_sd'.format(self.param_a_name),
            'sd_param_b': '{}_sd'.format(self.param_b_name),
            'nu': 'nu',
            'diff_means': 'difference_of_means',
            'diff_sds': 'difference_of_sds'
        }

    def _build_model(self, observed_a, observed_b):
        """Construct the pymc3 model for the two observed samples."""
        self.model = pm.Model()
        with self.model:  # unused `as model` alias removed
            # normal priors for means
            mean_param_a = pm.Normal(self._varnames['mean_param_a'],
                                     self.mu_mean, self.mu_sd)
            mean_param_b = pm.Normal(self._varnames['mean_param_b'],
                                     self.mu_mean, self.mu_sd)
            # uniform priors for standard deviations
            sd_param_a = pm.Uniform(self._varnames['sd_param_a'],
                                    self.sd_lower, self.sd_upper)
            sd_param_b = pm.Uniform(self._varnames['sd_param_b'],
                                    self.sd_lower, self.sd_upper)
            # shifted exponential prior for normality (aka 'degrees of freedom')
            nu = pm.Exponential(self._varnames['nu'], 1 / self.nu_mean) + 1
            # The data is assumed to come from Student's t distribution since
            # it models data with outliers well; it is not related to
            # Student's t test in this case.
            # pymc3 uses precision instead of sd for Student's t.
            lambda_param_a = sd_param_a ** -2
            lambda_param_b = sd_param_b ** -2
            data_param_a = pm.StudentT('data_param_a', nu=nu, mu=mean_param_a,
                                       lam=lambda_param_a, observed=observed_a)
            data_param_b = pm.StudentT('data_param_b', nu=nu, mu=mean_param_b,
                                       lam=lambda_param_b, observed=observed_b)
            diff_means = pm.Deterministic(self._varnames['diff_means'],
                                          mean_param_a - mean_param_b)
            diff_sds = pm.Deterministic(self._varnames['diff_sds'],
                                        sd_param_a - sd_param_b)

    def run(self, observed_a, observed_b, nsamples=2000, njobs=1):
        """Run the inference on the model.

        Parameters
        ----------
        observed_a, observed_b : array-like
            The observed data for the test.
        nsamples : int, optional
            The number of samples for MCMC (default 2000).
        njobs : int, optional
            The number of concurrent processes to use for sampling (default 1).
        """
        self._build_model(observed_a, observed_b)
        with self.model:
            self.trace = pm.sample(nsamples, njobs=njobs)

    def plot_posterior(self, varnames=None, ref_val=None):
        """Generate informative plots from the trace.

        Parameters
        ----------
        varnames : iterable of str or None, optional
            The model variables to generate plots for (default None).
            If None, defaults to all variables.
        ref_val : int or float or None, optional
            The value to use as reference on the plots (default None).
            Generally only relevant for posteriors on differences of means
            and standard deviations. For example, if ref_val = 0, a bar will
            be placed on the posterior plot at a point corresponding to zero
            difference in parameters. If this bar lies within the 95% HPD,
            then it is likely that there is no significant difference between
            the parameters.
        """
        varnames = varnames or self.model_variables
        pm.plot_posterior(self.trace, varnames=varnames, ref_val=ref_val,
                          color='#8BCAF1')

    def forestplot(self, varnames=None):
        """Generate a forestplot with 95% credible intervals and R hat statistic.

        Parameters
        ----------
        varnames : iterable of str or None, optional
            The model variables to generate plots for (default None).
            If None, defaults to all variables.
        """
        varnames = varnames or self.model_variables
        pm.forestplot(self.trace, varnames=varnames, color='#8BCAF1')

    def traceplot(self):
        """Generate a traceplot for MCMC diagnostics."""
        pm.traceplot(self.trace)

    def summary(self, varnames=None):
        """Generate summary statistics for model as Pandas dataframe.

        Parameters
        ----------
        varnames : iterable of str or None, optional
            The model variables to generate summaries for (default None).
            If None, defaults to all variables.

        Returns
        -------
        summary : pandas.DataFrame
            The dataframe with summary statistics.
        """
        varnames = varnames or self.model_variables
        return pm.df_summary(self.trace, varnames=varnames)

    @property
    def model_variables(self):
        """Get model variables.

        Returns
        -------
        varnames : list of str
            The names of model variables.
        """
        return list(self._varnames.values())
python
// Read a file from zip
const StreamZip = require('node-stream-zip')
// Convert the file from CP1251
const iconv = require('iconv-lite')
const converterStream = iconv.decodeStream('win1251')
// Split strings
const split2 = require('split2')
// Parse XML to JSON
const parser = require('fast-xml-parser')
const he = require('he')

// Set options for the xml parser
const options = {
  attributeNamePrefix: '@_',
  attrNodeName: 'attr', // default is 'false'
  textNodeName: '#text',
  ignoreAttributes: true,
  ignoreNameSpace: false,
  allowBooleanAttributes: false,
  parseNodeValue: true,
  parseAttributeValue: false,
  trimValues: true,
  cdataTagName: '__cdata', // default is 'false'
  cdataPositionChar: '\\c',
  parseTrueNumberOnly: false,
  arrayMode: false, // "strict"
  attrValueProcessor: (val, attrName) => he.decode(val, { isAttributeValue: true }),
  tagValueProcessor: (val, tagName) => he.decode(val),
  stopNodes: ['parse-me-as-string']
}

// The zip file to process
const file = '17-ufop_full_08-05-2020.zip'
// The specific zipped file in the zip (uncomment the one you need)
const fileName = '17.1-EX_XML_EDR_UO_FULL_08.05.2020.xml'
// const fileName2 = '17.2-EX_XML_EDR_FOP_FULL_08.05.2020.xml'

// Start zip streaming
const zip = new StreamZip({ file, storeEntries: true })

// Handle errors of zip stream
zip.on('error', err => { console.log(err) })

// Read zip stream: decode from win1251, split on each <SUBJECT> record
zip.on('ready', () => {
  zip.stream(fileName, (err, stm) => {
    stm.pipe(converterStream).pipe(split2(/(?=<SUBJECT)/g)).on('data', processObject)
    stm.on('end', () => zip.close())
  })
})

// Process a single <SUBJECT> record
const processObject = xml => {
  if (!xml.match(/SUBJECT/i)) return false
  const data = parser.parse(xml, options, true)
  const organization = data.SUBJECT

  // Check double quotes ''
  if (organization.NAME && organization.NAME.match(/''/g)) console.log(organization.NAME)

  // Check that the string doesn't end with a space
  if (organization.CONTACTS) {
    // Convert number to string (in case a phone number was parsed as a number)
    organization.CONTACTS = organization.CONTACTS + ''
    // Split data by regex
    let contacts = organization.CONTACTS.split(/;/g)
    for (let contact of contacts) {
      if (contact.match(/\s$/)) console.log(contact)
    }
  }

  // Check if EDRPOU is valid
  if (organization.EDRPOU) {
    // Convert number to string
    organization.EDRPOU = organization.EDRPOU + ''
    // Check EDRPOUs that do not match 8 symbols
    if (!organization.EDRPOU.match(/^.{8}$/)) {
      console.log(organization.EDRPOU)
      // Fix such EDRPOUs (leading zeros are lost on numeric parse)
      organization.EDRPOU = organization.EDRPOU.padStart(8, '0')
    }
    // Output all EDRPOUs
    console.log(organization.EDRPOU)
  }

  // Extract registration date
  if (organization.REGISTRATION) {
    console.log(organization.REGISTRATION.match(/\d{2}\.\d{2}\.\d{2,4}/g))
  }

  // Research companies' founders
  if (organization.FOUNDERS) {
    // Make an array if the field is a single string
    if (!Array.isArray(organization.FOUNDERS.FOUNDER)) {
      organization.FOUNDERS.FOUNDER = [organization.FOUNDERS.FOUNDER]
    }
    // Find founders with EDRPOUs
    for (let founder of organization.FOUNDERS.FOUNDER) {
      if (founder.match(/\d{8}/g)) console.log(founder)
    }
    // Get names of founders
    for (let founder of organization.FOUNDERS.FOUNDER) {
      if (founder.match(/^[А-ЯЄІЇҐ]+\s[А-ЯЄІЇҐ]+\s[А-ЯЄІЇҐ]+/g)) console.log(founder.match(/^[А-ЯЄІЇҐ]+\s[А-ЯЄІЇҐ]+\s[А-ЯЄІЇҐ]+/g))
    }
    // Check wrong apostrophes
    for (let founder of organization.FOUNDERS.FOUNDER) {
      if (founder.match(/[бпвмф][*”»"][яєюї]/ig)) console.log(founder)
    }
    // Get money of founders
    // FIX: the dot in "грн." was unescaped and matched any character.
    for (let founder of organization.FOUNDERS.FOUNDER) {
      if (founder.match(/\d+[,.]\d{2} грн\./ig)) console.log(founder.match(/\d+[,.]\d{2}(?= грн\.)/ig))
    }
    // Better option to get names of founders
    for (let founder of organization.FOUNDERS.FOUNDER) {
      if (founder.match(/[^ ]+ [^ ]+ [^ ]+(ивна|івна|ович)/ig)) console.log(founder.match(/[^ ]+ [^ ]+ [^ ]+(ивна|івна|ович)/ig))
    }
  }

  // Get dates and convert them to ISO (dd.mm.yyyy -> yyyy-mm-dd)
  if (organization.REGISTRATION) {
    console.log(organization.REGISTRATION.replace(/.*(\d{2})\.(\d{2})\.(\d{4}).*/, '$3-$2-$1'))
  }
}
javascript
Tamannaah Bhatia looks regal in an ornate ombre lehenga. (Image: Instagram) Tamannaah Bhatia cuts a statuesque figure in a layered lehenga. (Image: Instagram) Tamannaah Bhatia looks gorgeous in a bright blue lehenga. (Image: Instagram) Tamannaah Bhatia looks stunning in a golden embellished lehenga. (Image: Instagram) Tamannaah Bhatia looks vibrant in a pink lehenga with ornate embroidery. (Image: Instagram) Tamannaah Bhatia looks elegant in a floral embellished lehenga. (Image: Instagram) Tamannaah Bhatia looks sexy in a sequinned maroon lehenga. (Image: Instagram) Tamannaah Bhatia looks beautiful in a floral lehenga. (Image: Instagram) Tamannaah Bhatia looks stellar in a vibrant pink lehenga. (Image: Instagram) Tamannaah Bhatia strikes a pose in a voluptuous red lehenga. (Image: Instagram)
english
Wrestling fans have reacted to Logan Paul hinting at a match with WWE legend John Cena at WrestleMania 39. Paul faced Roman Reigns at the Crown Jewel premium live event for the Undisputed WWE Universal Championship. The night's main event was a hit as most fans enjoyed the social media star's incredible performance. He took The Tribal Chief to the limit and showcased several high-flying moves. Following Crown Jewel, Cena took to social media to share a photo of Paul, which could have been a hint about his potential future program with the YouTube star. Meanwhile, Paul reacted to Wrestle Ops' post about The Cenation Leader possibly returning to WWE for WrestleMania. Hence, many fans have demanded that the two stars lock horns inside the squared circle. In contrast, others suggested that Paul focus on the injuries he sustained at the Riyadh spectacle. Here are some of the interesting fan tweets below: Former WWE head writer Vince Russo recently discussed Crown Jewel's main event and highlighted a possible flaw in the company. Speaking on Sportskeeda Wrestling's Legion of RAW, the veteran claimed that there is something wrong with the promotion when an outsider performs better than 80 per cent of the stars on the roster. "What came away from that was all the type from Logan Paul. Think about that. How sad is it when a non-wrestler is better than 80 percent of your roster? Something's wrong bro. Something's wrong when a guy that has three matches is better than 80 percent. There's an issue there, bro." It will be exciting to see if Logan Paul will face John Cena at the upcoming WrestleMania 39 premium live event. The show is scheduled for April 1 and 2 next year in Los Angeles. What are your thoughts on Paul facing Cena shortly? Sound off in the comments section below.
english
R Sukumaran's take on the life of the greatest social reformer from Kerala, Sreenarayanaguru, made under the title 'Yugapurushan', has been in the news for the last couple of years. It was megastar Mammootty who was supposed to do the title role of Guru, but at some point of time Mammootty withdrew from being the epic guruji and humbly told the producers that he may not be the right choice as his screen image may interfere with the icon, the best known name in Kerala who fought for social equality, anticlassicism and freedom. The picture of guru, who lived in the first part of the last century, was quite familiar to the masses and so Mammootty hardly believed that he could substitute the looks and grace of the original figure. Then the crew was for some time engaged in a mammoth task of fixing another fresh face for being Guruji. They have now finalized two persons who will depict the life of guruji between his thirties and his seventies. According to the latest reports, the movie will start its shoot, calling the first shot, on the first of January. R Sukumaran and his cameraman Ramachandra Babu will be canning the first shot on Mammootty himself, who will now appear as a contemporary revolutionary who lived in the times of guru and supported his ideals. The director is planning for a shoot of two months in and outside Kerala. The detailed story boards of each of the shots are being prepared to control the budget. The movie will also present historic figures like Gandhiji, Tagore, Chattambi Swamikal, Kumaranasan, T K Madhavan, Ayyankaali, and Mannathu Padmanabhan, who all lived and interacted with guru when he was vigorously fighting for his ideals. Know who is appearing as Swami Vivekanandha in the movie? It is none other than superstar Mohanlal. With the coming together of such a big cast in the recreation of the life of a historic personality, 'Yugapurushan' will be one of the most sought after films of 2009.
Follow us on Google News and stay updated with the latest!
english
Russell Wilson's start to life as the Denver Broncos' franchise quarterback has been less than ideal. It is definitely far from what the former Seattle Seahawks star would have envisioned. The Broncos are 2-4 and have failed to score 20 points in five of their six games. In the one game they managed to breach that mark, they lost 32-23 against divisional rivals, the Las Vegas Raiders. Wilson's performances have left a lot to be desired. The quarterback has thrown for over 300 passing yards only once this season and is on pace for only 14 touchdown passes this season. That's not what the Broncos expected when they handed Wilson a five-year, $245 million extension after acquiring him in a blockbuster trade. The quarterback picked up a severe hamstring injury in the Broncos' Week 6 loss against the Los Angeles Chargers on Monday night, but claimed he will recover from it quickly because he has "Wolverine blood. " On NBC Sports' Galaxy Brains, hosts Patrick Daugherty and Denny Carter ridiculed the quarterback for comparing himself to an X-Men character. While discussing Wilson's quote, Carter said: "We have to counter that by saying, absolutely, you do not have Wolverine blood, Russell Wilson. And here's how I know the actual character, Wolverine from X-Men is a badass, and Russell Wilson is not. There are many, many differences between the two but that one jumps out to me most. " Daugherty quipped: "Yeah. I mean, I've seen a wolverine kill some people. I've seen Russell Wilson kill some drives by putting the football end to the turf. " Watch the entire segment below: Russell Wilson injury: Will QB play vs. New York Jets? Wilson has been a limited participant in practice throughout the week, but threw the ball in practice on Friday. The Super Bowl-winning quarterback has been ruled out by Broncos head Nathaniel Hackett against the New York Jets. With the Broncos in desperate need of a win, this is some very bad news for the franchise. 
Brett Rypien will start under center for Denver in this tricky fixture. With Wilson on the sidelines, the game just got a lot tougher for the Broncos, who have to find a way to win this match. It will be interesting to see if they can record a vital win in Week 7. If you use any of the above quotes, please credit Galaxy Brains and H/T Sportskeeda. 5 Times Steph Curry Was HUMILIATED On And Off The Court!
english
#ifndef __fxCG_LINEEDITORS_HPP__ #define __fxCG_LINEEDITORS_HPP__ #define KEY_HELP KEY_CTRL_F1 #define KEY_LEFT KEY_CTRL_LEFT #define KEY_RIGHT KEY_CTRL_RIGHT #define KEY_0 KEY_CHAR_0 #define KEY_1 KEY_CHAR_1 #define KEY_2 KEY_CHAR_2 #define KEY_3 KEY_CHAR_3 #define KEY_4 KEY_CHAR_4 #define KEY_5 KEY_CHAR_5 #define KEY_6 KEY_CHAR_6 #define KEY_7 KEY_CHAR_7 #define KEY_8 KEY_CHAR_8 #define KEY_9 KEY_CHAR_9 #define KEY_A KEY_CTRL_XTT #define KEY_B KEY_CHAR_LOG #define KEY_C KEY_CHAR_LN #define KEY_D KEY_CHAR_SIN #define KEY_E KEY_CHAR_COS #define KEY_F KEY_CHAR_TAN #define KEY_EXIT KEY_CTRL_EXIT #define KEY_EXE KEY_CTRL_EXE enum TEditMode { emANY, emINTEGER, emHEX }; int InputAny( unsigned char*buffer, int x, int y, int posmax, int&xpos, TEditMode edMode = emANY ); #endif
cpp
{ "title": "should add the date header to the headers, and to signedHeaders", "description": "If the request to be signed not contains the date header, Escher should add the current date. If missing, the date has to be added to signedheaders as well.", "request": { "method": "GET", "url": "/", "headers": [ ["host", "host.foo.com"] ], "body": "" }, "headersToSign": [], "config": { "vendorKey": "AWS4", "algoPrefix": "AWS4", "authHeaderName": "Authorization", "dateHeaderName": "Date", "hashAlgo": "SHA256", "date": "2011-09-09T23:36:00.000Z", "credentialScope": "us-east-1/host/aws4_request", "accessKeyId": "AKIDEXAMPLE", "apiSecret": "<KEY>" }, "expected": { "request": { "method": "GET", "url": "/", "headers": [ ["host", "host.foo.com"], ["Date", "Fri, 09 Sep 2011 23:36:00 GMT"], ["Authorization", "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=0a71dc54017d377751d56ae400f22f34f5802df5f2162a7261375a34686501be" ] ], "body": "" }, "authHeader": "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=0a71dc54017d377751d56ae400f22f34f5802df5f2162a7261375a34686501be" } }
json
<reponame>freyes/charm-azure-integrator import json import os import re import subprocess from base64 import b64decode from enum import Enum from math import ceil, floor from pathlib import Path from urllib.error import HTTPError from urllib.request import urlopen import yaml from charmhelpers.core import hookenv from charmhelpers.core.unitdata import kv from charms.layer import status ENTITY_PREFIX = 'charm.azure' MODEL_UUID = os.environ['JUJU_MODEL_UUID'] MAX_ROLE_NAME_LEN = 64 MAX_POLICY_NAME_LEN = 128 class StandardRole(Enum): NETWORK_MANAGER = '4d97b98b-1d4f-4787-a291-c67834d212e7' SECURITY_MANAGER = 'e3d13bf0-dd5a-482e-ba6b-9b8433878d10' DNS_MANAGER = 'befefa01-2a29-4197-83a8-272ff33ce314' OBJECT_STORE_READER = '2a2b9908-6ea1-4ae2-8e65-a410df84e7d1' OBJECT_STORE_MANAGER = 'ba92f5b4-2d11-453d-a403-e96b0029c9fe' # When debugging hooks, for some reason HOME is set to /home/ubuntu, whereas # during normal hook execution, it's /root. Set it here to be consistent. os.environ['HOME'] = '/root' def log(msg, *args): hookenv.log(msg.format(*args), hookenv.INFO) def log_err(msg, *args): hookenv.log(msg.format(*args), hookenv.ERROR) def get_credentials(): """ Get the credentials from either the config or the hook tool. Prefers the config so that it can be overridden. 
""" no_creds_msg = 'missing credentials; set credentials config' config = hookenv.config() # try to use Juju's trust feature try: result = subprocess.run(['credential-get'], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) creds = yaml.load(result.stdout.decode('utf8')) creds_data = creds['credential']['attributes'] login_cli(creds_data) return True except FileNotFoundError: pass # juju trust not available except subprocess.CalledProcessError as e: if 'permission denied' not in e.stderr.decode('utf8'): raise no_creds_msg = 'missing credentials access; grant with: juju trust' # try credentials config if config['credentials']: try: creds_data = b64decode(config['credentials']).decode('utf8') login_cli(creds_data) return True except Exception: status.blocked('invalid value for credentials config') return False # no creds provided status.blocked(no_creds_msg) return False def login_cli(creds_data): """ Use the credentials to authenticate the Azure CLI. """ app_id = creds_data['application-id'] app_pass = <PASSWORD>_<PASSWORD>['<PASSWORD>'] sub_id = creds_data['subscription-id'] tenant_id = _get_tenant_id(sub_id) try: log('Forcing logout of Azure CLI') _azure('logout') except AzureError: pass try: log('Logging in to Azure CLI') _azure('login', '--service-principal', '-u', app_id, '-p', app_pass, '-t', tenant_id) # cache the subscription ID for use in roles kv().set('charm.azure.sub-id', sub_id) except AzureError as e: # redact the credential info from the exception message stderr = re.sub(app_id, '<app-id>', e.args[0]) stderr = re.sub(app_pass, '<app-pass>', stderr) stderr = re.sub(tenant_id, '<tenant-id>', stderr) # from None suppresses the previous exception from the stack trace raise AzureError(stderr) from None def ensure_msi(request): msi = _get_msi(request.vm_id) if not msi: log('Enabling Managed Service Identity') result = _azure('vm', 'identity', 'assign', '--name', request.vm_name, '--resource-group', request.resource_group) vm_identities = 
kv().get('charm.azure.vm-identities', {}) msi = vm_identities[request.vm_id] = result['systemAssignedIdentity'] kv().set('charm.azure.vm-identities', vm_identities) log('Instance MSI is: {}', msi) def send_additional_metadata(request): """ Get additional info about the requesting instance via the API that isn't available from the metadata server. """ res_grp = _azure('group', 'show', '--name', request.resource_group) # hard-code most of these because with Juju, they're always the same # and the queries required to look them up are a PITA request.send_additional_metadata( resource_group_location=res_grp['location'], vnet_name='juju-internal-network', vnet_resource_group=request.resource_group, subnet_name='juju-internal-subnet', security_group_name='juju-internal-nsg', ) def tag_instance(request): """ Tag the given instance with the given tags. """ log('Tagging instance with: {}', request.instance_tags) _azure('vm', 'update', '--name', request.vm_name, '--resource-group', request.resource_group, '--set', *['tags.{}={}'.format(tag, value) for tag, value in request.instance_tags.items()]) def enable_instance_inspection(request): """ Enable instance inspection access for the given application. """ log('Enabling instance inspection') _assign_role(request, _get_role('vm-reader')) def enable_network_management(request): """ Enable network management for the given application. """ log('Enabling network management') _assign_role(request, StandardRole.NETWORK_MANAGER) def enable_security_management(request): """ Enable security management for the given application. """ log('Enabling security management') _assign_role(request, StandardRole.SECURITY_MANAGER) def enable_block_storage_management(request): """ Enable block storage (disk) management for the given application. """ log('Enabling block storage management') _assign_role(request, _get_role('disk-manager')) def enable_dns_management(request): """ Enable DNS management for the given application. 
""" log('Enabling DNS management') _assign_role(request, StandardRole.DNS_MANAGER) def enable_object_storage_access(request): """ Enable object storage read-only access for the given application. """ log('Enabling object storage read') _assign_role(request, StandardRole.OBJECT_STORE_READER) def enable_object_storage_management(request): """ Enable object storage management for the given application. """ log('Enabling object store management') _assign_role(request, StandardRole.OBJECT_STORE_MANAGER) def cleanup(): """ Perform cleanup. """ pass # Internal helpers class AzureError(Exception): """ Exception class representing an error returned from the azure-cli tool. """ @classmethod def get(cls, message): """ Factory method to create either an instance of this class or a meta-subclass for certain `message`s. """ if 'already exists' in message: return AlreadyExistsAzureError(message) return AzureError(message) class AlreadyExistsAzureError(AzureError): """ Meta-error subclass of AzureError representing something already existing. """ pass def _elide(s, max_len, ellipsis='...'): """ Elide s in the middle to ensure it is under max_len. That is, shorten the string, inserting an ellipsis where the removed characters were to show that they've been removed. """ if len(s) > max_len: hl = (max_len - len(ellipsis)) / 2 headl, taill = floor(hl), ceil(hl) s = s[:headl] + ellipsis + s[-taill:] return s def _get_tenant_id(subscription_id): """ Translate the subscription ID into a tenant ID by making an unauthorized request to the API and extracting the tenant ID from the WWW-Authenticate header in the error response. 
""" url = ('https://management.azure.com/subscriptions/' '{}?api-version=2018-03-01-01.6.1'.format(subscription_id)) try: urlopen(url) log_err('Error getting tenant ID: did not get "unauthorized" response') return None except HTTPError as e: if 'WWW-Authenticate' not in e.headers: log_err('Error getting tenant ID: missing WWW-Authenticate header') return None www_auth = e.headers['WWW-Authenticate'] match = re.search(r'authorization_uri="[^"]*/([^/"]*)"', www_auth) if not match: log_err('Error getting tenant ID: unable to find in {}', www_auth) return None return match.group(1) def _azure(cmd, *args, return_stderr=False): """ Call the azure-cli tool. """ cmd = ['az', cmd] cmd.extend(args) result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout = result.stdout.decode('utf8').strip() stderr = result.stderr.decode('utf8').strip() if result.returncode != 0: raise AzureError.get(stderr) if return_stderr: return stderr if stdout: stdout = json.loads(stdout) return stdout def _get_msi(vm_id): """ Get the Managed System Identity for the VM. """ vm_identities = kv().get('charm.azure.vm-identities', {}) return vm_identities.get(vm_id) def _get_role(role_name): """ Translate short role name into a full role name and ensure that the custom role is loaded. The custom roles have to be applied to a specific subscription ID, but the subscription ID applies to the entire credential, so will almost certainly be reused, so there's not much danger in hitting the 2k custom role limit. 
""" known_roles = kv().get('charm.azure.roles', {}) if role_name in known_roles: return known_roles[role_name] sub_id = kv().get('charm.azure.sub-id') role_file = Path('files/roles/{}.json'.format(role_name)) role_data = json.loads(role_file.read_text()) role_fullname = role_data['Name'].format(sub_id) scope = role_data['AssignableScopes'][0].format(sub_id) role_data['Name'] = role_fullname role_data['AssignableScopes'][0] = scope try: log('Ensuring role {}', role_fullname) _azure('role', 'definition', 'create', '--role-definition', json.dumps(role_data)) except AzureError as e: if 'already exists' not in e.args[0]: raise known_roles[role_name] = role_fullname return role_fullname def _assign_role(request, role): if isinstance(role, StandardRole): role = role.value msi = _get_msi(request.vm_id) try: _azure('role', 'assignment', 'create', '--assignee-object-id', msi, '--resource-group', request.resource_group, '--role', role) except AlreadyExistsAzureError: pass
python
<filename>CH11_AssociativeContainers/exercises/ex11_07.cc /**************************************************************************** * * Copyright (c) 2016 <NAME>. All rights reserved. * ****************************************************************************/ /** * @file ex11_07.cc * * Exercise 11.7: * Define a map for which the key is the family’s last name and * the value is a vector of the children’s names. Write code to * add new families and to add new children to an existing family. * * @author <NAME> <<EMAIL>> */ #include <map> using std::map; #include <string> using std::string; #include <vector> using std::vector; #include <iostream> using std::cin; using std::cout; using std::endl; int main(int argc, char const *argv[]) { map<string, vector<string>> families; for (string last_name; cout << "Input family's last name:" << endl, cin >> last_name;) { for (string child_name; cout << "Input children's name:" << endl, cin >> child_name;) { families[last_name].push_back(child_name); } cin.clear(); } cout << "===========================" << endl; for (const auto& family : families) { cout << family.first << ":" << endl; for (const auto& child : family.second) cout << child << " "; cout << endl; } return 0; }
cpp
switch(1) { default: const a = 2; case 3: var a; }
javascript
import Account from '../../src/db/models/Account'; import initDBConfigs from '../../src/config/init-db'; import request from 'supertest'; import express, { Request } from 'express'; import cookieParser from 'cookie-parser'; import router from '../../src/routes/social-login'; import { before } from 'mocha'; import sequelize from '../../src/config/sequelize'; import RsaKey from '../../src/db/models/RsaKey'; import { customExceptionHandler } from '../../src/middleware/error-handler'; import { passportUses } from '../../src/middleware/passport'; import sinon from 'sinon'; import { accountMock } from '../mocks/accountMock'; import { assert } from 'chai'; // after(async () => { // await sequelize.authenticate().then(async () => { // await sequelize.sync({ force: true }).then(async () => { // await Account.truncate(); // await RsaKey.truncate(); // }); // }); // }); describe('social login tests', () => { const app = express(); before(async () => { app.use(express.json()); app.use(express.urlencoded({ extended: true })); app.use(cookieParser()); app.use('/auth/social', router); app.use(customExceptionHandler); await sequelize.authenticate().then(async () => { await sequelize.sync({ force: true }).then(async () => { await Account.truncate(); await RsaKey.truncate(); await initDBConfigs(); }); }); }); beforeEach(() => { sinon.stub(passportUses, 'github').callsFake((req: Request, res, next) => { req.user = accountMock; next(); }); }); afterEach(() => sinon.restore()); it('github auth for non existing user then create user and return', (done) => { request(app) .get('/auth/social/github/callback') .expect(302) .then((response) => { const refreshTokenCookie = response.header['set-cookie'][0]; const { location } = response.headers; const jwtRegexp = new RegExp('[a-z]+_token.[A-Za-z0-9-_=]+.?[A-Za-z0-9-_.+/=]*'); assert.isTrue(jwtRegexp.test(refreshTokenCookie)); assert.isDefined(location); done(); }) .catch((err) => { done(err); }); }); });
typescript
In this Ishq Vishk film, Shahid Kapoor, Amrita Rao played the primary leads. The Ishq Vishk was released in theaters on 11 Apr 2003. Movies like Sam Bahadur, Animal, Dunki and others in a similar vein had the same genre but quite different stories. The Ishq Vishk had a runtime of 127 minutes. The soundtracks and background music were composed by Anu Malik for the movie Ishq Vishk. The movie Ishq Vishk belonged to the Romance, genre.
english
<filename>angular-google-maps/1.0.15.json {"angular-google-maps.js":"<KEY>,"angular-google-maps.min.js":"<KEY>}
json
BETHESDA, Maryland -- Some scientists believe that nanotechnology will transform computing, biotechnology, and medicine, even proclaiming that the technology will one day solve every problem from hunger to disease. But researcher Steven Block has one thing to say to these nanotech Polyannas: Wake up. "One of the problems is that nanotechnology enthusiasts don’t know much about biology," said Block, a biophysicist at Stanford University, during a speech on Sunday to 650 scientists and academics at the Nanoscience and Nanotechnology: Shaping Biomedical Research conference. Nanotechnology is the science of building devices out of individual atoms or molecules, and was first theorized by Nobel Prize winner Richard Feynman in 1959. But Block said that ever since K. Eric Drexler published Engines of Creation: The Coming Era of Nanotechnology, scientists and science fiction fans alike have overstated the immediate impact of the technology. "Let’s get real. There’s a lot of basic science work that needs to be done," Block said. The National Institutes of Health convened the conference to help promote better understanding of a field that is fraught with promise but short on answers. Block, whose lab pioneered the use of laser-based optical traps or "optical tweezers," to study the detailed motion of single molecules, offered a sobering assessment of what nanotechnology can and cannot do. "We simply don’t know how to design ... complex macromolecules that work," Block said. Block said Drexler and his nonprofit Foresight Institute, which is developing guidelines for nanotechnology, are getting ahead of themselves. "Biologists and nanotechnologists need to figure out how nature’s machines work" before trying to manipulate them or develop their own, he said. Nanotechnolgy research will continue to expand during the coming years thanks to a boost from President Clinton’s 2001 budget, which created the National Nanotechnology Initiative. 
The program earmarked $495 million for research projects, an 83 percent increase over funding for this year. Seventy percent of the money will go to university-based research. Later on Sunday, researchers discussed the promise that nantotechnology offers in the field of medicine and public health. Chad Mirkin, acting director of the Center for Nanofabrication and Molecular Assembly at Northwestern University, said nanotechnology has already had an impact in the field of diagnostics and is used in tests for tuberculosis and colon cancer. Mirkin said he believes one area in which nanotechnology will have a major impact is drug delivery. Nearly half of all potential new drugs don’t dissolve easily, but they will dissolve more readily if they are nanometer sized, he said. Eugene Cooper of Elan Pharmaceutical Technologies said that by utilizing a technology called NanoCrystals, his company can speed the delivery time of an over-the-counter pain killer in the body from three hours to 20 minutes.
english
<filename>geography/zipcodes/all/74034.geo.json {"geometry": {"type": "Point", "coordinates": [-96.56, 36.23]}, "type": "Feature", "id": "74034", "properties": {"other_cities": "", "city": "Hallett", "state": "OK", "county": "Pawnee County"}}
json
<gh_stars>1-10 package com.celgene.kafka; import org.apache.oodt.cas.metadata.Metadata; import java.util.Hashtable; import java.util.List; import java.util.Vector; /** * Factory to create different types of metadata */ public class MetadataFactory { public enum MetadataType { HASHTABLE("hashtable"), LIST("list"), SIMPLE("simple"), COMPLEX("complex"); private String val; MetadataType(String val) { this.val = val; } @Override public String toString() { return this.val; } } public static Metadata buildMetadata(int type) { Metadata md = null; switch(type) { case 0: md = createHashTableMd(); break; case 1: md = createListMd(); break; case 2: md = createSimpleMd(); break; case 3: md = createComplexMd(); break; default: throw new IllegalArgumentException("Metadata type not supported!"); } return md; } public static Metadata buildMetadata(MetadataType type) { Metadata md = null; switch(type) { case HASHTABLE: md = createHashTableMd(); break; case LIST: md = createListMd(); break; case SIMPLE: md = createSimpleMd(); break; case COMPLEX: md = createComplexMd(); break; default: throw new IllegalArgumentException("Metadata type not supported!"); } return md; } private static Metadata createComplexMd() { Metadata m1 = new Metadata(); m1.addMetadata("Group1/key1", "val1"); m1.addMetadata("Group1/key2", "val2"); m1.addMetadata("Group2/key2", "val3"); m1.addMetadata("Group2/key2/key3", "val3"); m1.addMetadata("Group1/sub1/key2", "val3"); m1.addMetadata("Group1/sub2/key2/key3", "val3"); // List<String> keys = m1.getAllKeysWithName("key2"); // assertEquals(keys.size(), 3); // assertEquals(keys.get(0), "Group2/key2"); // assertEquals(keys.get(1), "Group1/sub1/key2"); // assertEquals(keys.get(2), "Group1/key2"); // keys = m1.getAllKeysWithName("key1"); // assertEquals(keys.size(), 1); // assertEquals(keys.get(0), "Group1/key1"); return m1; } private static Metadata createSimpleMd() { Metadata m1 = new Metadata(); m1.addMetadata("key1", "val1"); m1.addMetadata("key2", "val2"); 
m1.addMetadata("key2", "val3"); return m1; } private static Metadata createListMd() { Metadata m1 = new Metadata(); List counting = new Vector(); counting.add("1"); counting.add("2"); counting.add("3"); m1.addMetadata("ManyTest", counting); return m1; } private static Metadata createHashTableMd() { Hashtable testHash = new Hashtable(); testHash.put("key1", "val1"); testHash.put("key2", "val2"); Metadata m1 = new Metadata(); m1.addMetadata("key3", "val3"); m1.addMetadata(testHash); return m1; } }
java
<gh_stars>1-10 import React from 'react'; import './Footer.css'; export default function Footer() { return( <div class="footer"> <p>© 2020 Happy Indra Wijaya. All rights reserved. <a href="/credits">Credits</a>. </p> <p>Powered by <a class="tmdb" href="https://www.themoviedb.org/" target="_blank" rel="noopener noreferrer">TMDb</a></p> </div> ); }
javascript
<reponame>Travis-Witts/POS<filename>client/src/components/Navbar/index.tsx import React from 'react'; import './style.scss'; import axios from 'axios'; import { NavLink } from "react-router-dom"; import { ReactComponent as Logout } from '../assets/icons/logout.svg'; import { ReactComponent as POS } from '../assets/icons/pos.svg'; import { ReactComponent as Tag } from '../assets/icons/tag.svg'; import { ReactComponent as Settings } from '../assets/icons/settings.svg'; import { ReactComponent as List } from '../assets/icons/list.svg'; import { LoginProps } from '../../types/types'; import { IsDesktopOrLaptop } from '../../utils/responsiveHooks' const Navbar: React.FC<LoginProps> = (Props: LoginProps) => { const logoutHandler = async (event: React.MouseEvent<HTMLAnchorElement>) => { event.preventDefault(); await axios.post('/user/logout'); Props.setLogin('') }; return ( <div className="sidebar"> <ul className="sidebar-nav"> <li className="logo"> <a href="/" className="nav-link"> <POS className="link-img" /> <span className="link-text">AnyPOS</span> </a> </li> <li className="nav-item"> <NavLink exact to="/" className="nav-link"> <Tag className="link-img" /> <span className="link-text">New Sale</span> </NavLink> </li> {!IsDesktopOrLaptop() && <li className="nav-item mobile-display"> <NavLink to="/sale" className="nav-link"> <List className="link-img" /> <span className="link-text">Sale Items</span> </NavLink> </li> } <li id="store" className="nav-item"> <NavLink exact to="/profile" className="nav-link"> <Settings className="link-img" /> <span className="link-text">Profile</span> </NavLink> </li> <li id="logout" className="nav-item"> <a href="/" onClick={logoutHandler} className=" nav-link"> <Logout className="link-img" /> <span className="link-text">Logout</span> </a> </li> </ul> </div> ); }; export default Navbar;
typescript
import os, sys, configparser, warnings from flask import (Flask, redirect, render_template, request, session, url_for) from app import consent, alert, experiment, complete, error from .io import write_metadata from .utils import gen_code __version__ = '1.0' ## Define root directory. ROOT_DIR = os.path.dirname(os.path.realpath(__file__)) ## Load and parse configuration file. cfg = configparser.ConfigParser() cfg.read(os.path.join(ROOT_DIR, 'app.ini')) ## Ensure output directories exist. data_dir = os.path.join(ROOT_DIR, cfg['IO']['DATA']) if not os.path.isdir(data_dir): os.makedirs(data_dir) meta_dir = os.path.join(ROOT_DIR, cfg['IO']['METADATA']) if not os.path.isdir(meta_dir): os.makedirs(meta_dir) reject_dir = os.path.join(ROOT_DIR, cfg['IO']['REJECT']) if not os.path.isdir(reject_dir): os.makedirs(reject_dir) ## Check Flask mode; if debug mode, clear session variable. debug = cfg['FLASK'].getboolean('DEBUG') if debug: warnings.warn("WARNING: Flask currently in debug mode. This should be changed prior to production.") ## Check Flask password. secret_key = cfg['FLASK']['SECRET_KEY'] if secret_key == "PLEASE_CHANGE_THIS": warnings.warn("WARNING: Flask password is currently default. This should be changed prior to production.") ## Initialize Flask application. app = Flask(__name__) app.secret_key = secret_key ## Apply blueprints to the application. app.register_blueprint(consent.bp) app.register_blueprint(alert.bp) app.register_blueprint(experiment.bp) app.register_blueprint(complete.bp) app.register_blueprint(error.bp) ## Define root node. @app.route('/') def index(): ## Debug mode: clear session. if debug: session.clear() ## Store directories in session object. session['data'] = data_dir session['metadata'] = meta_dir session['reject'] = reject_dir ## Record incoming metadata. 
info = dict( workerId = request.args.get('workerId'), # MTurk metadata assignmentId = request.args.get('assignmentId'), # MTurk metadata hitId = request.args.get('hitId'), # MTurk metadata subId = gen_code(24), # NivTurk metadata a = request.args.get('a'), # TurkPrime metadata tp_a = request.args.get('tp_a'), # TurkPrime metadata b = request.args.get('b'), # TurkPrime metadata tp_b = request.args.get('tp_b'), # TurkPrime metadata c = request.args.get('c'), # TurkPrime metadata tp_c = request.args.get('tp_c'), # TurkPrime metadata address = request.remote_addr, # NivTurk metadata browser = request.user_agent.browser, # User metadata platform = request.user_agent.platform, # User metadata version = request.user_agent.version, # User metadata ) ## Case 1: workerId absent. if info['workerId'] is None: ## Redirect participant to error (missing workerId). return redirect(url_for('error.error', errornum=1000)) ## Case 2: mobile user. elif info['platform'] in ['android','iphone','ipad','wii']: ## Redirect participant to error (platform error). return redirect(url_for('error.error', errornum=1001)) ## Case 3: repeat visit, previous success. elif f"{info['subId']}.json" in os.listdir(data_dir): ## Update metadata. session['workerId'] = info['workerId'] session['complete'] = 'success' ## Redirect participant to complete page. return redirect(url_for('complete.complete')) ## Case 4: repeat visit, previous reject. elif f"{info['subId']}.json" in os.listdir(reject_dir): ## Update metadata. session['workerId'] = info['workerId'] session['complete'] = 'reject' ## Redirect participant to complete page. return redirect(url_for('complete.complete')) ## Case 5: repeat visit, preexisting log but no session data. elif not 'workerId' in session and info['workerId'] in os.listdir(meta_dir): ## Update metadata. for k, v in info.items(): session[k] = v session['WARNING'] = "Incognito user." write_metadata(session, ['subId','WARNING'], 'a') ## Redirect participant to consent form. 
return redirect(url_for('consent.consent')) ## Case 6: repeat visit, preexisting activity. elif 'workerId' in session: ## Update metadata. session['WARNING'] = "Revisited home." write_metadata(session, ['WARNING'], 'a') ## Redirect participant to consent form. return redirect(url_for('consent.consent')) ## Case 7: first visit, workerId present. else: ## Update metadata. for k, v in info.items(): session[k] = v write_metadata(session, ['workerId','hitId','assignmentId','subId','address','browser','platform','version'], 'w') ## Redirect participant to consent form. return redirect(url_for('consent.consent'))
python
In Japanese, the 'kyr' in 'Kyrgyz' sounds a lot like 'kill'. Curious about Chilean cinema and culture? Look no further! "How do you know you are indigenous? [. . . ] It does not matter what we are called, we know who we are. It is you who do not recognise me. " "This is ludicrous, I can't believe these so-called experts could not distinguish between fiction and reality. "
english
// Copyright 2020 <NAME> // SPDX-License-Identifier: Apache-2.0 // (Re-)generated by schema tool // >>>> DO NOT CHANGE THIS FILE! <<<< // Change the json schema instead #![allow(dead_code)] use wasmlib::*; use crate::*; pub struct ArrayOfArraysAppendCall { pub func: ScFunc, pub params: MutableArrayOfArraysAppendParams, } pub struct ArrayOfArraysClearCall { pub func: ScFunc, } pub struct ArrayOfArraysSetCall { pub func: ScFunc, pub params: MutableArrayOfArraysSetParams, } pub struct ArrayOfMapsClearCall { pub func: ScFunc, } pub struct ArrayOfMapsSetCall { pub func: ScFunc, pub params: MutableArrayOfMapsSetParams, } pub struct MapOfArraysAppendCall { pub func: ScFunc, pub params: MutableMapOfArraysAppendParams, } pub struct MapOfArraysClearCall { pub func: ScFunc, pub params: MutableMapOfArraysClearParams, } pub struct MapOfArraysSetCall { pub func: ScFunc, pub params: MutableMapOfArraysSetParams, } pub struct MapOfMapsClearCall { pub func: ScFunc, pub params: MutableMapOfMapsClearParams, } pub struct MapOfMapsSetCall { pub func: ScFunc, pub params: MutableMapOfMapsSetParams, } pub struct ParamTypesCall { pub func: ScFunc, pub params: MutableParamTypesParams, } pub struct RandomCall { pub func: ScFunc, } pub struct TriggerEventCall { pub func: ScFunc, pub params: MutableTriggerEventParams, } pub struct ArrayOfArraysLengthCall { pub func: ScView, pub results: ImmutableArrayOfArraysLengthResults, } pub struct ArrayOfArraysValueCall { pub func: ScView, pub params: MutableArrayOfArraysValueParams, pub results: ImmutableArrayOfArraysValueResults, } pub struct ArrayOfMapsValueCall { pub func: ScView, pub params: MutableArrayOfMapsValueParams, pub results: ImmutableArrayOfMapsValueResults, } pub struct BlockRecordCall { pub func: ScView, pub params: MutableBlockRecordParams, pub results: ImmutableBlockRecordResults, } pub struct BlockRecordsCall { pub func: ScView, pub params: MutableBlockRecordsParams, pub results: ImmutableBlockRecordsResults, } pub struct GetRandomCall 
{ pub func: ScView, pub results: ImmutableGetRandomResults, } pub struct IotaBalanceCall { pub func: ScView, pub results: ImmutableIotaBalanceResults, } pub struct MapOfArraysLengthCall { pub func: ScView, pub params: MutableMapOfArraysLengthParams, pub results: ImmutableMapOfArraysLengthResults, } pub struct MapOfArraysValueCall { pub func: ScView, pub params: MutableMapOfArraysValueParams, pub results: ImmutableMapOfArraysValueResults, } pub struct MapOfMapsValueCall { pub func: ScView, pub params: MutableMapOfMapsValueParams, pub results: ImmutableMapOfMapsValueResults, } pub struct ScFuncs { } impl ScFuncs { pub fn array_of_arrays_append(_ctx: &dyn ScFuncCallContext) -> ArrayOfArraysAppendCall { let mut f = ArrayOfArraysAppendCall { func: ScFunc::new(HSC_NAME, HFUNC_ARRAY_OF_ARRAYS_APPEND), params: MutableArrayOfArraysAppendParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn array_of_arrays_clear(_ctx: &dyn ScFuncCallContext) -> ArrayOfArraysClearCall { ArrayOfArraysClearCall { func: ScFunc::new(HSC_NAME, HFUNC_ARRAY_OF_ARRAYS_CLEAR), } } pub fn array_of_arrays_set(_ctx: &dyn ScFuncCallContext) -> ArrayOfArraysSetCall { let mut f = ArrayOfArraysSetCall { func: ScFunc::new(HSC_NAME, HFUNC_ARRAY_OF_ARRAYS_SET), params: MutableArrayOfArraysSetParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn array_of_maps_clear(_ctx: &dyn ScFuncCallContext) -> ArrayOfMapsClearCall { ArrayOfMapsClearCall { func: ScFunc::new(HSC_NAME, HFUNC_ARRAY_OF_MAPS_CLEAR), } } pub fn array_of_maps_set(_ctx: &dyn ScFuncCallContext) -> ArrayOfMapsSetCall { let mut f = ArrayOfMapsSetCall { func: ScFunc::new(HSC_NAME, HFUNC_ARRAY_OF_MAPS_SET), params: MutableArrayOfMapsSetParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn map_of_arrays_append(_ctx: &dyn ScFuncCallContext) -> MapOfArraysAppendCall { let mut f = MapOfArraysAppendCall { func: ScFunc::new(HSC_NAME, 
HFUNC_MAP_OF_ARRAYS_APPEND), params: MutableMapOfArraysAppendParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn map_of_arrays_clear(_ctx: &dyn ScFuncCallContext) -> MapOfArraysClearCall { let mut f = MapOfArraysClearCall { func: ScFunc::new(HSC_NAME, HFUNC_MAP_OF_ARRAYS_CLEAR), params: MutableMapOfArraysClearParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn map_of_arrays_set(_ctx: &dyn ScFuncCallContext) -> MapOfArraysSetCall { let mut f = MapOfArraysSetCall { func: ScFunc::new(HSC_NAME, HFUNC_MAP_OF_ARRAYS_SET), params: MutableMapOfArraysSetParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn map_of_maps_clear(_ctx: &dyn ScFuncCallContext) -> MapOfMapsClearCall { let mut f = MapOfMapsClearCall { func: ScFunc::new(HSC_NAME, HFUNC_MAP_OF_MAPS_CLEAR), params: MutableMapOfMapsClearParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn map_of_maps_set(_ctx: &dyn ScFuncCallContext) -> MapOfMapsSetCall { let mut f = MapOfMapsSetCall { func: ScFunc::new(HSC_NAME, HFUNC_MAP_OF_MAPS_SET), params: MutableMapOfMapsSetParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn param_types(_ctx: &dyn ScFuncCallContext) -> ParamTypesCall { let mut f = ParamTypesCall { func: ScFunc::new(HSC_NAME, HFUNC_PARAM_TYPES), params: MutableParamTypesParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn random(_ctx: &dyn ScFuncCallContext) -> RandomCall { RandomCall { func: ScFunc::new(HSC_NAME, HFUNC_RANDOM), } } pub fn trigger_event(_ctx: &dyn ScFuncCallContext) -> TriggerEventCall { let mut f = TriggerEventCall { func: ScFunc::new(HSC_NAME, HFUNC_TRIGGER_EVENT), params: MutableTriggerEventParams { proxy: Proxy::nil() }, }; ScFunc::link_params(&mut f.params.proxy, &f.func); f } pub fn array_of_arrays_length(_ctx: &dyn 
ScViewCallContext) -> ArrayOfArraysLengthCall { let mut f = ArrayOfArraysLengthCall { func: ScView::new(HSC_NAME, HVIEW_ARRAY_OF_ARRAYS_LENGTH), results: ImmutableArrayOfArraysLengthResults { proxy: Proxy::nil() }, }; ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn array_of_arrays_value(_ctx: &dyn ScViewCallContext) -> ArrayOfArraysValueCall { let mut f = ArrayOfArraysValueCall { func: ScView::new(HSC_NAME, HVIEW_ARRAY_OF_ARRAYS_VALUE), params: MutableArrayOfArraysValueParams { proxy: Proxy::nil() }, results: ImmutableArrayOfArraysValueResults { proxy: Proxy::nil() }, }; ScView::link_params(&mut f.params.proxy, &f.func); ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn array_of_maps_value(_ctx: &dyn ScViewCallContext) -> ArrayOfMapsValueCall { let mut f = ArrayOfMapsValueCall { func: ScView::new(HSC_NAME, HVIEW_ARRAY_OF_MAPS_VALUE), params: MutableArrayOfMapsValueParams { proxy: Proxy::nil() }, results: ImmutableArrayOfMapsValueResults { proxy: Proxy::nil() }, }; ScView::link_params(&mut f.params.proxy, &f.func); ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn block_record(_ctx: &dyn ScViewCallContext) -> BlockRecordCall { let mut f = BlockRecordCall { func: ScView::new(HSC_NAME, HVIEW_BLOCK_RECORD), params: MutableBlockRecordParams { proxy: Proxy::nil() }, results: ImmutableBlockRecordResults { proxy: Proxy::nil() }, }; ScView::link_params(&mut f.params.proxy, &f.func); ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn block_records(_ctx: &dyn ScViewCallContext) -> BlockRecordsCall { let mut f = BlockRecordsCall { func: ScView::new(HSC_NAME, HVIEW_BLOCK_RECORDS), params: MutableBlockRecordsParams { proxy: Proxy::nil() }, results: ImmutableBlockRecordsResults { proxy: Proxy::nil() }, }; ScView::link_params(&mut f.params.proxy, &f.func); ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn get_random(_ctx: &dyn ScViewCallContext) -> GetRandomCall { let mut f = GetRandomCall { func: 
ScView::new(HSC_NAME, HVIEW_GET_RANDOM), results: ImmutableGetRandomResults { proxy: Proxy::nil() }, }; ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn iota_balance(_ctx: &dyn ScViewCallContext) -> IotaBalanceCall { let mut f = IotaBalanceCall { func: ScView::new(HSC_NAME, HVIEW_IOTA_BALANCE), results: ImmutableIotaBalanceResults { proxy: Proxy::nil() }, }; ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn map_of_arrays_length(_ctx: &dyn ScViewCallContext) -> MapOfArraysLengthCall { let mut f = MapOfArraysLengthCall { func: ScView::new(HSC_NAME, HVIEW_MAP_OF_ARRAYS_LENGTH), params: MutableMapOfArraysLengthParams { proxy: Proxy::nil() }, results: ImmutableMapOfArraysLengthResults { proxy: Proxy::nil() }, }; ScView::link_params(&mut f.params.proxy, &f.func); ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn map_of_arrays_value(_ctx: &dyn ScViewCallContext) -> MapOfArraysValueCall { let mut f = MapOfArraysValueCall { func: ScView::new(HSC_NAME, HVIEW_MAP_OF_ARRAYS_VALUE), params: MutableMapOfArraysValueParams { proxy: Proxy::nil() }, results: ImmutableMapOfArraysValueResults { proxy: Proxy::nil() }, }; ScView::link_params(&mut f.params.proxy, &f.func); ScView::link_results(&mut f.results.proxy, &f.func); f } pub fn map_of_maps_value(_ctx: &dyn ScViewCallContext) -> MapOfMapsValueCall { let mut f = MapOfMapsValueCall { func: ScView::new(HSC_NAME, HVIEW_MAP_OF_MAPS_VALUE), params: MutableMapOfMapsValueParams { proxy: Proxy::nil() }, results: ImmutableMapOfMapsValueResults { proxy: Proxy::nil() }, }; ScView::link_params(&mut f.params.proxy, &f.func); ScView::link_results(&mut f.results.proxy, &f.func); f } }
rust
<filename>node_modules/.cache/babel-loader/3ac3b791b0482d0571cc07d0404cba26.json<gh_stars>0 {"ast":null,"code":"'use strict';\n\nvar d = require('d'),\n callable = require('es5-ext/object/valid-callable'),\n apply = Function.prototype.apply,\n call = Function.prototype.call,\n create = Object.create,\n defineProperty = Object.defineProperty,\n defineProperties = Object.defineProperties,\n hasOwnProperty = Object.prototype.hasOwnProperty,\n descriptor = {\n configurable: true,\n enumerable: false,\n writable: true\n},\n on,\n _once2,\n off,\n emit,\n methods,\n descriptors,\n base;\n\non = function on(type, listener) {\n var data;\n callable(listener);\n\n if (!hasOwnProperty.call(this, '__ee__')) {\n data = descriptor.value = create(null);\n defineProperty(this, '__ee__', descriptor);\n descriptor.value = null;\n } else {\n data = this.__ee__;\n }\n\n if (!data[type]) data[type] = listener;else if (typeof data[type] === 'object') data[type].push(listener);else data[type] = [data[type], listener];\n return this;\n};\n\n_once2 = function once(type, listener) {\n var _once, self;\n\n callable(listener);\n self = this;\n on.call(this, type, _once = function once() {\n off.call(self, type, _once);\n apply.call(listener, this, arguments);\n });\n _once.__eeOnceListener__ = listener;\n return this;\n};\n\noff = function off(type, listener) {\n var data, listeners, candidate, i;\n callable(listener);\n if (!hasOwnProperty.call(this, '__ee__')) return this;\n data = this.__ee__;\n if (!data[type]) return this;\n listeners = data[type];\n\n if (typeof listeners === 'object') {\n for (i = 0; candidate = listeners[i]; ++i) {\n if (candidate === listener || candidate.__eeOnceListener__ === listener) {\n if (listeners.length === 2) data[type] = listeners[i ? 
0 : 1];else listeners.splice(i, 1);\n }\n }\n } else {\n if (listeners === listener || listeners.__eeOnceListener__ === listener) {\n delete data[type];\n }\n }\n\n return this;\n};\n\nemit = function emit(type) {\n var i, l, listener, listeners, args;\n if (!hasOwnProperty.call(this, '__ee__')) return;\n listeners = this.__ee__[type];\n if (!listeners) return;\n\n if (typeof listeners === 'object') {\n l = arguments.length;\n args = new Array(l - 1);\n\n for (i = 1; i < l; ++i) {\n args[i - 1] = arguments[i];\n }\n\n listeners = listeners.slice();\n\n for (i = 0; listener = listeners[i]; ++i) {\n apply.call(listener, this, args);\n }\n } else {\n switch (arguments.length) {\n case 1:\n call.call(listeners, this);\n break;\n\n case 2:\n call.call(listeners, this, arguments[1]);\n break;\n\n case 3:\n call.call(listeners, this, arguments[1], arguments[2]);\n break;\n\n default:\n l = arguments.length;\n args = new Array(l - 1);\n\n for (i = 1; i < l; ++i) {\n args[i - 1] = arguments[i];\n }\n\n apply.call(listeners, this, args);\n }\n }\n};\n\nmethods = {\n on: on,\n once: _once2,\n off: off,\n emit: emit\n};\ndescriptors = {\n on: d(on),\n once: d(_once2),\n off: d(off),\n emit: d(emit)\n};\nbase = defineProperties({}, descriptors);\n\nmodule.exports = exports = function exports(o) {\n return o == null ? create(base) : defineProperties(Object(o), descriptors);\n};\n\nexports.methods = methods;","map":null,"metadata":{},"sourceType":"script"}
json
Comment avons-nous fini dans cette situation? How did we end up in this situation? L’entreprise a augmenté ses revenus. The company has increased its revenue. Attends, tu as perdu ton portefeuille! Wait, you’ve lost your wallet! Elle veut noter son idée d’entreprise. She wants to write down her business idea.
english
<reponame>Karkrieg/Bedrock-Sage {"translation-revision-date":"2022-02-07 14:49:57+0000","generator":"GlotPress\/3.0.0-alpha.2","domain":"messages","locale_data":{"messages":{"":{"domain":"messages","plural-forms":"nplurals=2; plural=n != 1;","lang":"en_GB"},"Create a classic widget layout with a title that\u2019s styled by your theme for your widget areas.":["Create a classic widget layout with a title that\u2019s styled by your theme for your widget areas."],"Widget Group":["Widget group"],"The \"%s\" block was affected by errors and may not function properly. Check the developer tools for more details.":["The \"%s\" block was affected by errors and may not function properly. Check the developer tools for more details."],"Move to widget area":["Move to widget area"],"Widget is missing.":["Widget is missing."],"No preview available.":["No preview available."],"Legacy Widget Preview":["Legacy Widget Preview"],"Select widget":["Select widget"],"Convert to blocks":["Convert to blocks"],"Move to":["Move to"],"Legacy Widget":["Legacy Widget"],"Select a legacy widget to display:":["Select a legacy widget to display:"],"There are no widgets available.":["There are no widgets available."],"Save":["Save"],"Title":["Title"]}},"comment":{"reference":"wp-includes\/js\/dist\/widgets.js"}}
json
{"id":"acb3ca81-81f9-47a1-a622-8ccbbd598a7d","playerTags":[],"teamTags":["36569151-a2fb-43c1-9df7-2df512424c82"],"gameTags":[],"metadata":{"totalShames":62,"totalShamings":47},"created":"2021-03-17T16:25:02.225Z","season":13,"tournament":-1,"type":155,"day":48,"phase":4,"category":3,"description":"The Millennials shamed the Pies.","nuts":2}
json
<filename>index.js /** * Start web application */ const app = require('./lib/app') const config = require('./lib/config') require('@tensorflow/tfjs-node'); app.listen(config.port, function () { console.log('\n----------------------------') console.log('Node app is running!') console.log('\tENV: \t%s', config.env) console.log('\tPORT: \t%s', config.port) console.log('\tPORTAL PREFIX: \t%s', config.portalPrefix) console.log('\tNODE VERSION: \t%s', process.version) console.log('\tSTARTUP TIME: \t%s', new Date()) console.log('----------------------------\n') })
javascript
The Kailasa Temple @ Aurangabad. Why is this Temple not considered as one of the wonders of the World, is one of the many questions that arise when you visit this ancient Monolithic Temple? Kailasa Temple is part of an extensive cave complex that is more than 1200 years old. Let’s put ourselves in the shoes of the architect, sculptors and engineers who designed, crafted and worked out the mechanics of this unbelievable structure. Remember this type of construction requires an acute sense of pre-planning, mathematical genius and mindboggling precision as you cannot make alterations if an error was made, keeping in mind this structure which was built from top to bottom, unlike other structures which are built from bottom (foundation) to top (Shikhar). The Hindu king ordered the temple built after he prayed to Shiva to save his wife from sickness. Having gotten the orders, would the architect have had envisioned this entire complex before starting the work or would he have started with the "let’s go with the flow" attitude (I’m sure this would have not been the case). Did he make drawings and if he did, how did he manage to keep them till the temple complex was completed? Did he draw sketches on something or did he instruct the sculptors and then they took his vision forward? The timeline for the Kailasa temple is said to be around 20 years! How motivated would all the labour, masons, sculptors and the entire team be to pull it off in so less time! What level of coordination would it have taken within the team to complete this masterpiece? It not only is an architectural marvel but also a marvel of time management and handling human resources. The so-called engineers of the time, how did they calculate the load a column would take and the depth of the beam should be, how deep could they cut the rock? How did they convince the workers to carve a temple from a giant mountain without it collapsing? 
Did they have calculations for the size of the cantilever (an overhang projecting out of the support system of columns and beams) or did they leave it to fate. (The later seems improbable as it has survived for more than a thousand years). How did they manage to align and keep the geometry of the temple so precise? Even though India had a rich tradition of sculptors and artists but what they have achieved here in this temple complex is next to impossible. Basalt stone being one of the most difficult stones to carve in, it is not soft or workable as the stone temples we see in other parts of India. Just to think of it that they achieved all this with a pair of hammer and chisel is incredible. Imaging sleeping on a scaffolding and chiselling a mountain surface above you! The sheer scale, the carvings and intricacy of detailing will put to shame all our modern technology and equipment’s to shame. To sum up and bring to light the magnitude of Kailasa temple. It was built in the 8th century; it took about 500 years to build it. A megalith carved from a single mountain block is considered as one of the most remarkable rock cut temples of the world. The 107 feet high temple is a part of the Ellora cave complex which spans for abut 2kms. The rock cut temple was made in a U shape, 50 m deep and about 2, 00,000 tonnes of rock was removed to shape it. All of this with a pair of Chisel and Hammer!
english
package com.lifemenu.controller; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import com.lifemenu.dto.RefndDto; import com.lifemenu.service.IRefndService; @Controller @RequestMapping("/Refnd") public class RefndController { @Autowired private IRefndService refndService; //환불페이지 @RequestMapping("/refndForm") public String refndForm(Model model, @RequestParam(value="SETLE_CODE") String setleCode) { return refndService.selectOneSetle(model, setleCode); } //환불진행 @RequestMapping("insertRefnd") public String insertRefnd(Model model, RefndDto refndDto) throws Exception { return refndService.insertRefnd(model, refndDto); } }
java
# To-do-List-using-Javascript
markdown
New Delhi: The flight from Jeddah in Saudi Arabia, carrying 367 people from strife-torn Sudan, landed at the Delhi International Airport on Wednesday night. Among the passengers, 19 were Malayalis. As informed earlier by K V Thomas, special representative of the Kerala Government in Delhi, all 19 of them will be provided with food and accommodation at Kerala House. They will be safely brought back to Kerala at the expense of the state government. Union Minister of State for External Affairs V Muraleedharan, in a tweet posted earlier today, said: "Happy to see off 360 Indians at Jeddah Airport in a flight bound for New Delhi. Under Operation Kaveri, the government is working relentlessly to evacuate Indian nationals from Sudan and bring them home safely. " On Wednesday morning, an IAF aircraft carrying the third batch comprising 135 Indians from Port Sudan arrived in Jeddha. Muraleedharan is coordinating the rescue operations by camping in Jeddah. India launched the mission 'Operation Kaveri' on Monday. India has set up a control room in the Saudi Arabian city of Jeddah to facilitate the evacuation of Indians from Sudan. Sudan has been witnessing deadly fighting between the country's army and a paramilitary group for the last 12 days that has reportedly left around 400 people dead. India stepped up its efforts to evacuate the Indians from Sudan as a 72-hour truce was agreed to between the Sudanese army and the paramilitary Rapid Support Forces (RSF) following intense negotiations. Several states have opened help desks and announced assistance like free travel and lodging for Indians evacuated from strife-torn Sudan once they arrive in the country. The Kerala government said it would make necessary arrangements to bring Malayalees evacuated from Sudan by the Centre to the state. 
A cabinet meeting chaired by Chief Minister Pinarayi Vijayan decided that Non-Resident Keralites Affairs (NORKA) department would assist evacuees at various airports, according to the chief minister's office. Uttar Pradesh government on Wednesday opened a help desk at the resident commissioner office in Delhi for people from the state trapped in Sudan, a senior official said in Lucknow. Those coming from Sudan can contact Neeraj Singh, assistant review officer on 8920808414 or Ashish Kumar, protocol assistant on 9313434088 or Whatsapp them about their problems," an order issued by Additional Resident Commissioner, New Delhi, Saumya Srivastava said. The Rajasthan government has decided to bear the transportation expense of all migrants from the state after they land in Delhi. Chief Resident Commissioner, Bikaner House, Shubra Singh urged all those affected from Rajasthan to furnish details to state authorities. Dhiraj Srivastava who is the Commissioner of Rajasthan Foundation - a government organisation that works to strengthen relationships between the state and non-Rajasthani residents - said the state government has decided to assist the returnees. "It will make sure that all the returnees from Sudan reach their families in Rajasthan free of cost from Delhi irrespective of whether they are travelling by flight, bus or any other form of transport, Srivastava said in a statement. The Madhya Pradesh government has started a helpline to assist people from the state as well as other parts of the country who are stranded in strife-torn Sudan. Those stranded in Sudan and are keen to come back to MP or other states can contact the helpline at 91-755-2555582 and share their details, the official said. The state government will coordinate with the Centre to facilitate their evacuation, he said. The government has appointed the state's home secretary Gaurav Rajput as the nodal officer for the purpose. 
Besides, relatives of Madhya Pradesh residents stuck in Sudan can also contact the CM Helpline 181 to register details about their near and dear ones for getting assistance, the official said. According to the official, the information can also be registered on the CM Helpline Portal: http://www. cmhelpline. mp. gov. in. Two military transport aircraft of the Indian Air Force (IAF) evacuated over 250 Indians from Sudan after a naval ship rescued another 278 citizens from the strife-torn country. The total number of Indians evacuated so far from Sudan stands at around 530, according to official data. The first batch of 278 Indians was evacuated by Indian Navy's frontline ship INS Sumedha on Tuesday. Apart from the Sudanese authorities, the MEA and the Indian embassy in Sudan have been in regular touch with the UN, Saudi Arabia, the UAE, Egypt, and the US among others. At a high-level meeting on Friday last, Prime Minister Narendra Modi issued directions for the preparation of contingency plans to evacuate Indians from Sudan. (With PTI inputs)
english
<reponame>poulosar/nfl-database<filename>profile_data/A-B/1167_Khalif-Barnes.json {"player_id": 1167, "name": "<NAME>", "position": "T", "height": "6-6", "weight": "320", "current_team": null, "birth_date": "1982-04-21", "birth_place": "San Diego, CA", "death_date": null, "college": "Washington", "high_school": "Mount Miguel, CA", "draft_team": "Jacksonville Jaguars", "draft_round": "2", "draft_position": "52", "draft_year": "2005", "current_salary": null, "hof_induction_year": null}
json
<filename>oda-connectors/dnp3/src/test/java/es/amplia/oda/connector/dnp3/DNP3ConnectorTest.java package es.amplia.oda.connector.dnp3; import es.amplia.oda.core.commons.interfaces.ScadaConnector; import es.amplia.oda.core.commons.interfaces.ScadaDispatcher; import es.amplia.oda.core.commons.osgi.proxies.ScadaTableInfoProxy; import es.amplia.oda.core.commons.utils.ServiceRegistrationManager; import es.amplia.oda.connector.dnp3.configuration.DNP3ConnectorConfiguration; import com.automatak.dnp3.*; import com.automatak.dnp3.impl.DNP3ManagerFactory; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.internal.util.reflection.Whitebox; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import static org.junit.Assert.*; import static org.mockito.Mockito.*; @RunWith(PowerMockRunner.class) @PrepareForTest({ DNP3Connector.class, DNP3ManagerFactory.class }) public class DNP3ConnectorTest { // Do not load the opendnp3 native libs static { System.setProperty("com.automatak.dnp3.nostaticload", ""); } private static final String TEST_CHANNEL_ID = "testChannel"; private static final String TEST_OUTSTATION_ID = "testOutstation"; private static final String TEST_IP_ADDRESS = "0.0.0.0"; private static final int TEST_IP_PORT = 20000; private static final int TEST_LOCAL_DEVICE_DNP_ADDRESS = 1; private static final int TEST_REMOTE_DEVICE_DNP_ADDRESS = 2; private static final int TEST_EVENT_BUFFER_SIZE = 10; private static final int TEST_LOG_LEVEL = 0; private static final byte TEST_DATA_QUALITY = 0x01; private static final int TEST_INDEX = 1; private static final long TEST_TIMESTAMP = System.currentTimeMillis(); private static final String MANAGER_FIELD_NAME = "manager"; private static final String CHANNEL_LISTENER_FIELD_NAME = "channelListener"; private static final String CHANNEL_FIELD_NAME = 
"channel"; private static final String OUTSTATION_FIELD_NAME = "outstation"; @Mock private ScadaTableInfoProxy mockedTableInfo; @Mock private ScadaDispatcher mockedDispatcher; @Mock private ServiceRegistrationManager<ScadaConnector> mockedScadaConnectorRegistrationManager; private DNP3Connector testConnector; @Mock private DNP3LogHandler mockedLogHandler; @Mock private DNP3Manager mockedManager; @Mock private DNP3ChannelListener mockedListener; @Mock private Channel mockedChannel; @Mock private ScadaCommandHandler mockedCommandHandler; @Mock private Outstation mockedOutstation; @Before public void setUp() throws Exception { PowerMockito.whenNew(DNP3LogHandler.class).withAnyArguments().thenReturn(mockedLogHandler); PowerMockito.mockStatic(DNP3ManagerFactory.class); PowerMockito.when(DNP3ManagerFactory.createManager(any(LogHandler.class))).thenReturn(mockedManager); testConnector = new DNP3Connector(mockedTableInfo, mockedDispatcher, mockedScadaConnectorRegistrationManager); } @Test public void testConstructor() throws Exception { assertNotNull(testConnector); PowerMockito.verifyNew(DNP3LogHandler.class).withNoArguments(); PowerMockito.verifyStatic(DNP3ManagerFactory.class); DNP3ManagerFactory.createManager(eq(mockedLogHandler)); } @Test public void testLoadConfiguration() throws Exception { DNP3ConnectorConfiguration testConfiguration = DNP3ConnectorConfiguration.builder() .channelIdentifier(TEST_CHANNEL_ID) .outstationIdentifier(TEST_OUTSTATION_ID) .ipAddress(TEST_IP_ADDRESS) .ipPort(TEST_IP_PORT) .localDeviceDNP3Address(TEST_LOCAL_DEVICE_DNP_ADDRESS) .remoteDeviceDNP3Address(TEST_REMOTE_DEVICE_DNP_ADDRESS) .eventBufferSize(TEST_EVENT_BUFFER_SIZE) .logLevel(TEST_LOG_LEVEL) .build(); Whitebox.setInternalState(testConnector, MANAGER_FIELD_NAME, mockedManager); PowerMockito.whenNew(DNP3ChannelListener.class).withAnyArguments().thenReturn(mockedListener); when(mockedManager.addTCPServer(anyString(), anyInt(), any(ChannelRetry.class), anyString(), anyInt(), 
any(ChannelListener.class))).thenReturn(mockedChannel); when(mockedChannel.addOutstation(anyString(), any(CommandHandler.class), any(OutstationApplication.class), any(OutstationStackConfig.class))).thenReturn(mockedOutstation); testConnector.loadConfiguration(testConfiguration); PowerMockito.verifyNew(DNP3ChannelListener.class).withNoArguments(); verify(mockedManager).addTCPServer(eq(TEST_CHANNEL_ID), eq(TEST_LOG_LEVEL), any(ChannelRetry.class), eq(TEST_IP_ADDRESS), eq(TEST_IP_PORT), eq(mockedListener)); verify(mockedTableInfo).getNumBinaryInputs(); verify(mockedTableInfo).getNumDoubleBinaryInputs(); verify(mockedTableInfo).getNumAnalogInputs(); verify(mockedTableInfo).getNumCounters(); verify(mockedTableInfo).getNumFrozenCounters(); verify(mockedTableInfo).getNumBinaryOutputs(); verify(mockedTableInfo).getNumAnalogOutputs(); assertNotNull(Whitebox.getInternalState(testConnector, "outstationStackConfig")); } @Test public void testLoadConfigurationAlreadyLoadedConfiguration() throws Exception { DNP3ConnectorConfiguration testConfiguration = DNP3ConnectorConfiguration.builder() .channelIdentifier(TEST_CHANNEL_ID) .outstationIdentifier(TEST_OUTSTATION_ID) .ipAddress(TEST_IP_ADDRESS) .ipPort(TEST_IP_PORT) .localDeviceDNP3Address(TEST_LOCAL_DEVICE_DNP_ADDRESS) .remoteDeviceDNP3Address(TEST_REMOTE_DEVICE_DNP_ADDRESS) .eventBufferSize(TEST_EVENT_BUFFER_SIZE) .logLevel(TEST_LOG_LEVEL) .build(); Channel oldMockedChannel = mock(Channel.class); Outstation oldMockedOutstation = mock(Outstation.class); Whitebox.setInternalState(testConnector, MANAGER_FIELD_NAME, mockedManager); Whitebox.setInternalState(testConnector, CHANNEL_FIELD_NAME, oldMockedChannel); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, oldMockedOutstation); PowerMockito.whenNew(DNP3ChannelListener.class).withAnyArguments().thenReturn(mockedListener); when(mockedManager.addTCPServer(anyString(), anyInt(), any(ChannelRetry.class), anyString(), anyInt(), 
any(ChannelListener.class))).thenReturn(mockedChannel); PowerMockito.whenNew(ScadaCommandHandler.class).withAnyArguments().thenReturn(mockedCommandHandler); when(mockedChannel.addOutstation(anyString(), any(CommandHandler.class), any(OutstationApplication.class), any(OutstationStackConfig.class))).thenReturn(mockedOutstation); testConnector.loadConfiguration(testConfiguration); testConnector.init(); verify(mockedScadaConnectorRegistrationManager).unregister(); verify(oldMockedOutstation).shutdown(); verify(oldMockedChannel).shutdown(); PowerMockito.verifyNew(DNP3ChannelListener.class).withNoArguments(); verify(mockedManager).addTCPServer(eq(TEST_CHANNEL_ID), eq(TEST_LOG_LEVEL), any(ChannelRetry.class), eq(TEST_IP_ADDRESS), eq(TEST_IP_PORT), any(ChannelListener.class)); } @Test public void testInit() throws Exception { OutstationStackConfig mockedOutstationStackConfig = mock(OutstationStackConfig.class); Whitebox.setInternalState(testConnector, CHANNEL_FIELD_NAME, mockedChannel); Whitebox.setInternalState(testConnector, "outstationIdentifier", TEST_OUTSTATION_ID); Whitebox.setInternalState(testConnector, "outstationStackConfig", mockedOutstationStackConfig); PowerMockito.whenNew(ScadaCommandHandler.class).withAnyArguments().thenReturn(mockedCommandHandler); when(mockedChannel.addOutstation(anyString(), any(CommandHandler.class), any(OutstationApplication.class), any(OutstationStackConfig.class))).thenReturn(mockedOutstation); testConnector.init(); PowerMockito.verifyNew(ScadaCommandHandler.class).withArguments(eq(mockedDispatcher)); verify(mockedChannel).addOutstation(eq(TEST_OUTSTATION_ID), eq(mockedCommandHandler), any(OutstationApplication.class), eq(mockedOutstationStackConfig)); verify(mockedOutstation).enable(); verify(mockedScadaConnectorRegistrationManager).register(eq(testConnector)); } @Test @SuppressWarnings("ConstantConditions") public void testUplinkBoolean() throws Exception { boolean testValue = true; String testType = "type"; OutstationChangeSet 
mockedChangeSet = mock(OutstationChangeSet.class); BinaryInput mockedBinaryInput = mock(BinaryInput.class); Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); PowerMockito.whenNew(OutstationChangeSet.class).withNoArguments().thenReturn(mockedChangeSet); PowerMockito.whenNew(BinaryInput.class).withAnyArguments().thenReturn(mockedBinaryInput); testConnector.uplink(TEST_INDEX, testValue, testType, TEST_TIMESTAMP); verify(mockedListener).isOpen(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(BinaryInput.class).withArguments(eq(testValue), eq(TEST_DATA_QUALITY), eq(TEST_TIMESTAMP)); mockedChangeSet.update(eq(mockedBinaryInput), eq(TEST_INDEX)); mockedOutstation.apply(eq(mockedChangeSet)); } @Test public void testUplinkTrueAsString() throws Exception { String testValue = Boolean.TRUE.toString(); String testType = "type"; OutstationChangeSet mockedChangeSet = mock(OutstationChangeSet.class); BinaryInput mockedBinaryInput = mock(BinaryInput.class); Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); PowerMockito.whenNew(OutstationChangeSet.class).withNoArguments().thenReturn(mockedChangeSet); PowerMockito.whenNew(BinaryInput.class).withAnyArguments().thenReturn(mockedBinaryInput); testConnector.uplink(TEST_INDEX, testValue, testType, TEST_TIMESTAMP); verify(mockedListener).isOpen(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(BinaryInput.class).withArguments(eq(Boolean.TRUE), eq(TEST_DATA_QUALITY), 
eq(TEST_TIMESTAMP)); mockedChangeSet.update(eq(mockedBinaryInput), eq(TEST_INDEX)); mockedOutstation.apply(eq(mockedChangeSet)); } @Test public void testUplinkFalseAsString() throws Exception { String testValue = Boolean.FALSE.toString(); String testType = "type"; OutstationChangeSet mockedChangeSet = mock(OutstationChangeSet.class); BinaryInput mockedBinaryInput = mock(BinaryInput.class); Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); PowerMockito.whenNew(OutstationChangeSet.class).withNoArguments().thenReturn(mockedChangeSet); PowerMockito.whenNew(BinaryInput.class).withAnyArguments().thenReturn(mockedBinaryInput); testConnector.uplink(TEST_INDEX, testValue, testType, TEST_TIMESTAMP); verify(mockedListener).isOpen(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(BinaryInput.class).withArguments(eq(Boolean.FALSE), eq(TEST_DATA_QUALITY), eq(TEST_TIMESTAMP)); mockedChangeSet.update(eq(mockedBinaryInput), eq(TEST_INDEX)); mockedOutstation.apply(eq(mockedChangeSet)); } @Test public void testUplinkInt() throws Exception { int testValue = 1; String testType = "type"; OutstationChangeSet mockedChangeSet = mock(OutstationChangeSet.class); AnalogInput mockedAnalogInput = mock(AnalogInput.class); Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); PowerMockito.whenNew(OutstationChangeSet.class).withNoArguments().thenReturn(mockedChangeSet); PowerMockito.whenNew(AnalogInput.class).withAnyArguments().thenReturn(mockedAnalogInput); testConnector.uplink(TEST_INDEX, testValue, testType, TEST_TIMESTAMP); verify(mockedListener).isOpen(); 
PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(AnalogInput.class) .withArguments(eq((double) testValue), eq(TEST_DATA_QUALITY), eq(TEST_TIMESTAMP)); mockedChangeSet.update(eq(mockedAnalogInput), eq(TEST_INDEX)); mockedOutstation.apply(eq(mockedChangeSet)); } @Test public void testUplinkDouble() throws Exception { double testValue = 18.50; String testType = "type"; OutstationChangeSet mockedChangeSet = mock(OutstationChangeSet.class); AnalogInput mockedAnalogInput = mock(AnalogInput.class); Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); PowerMockito.whenNew(OutstationChangeSet.class).withNoArguments().thenReturn(mockedChangeSet); PowerMockito.whenNew(AnalogInput.class).withAnyArguments().thenReturn(mockedAnalogInput); testConnector.uplink(TEST_INDEX, testValue, testType, TEST_TIMESTAMP); verify(mockedListener).isOpen(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(AnalogInput.class).withArguments(eq(testValue), eq(TEST_DATA_QUALITY), eq(TEST_TIMESTAMP)); mockedChangeSet.update(eq(mockedAnalogInput), eq(TEST_INDEX)); mockedOutstation.apply(eq(mockedChangeSet)); } @Test public void testUplinkDoubleAsString() throws Exception { double testValue = 18.50; String testValueAsString = Double.toString(testValue); String testType = "type"; OutstationChangeSet mockedChangeSet = mock(OutstationChangeSet.class); AnalogInput mockedAnalogInput = mock(AnalogInput.class); Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); PowerMockito.whenNew(OutstationChangeSet.class).withNoArguments().thenReturn(mockedChangeSet); 
PowerMockito.whenNew(AnalogInput.class).withAnyArguments().thenReturn(mockedAnalogInput); testConnector.uplink(TEST_INDEX, testValueAsString, testType, TEST_TIMESTAMP); verify(mockedListener).isOpen(); PowerMockito.verifyNew(OutstationChangeSet.class).withNoArguments(); PowerMockito.verifyNew(AnalogInput.class).withArguments(eq(testValue), eq(TEST_DATA_QUALITY), eq(TEST_TIMESTAMP)); mockedChangeSet.update(eq(mockedAnalogInput), eq(TEST_INDEX)); mockedOutstation.apply(eq(mockedChangeSet)); } @Test public void testUplinkDoubleAsStringNotValid() throws Exception { String testValueAsString = "18.50la"; String testType = "type"; OutstationChangeSet mockedChangeSet = mock(OutstationChangeSet.class); AnalogInput mockedAnalogInput = mock(AnalogInput.class); Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); PowerMockito.whenNew(OutstationChangeSet.class).withNoArguments().thenReturn(mockedChangeSet); PowerMockito.whenNew(AnalogInput.class).withAnyArguments().thenReturn(mockedAnalogInput); testConnector.uplink(TEST_INDEX, testValueAsString, testType, TEST_TIMESTAMP); verify(mockedListener).isOpen(); verify(mockedOutstation, never()).apply(any(ChangeSet.class)); } @Test public void testUplinkConnectorNotConfigured() { Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); when(mockedListener.isOpen()).thenReturn(false); testConnector.uplink(TEST_INDEX, 1, "type", TEST_TIMESTAMP); verify(mockedListener).isOpen(); verify(mockedOutstation, never()).apply(any(ChangeSet.class)); } @Test public void testUplinkNotScadaData() { Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); when(mockedListener.isOpen()).thenReturn(true); testConnector.uplink(TEST_INDEX, new Object(), 
new Object(), TEST_TIMESTAMP); verify(mockedListener).isOpen(); verify(mockedOutstation, never()).apply(any(ChangeSet.class)); } @Test public void testIsConnected() { Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); when(mockedListener.isOpen()).thenReturn(true); boolean connected = testConnector.isConnected(); assertTrue(connected); } @Test public void testIsConnectedNullDnpChannelListener() { Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, null); boolean connected = testConnector.isConnected(); assertFalse(connected); } @Test public void testIsConnectedNoOpenChannelState() { Whitebox.setInternalState(testConnector, CHANNEL_LISTENER_FIELD_NAME, mockedListener); when(mockedListener.isOpen()).thenReturn(false); boolean connected = testConnector.isConnected(); assertFalse(connected); } @Test public void testClose() { Whitebox.setInternalState(testConnector, MANAGER_FIELD_NAME, mockedManager); Whitebox.setInternalState(testConnector, CHANNEL_FIELD_NAME, mockedChannel); Whitebox.setInternalState(testConnector, OUTSTATION_FIELD_NAME, mockedOutstation); testConnector.close(); verify(mockedScadaConnectorRegistrationManager).unregister(); verify(mockedOutstation).shutdown(); verify(mockedChannel).shutdown(); verify(mockedManager).shutdown(); } @Test public void testCloseNotConfigurationLoaded() { Whitebox.setInternalState(testConnector, MANAGER_FIELD_NAME, mockedManager); testConnector.close(); verify(mockedManager).shutdown(); } }
java
<filename>sampledata.json {"timestamp":"2018-10-19T05:05:06.123Z","method":"GET","endpoint":"/one","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id2","trace.parent_id":"id1"} {"timestamp":"2018-10-19T05:05:07.123Z","method":"GET","endpoint":"/baz","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id3","trace.parent_id":"id2"} {"timestamp":"2018-10-19T05:05:06.123Z","method":"GET","endpoint":"/one","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id4","trace.parent_id":"id1"} {"timestamp":"2018-10-19T05:05:07.123Z","method":"GET","endpoint":"/one","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id5","trace.parent_id":"id4"} {"timestamp":"2018-10-19T05:05:08.123Z","method":"GET","endpoint":"/bar","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id6","trace.parent_id":"id5"} {"timestamp":"2018-10-19T05:05:08.123Z","method":"GET","endpoint":"/one","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id7","trace.parent_id":"id1"} {"timestamp":"2018-10-19T05:05:08.123Z","method":"GET","endpoint":"/one","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id8","trace.parent_id":"id7"} {"timestamp":"2018-10-19T05:05:08.123Z","method":"GET","endpoint":"/bar","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id9","trace.parent_id":"id7"} {"timestamp":"2018-10-19T05:05:06.123Z","method":"GET","endpoint":"/one","shard":"users","duration_ms":32,"trace.trace_id":"eyedee","trace.span_id":"id1"}
json
<gh_stars>1-10 { "name": "parcel-plugin-stringify-anything", "version": "1.2.0", "description": "Parcel plugin for importing any file types as string", "main": "./src/index.js", "scripts": { "prettier": "prettier '@(src|demo)/**/*.@(js|html|json)' --write", "test": "echo \"Error: no test specified\" && exit 1" }, "repository": { "type": "git", "url": "git+https://github.com/LeetCode-OpenSource/parcel-plugin-stringify-anything.git" }, "keywords": [ "parcel", "parcel-bundler", "parcel-plugin", "stringify", "string" ], "bugs": { "url": "https://github.com/LeetCode-OpenSource/parcel-plugin-stringify-anything/issues" }, "homepage": "https://github.com/LeetCode-OpenSource/parcel-plugin-stringify-anything#readme", "author": "LeetCode front-end team", "license": "MIT", "husky": { "hooks": { "pre-commit": "lint-staged" } }, "lint-staged": { "*.{js|html|json}": [ "prettier --write", "git add" ] }, "devDependencies": { "husky": "^4.0.0", "lint-staged": "^10.0.1", "parcel-bundler": "^1.10.3", "prettier": "2.2.1" }, "peerDependencies": { "parcel-bundler": "^1.10.3" } }
json
153 Written Answers CHAITRA 6, 1895 (SAKA) (b) whether it will affect the power supply in Delhi; (c) how much power is supplied by D.E.S.U. to Haryana; and (d) the additional quantity which D.E.S. U. has decided to supply to Haryana ? THE DEPUTY MINISTER IN THE MINISTRY OF IRRIGATION AND POWER (SHRI BALGOVIND VERMA): (a) As per the agreement with the Haryana State Electricity Board, surplus power available with DESU has been agreed to be supplied to Haryana. (b) This will not affect power supply position in Delhi. (c) and (d). Haryana has one-third share of total generation from 3×62 5 MW units numbers 2, 3 & 4. at Indraprastha Extension Station of DESU. About 10 lakh units per day over and above Haryana's share are being supplied by DFSU to Haryana at present. Memorandum submitted by North Eastern Railway Mazdoor Union to General Manager, North Eastern Railway at Narkatia Ganj 4858. SHRI BHOGENDRA JHA : Will the Minister of RAIL WAYS be pleased to state: (a) whether Railway employees of Samastipur Division had staged a demonstration on 12th January, 1973 at Narkatiaganj and a memorandum was submitted to the General Manager, North Eastern Railway camping at Narkatiaganj, by the North Eastern Railway Mazdoor Union; and (b) if so, the contents of the memorandum and Government reaction thereto? THE DEPUTY MINISTER IN THE MINISTRY OF RAILWAYS (SHRI MOHD. SHAFI QURESHI) : (a) No. (b) Does not arise. Written Answers 154 Construction of sluice gates-comBridges on river Khiroi in Bihar 4859. SHRI BHOGFNDRA JHA: Will the Minister of IRRIGATION AND POWER be pleased to refer to the reply given to Unstarred Question No. 1146 on the 27th February, 1973 regarding construction of Shuice Gates-cum-Bridges on River Khiroi in Bihar and state the estimated expenditure and time schedule for the completion of sluice-gates-cum-bridges on River Khiroi in District Darbhanga (Bihar) ? 
THE DEPUTY MINISTER IN THE MINISTRY OF IRRIGATION AND POWER (SHRI BALGOVIND VERMA): The State Government of Bihar have reported that further hydrological observations are to be carried out during 1973 monsoon for the preparation of the scheme of Sluice-gates-cum-bridges on the river Khiroi in Darbhanga District. The estimated cost and schedule of construction will be available only after the scheme is finalised. Mass Deputation and Memorandum submitted to Railway Minister at Darbhanga by North Eastern Railway Mazdoor Union 4860. SHRI BHOGENDRA JHA: Will the Minister of RAILWAYS be pleased to state : (a) whether North Eastern Railway Mazdoor union had waited upon the Railway Minister in a mass deputation on the 24th February, 1973 at Darbhanga and submitted a memorandum to him; and (b) if so, contents of the memorandum and action taken thereon ? THE DEPUTY MINISTER IN THE MINISTRY OF RAILWAYS (SHRI MOHD. SHAFI QURESHI): (a) The Divisional Secretary of the N.E. Railway Written Answers Mazdoor union, Samstipur Division and the N.E. Railway Mazdoor Union Branch at Darbhanga each submitted a memoran dum to the Minister for Railways on 24-2-73 at Darbhanga. (b) The main demands contained in the memoranda are given in the attached statement. Many of the demands are of a general nature and do not specifically pertain to the Railways. The remaining demands are such as are generally settled through the Negotiating Machinery and Joint Consultative Machinery provided for the purpose. I. Demands as contained in the memorandum submitted by the Samastipur Divisional Branch of N.E. Railway Mazdoor Union. 1. Grant of Bonus to Railwaymen. 2. Payment of need based minimum wages. 3. Early publication of Third Pay commissions' Report. 4. No reduction in the retirement age. 5. Continued payment of House Rent Allowance to the staff transferred from Goarkhpur to Samastipur consequent on Divisionalisation of N.E. Railway with effect from 1-5-1969. II. 
Demands as contained in the memorandum submitted by the Darbhanga Branch of N.E. Railway Mazdoor Union. 1. Publication of Pay commissions' Report and payment of need based minimum wage. 2. Payment of Bonus to Railway employees @ 8.33%. 3. Revaluation of the Provident Fund Deposit on the basis of present -day value. Written Answers 156 4. Availability of essential commodities at subsidized rates to neutralize, the soaring price rise. 5. Provision of ration cards to all tailway employees irrespective of rural or úrtan region and supply of food grains at cheap rates. 6. Absorption of Casual Labour in Class IV and abolition of Casual Labour system. 7. Closure of Railway Hospitals and payment of medical allowance in lieu. 8. Timely supply of uniforms on the basis of the system prevalent prior to the Chinese and Pakistani aggressions. 9. Implementation of Mia Bhoy Tribunal's decisions. 10. Reduction of duty hours of Running Staff from 14 hours to 8 hours and provision of Vans for transportation of staff of goods trains of the 2nd shift from Darbhanga to Nirmali and Jaynagar. 11. Reduction in duty hours from 12 hours to 8 hours of some staff of Traffic, Engineering and Loco Departments like Pointsmen, Cabinmen, Box Porters etc. 12. Fixation of working hours of Open Line Subordinate Offices from 10.00 AM instead of 7.00 AM and reduction in working hours from 8 hours to 6 hours or payment of special allowance to the employees concerned. 13. Construction of quarter for all Gangmen. 14. Provision of Hand Pumps at all manned level crossing gates. 15. 20 % up gradation of skilled categories of Loco, Carriage, Electric and 'Workshop staff in accordance with Shankar Saran's Award. 16. Solution of problems arising out of the transfer of Signalling Staff of Gófákhpur and Marine Staff of Barari. CHAITRA 6, 1895 (SAKA) Written Answers 158 Delhi and Ghaziabad. Further doubling of 17.80 km. between Ghaziabad and Muradnagar is under consideration. 157 Written Answers 17. 
Payment of Washing Allowance to employees of medical department on monthly basis. 18. Reservation of seats in Lok Sabha and State Assemblies for the representatives of Railway employees on numerical 19. Construction of 300 quarters for railwav employees at Darbhanga. 20. Provision of a Middle School for children of over 1000 Railway employees. 21. Provision of cooperative canteen 22. Provision of a cycle stand in local offices like Loco Sheds, Stations, etc. 23. Provision of Hand Pumps in Railwav colonies for supplementing the shortage of water in emergencies. 24. Provision of a proper drainage system in Eastern Railway colony of Darbhanga. 25. Provision of accommodation for Cooperative Society at Darbhanga. 26. To make grants and construction of a Theatre for the development of Recreational Centre at Darbhanga. 27. Construction of a wall to prevent theft of coal from Loco Shed and Yard. 28. Arrangement for the payment of the arrears of dues of Railway employees. 29. Increase in staff in Electrical Department according to actual requirement. Double Railway Track Between Delhi and Meerut 4861. SHRI RAM PRAKASH : Will the Minister of RAILWAYS be pleased to (a) whether double Rail track is planned between Delhi and Meerut ; and (b) if so, the time by which the project would be completed ? THE DEPUTY MINISTER IN THE MINISTRY OF RAILWAYS (SHRI MOHD. SHAFI QURESHI): (a) and (b). Double line is already available between Use of Electricity in Government Offices 4862. SHRI DHARAMRAO AFZALPURKAR : Will the Minister of IRRIGATION AND POWER be pleased to state the steps Government have taken to reduce the use of electricity in the Central Government Offices in view of the shortage of electricity ? 
THE DEPUTY MINISTER IN THE MINISTRY OF IRRIGATION AND POWER (SHRI BALGOVIND VERMA): Instructions have been issued to all the Ministries and Departments of the Government of India that steps be taken in offices under their control to economise in the use of power to the maximum extent possible, by using fewer lights, fans, Air conditioner and other electrical appliances till such time as the power supply position improves. Railway Line from Tellicherry to Mysore 4863. SHRI C. K. CHANDRAPPAN : Will the Minister of RAILWAYS be pleased to state : (a) whether Government have conducted a survey for the construction of a Railway line from Tellicherry to Mysore via Coorg; (b) if so, the outcome thereof and the estimated expenditure thereon ; (c) whether Government received any representation that the construction of this line should be taken up soon; and (d) If so, the decision thereon ?
english
<filename>.expo/web/cache/development/babel-loader/ad50770f655835664e590af7553385c8.json {"ast":null,"code":"var _this = this,\n _jsxFileName = \"/Users/sumedhreddy/Documents/stealth/stealth-startup/src/components/PrimaryButton.tsx\";\n\nimport React from \"react\";\nimport StyleSheet from \"react-native-web/dist/exports/StyleSheet\";\nimport TouchableOpacity from \"react-native-web/dist/exports/TouchableOpacity\";\nimport { Text } from \"./Text\";\nimport { useTheme } from \"../theme\";\nexport var PrimaryButton = function PrimaryButton(_ref) {\n var title = _ref.title,\n onPress = _ref.onPress,\n style = _ref.style;\n\n var _useTheme = useTheme(),\n colors = _useTheme.colors,\n sizes = _useTheme.sizes;\n\n return React.createElement(TouchableOpacity, {\n onPress: onPress,\n style: [styles.container, {\n backgroundColor: colors.primaryColor,\n borderRadius: sizes.boxRadius\n }, style],\n __self: _this,\n __source: {\n fileName: _jsxFileName,\n lineNumber: 20,\n columnNumber: 5\n }\n }, React.createElement(Text, {\n style: styles.text,\n __self: _this,\n __source: {\n fileName: _jsxFileName,\n lineNumber: 31,\n columnNumber: 7\n }\n }, title));\n};\nvar styles = StyleSheet.create({\n container: {\n paddingVertical: 15,\n alignItems: \"center\",\n borderColor: \"white\",\n borderWidth: 0,\n shadowColor: \"#00000020\",\n shadowOpacity: 1,\n shadowRadius: 4,\n elevation: 4,\n shadowOffset: {\n width: 0,\n height: 4\n }\n },\n text: {\n color: \"white\",\n fontSize: 16,\n fontFamily: \"default-medium\"\n 
}\n});","map":{"version":3,"sources":["/Users/sumedhreddy/Documents/stealth/stealth-startup/src/components/PrimaryButton.tsx"],"names":["React","Text","useTheme","PrimaryButton","title","onPress","style","colors","sizes","styles","container","backgroundColor","primaryColor","borderRadius","boxRadius","text","StyleSheet","create","paddingVertical","alignItems","borderColor","borderWidth","shadowColor","shadowOpacity","shadowRadius","elevation","shadowOffset","width","height","color","fontSize","fontFamily"],"mappings":";;;AAAA,OAAOA,KAAP,MAAkB,OAAlB;;;AAOA,SAASC,IAAT;AACA,SAASC,QAAT;AAQA,OAAO,IAAMC,aAA+B,GAAG,SAAlCA,aAAkC,OAA+B;AAAA,MAA5BC,KAA4B,QAA5BA,KAA4B;AAAA,MAArBC,OAAqB,QAArBA,OAAqB;AAAA,MAAZC,KAAY,QAAZA,KAAY;;AAC5E,kBAA0BJ,QAAQ,EAAlC;AAAA,MAAQK,MAAR,aAAQA,MAAR;AAAA,MAAgBC,KAAhB,aAAgBA,KAAhB;;AACA,SACE,oBAAC,gBAAD;AACE,IAAA,OAAO,EAAEH,OADX;AAEE,IAAA,KAAK,EAAE,CACLI,MAAM,CAACC,SADF,EAEL;AACEC,MAAAA,eAAe,EAAEJ,MAAM,CAACK,YAD1B;AAEEC,MAAAA,YAAY,EAAEL,KAAK,CAACM;AAFtB,KAFK,EAMLR,KANK,CAFT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAWE,oBAAC,IAAD;AAAM,IAAA,KAAK,EAAEG,MAAM,CAACM,IAApB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAA2BX,KAA3B,CAXF,CADF;AAeD,CAjBM;AAmBP,IAAMK,MAAM,GAAGO,UAAU,CAACC,MAAX,CAAkB;AAC/BP,EAAAA,SAAS,EAAE;AACTQ,IAAAA,eAAe,EAAE,EADR;AAETC,IAAAA,UAAU,EAAE,QAFH;AAGTC,IAAAA,WAAW,EAAE,OAHJ;AAITC,IAAAA,WAAW,EAAE,CAJJ;AAKTC,IAAAA,WAAW,EAAE,WALJ;AAMTC,IAAAA,aAAa,EAAE,CANN;AAOTC,IAAAA,YAAY,EAAE,CAPL;AAQTC,IAAAA,SAAS,EAAE,CARF;AASTC,IAAAA,YAAY,EAAE;AACZC,MAAAA,KAAK,EAAE,CADK;AAEZC,MAAAA,MAAM,EAAE;AAFI;AATL,GADoB;AAe/Bb,EAAAA,IAAI,EAAE;AAAEc,IAAAA,KAAK,EAAE,OAAT;AAAkBC,IAAAA,QAAQ,EAAE,EAA5B;AAAgCC,IAAAA,UAAU,EAAE;AAA5C;AAfyB,CAAlB,CAAf","sourcesContent":["import React from \"react\";\nimport {\n StyleSheet,\n TouchableOpacity,\n StyleProp,\n ViewStyle,\n} from \"react-native\";\nimport { Text } from \"./Text\";\nimport { useTheme } from \"../theme\";\n\ntype TProps = {\n title: string;\n onPress?: () => void;\n style?: StyleProp<ViewStyle>;\n};\n\nexport const 
PrimaryButton: React.FC<TProps> = ({ title, onPress, style }) => {\n const { colors, sizes } = useTheme();\n return (\n <TouchableOpacity\n onPress={onPress}\n style={[\n styles.container,\n {\n backgroundColor: colors.primaryColor,\n borderRadius: sizes.boxRadius,\n },\n style,\n ]}\n >\n <Text style={styles.text}>{title}</Text>\n </TouchableOpacity>\n );\n};\n\nconst styles = StyleSheet.create({\n container: {\n paddingVertical: 15,\n alignItems: \"center\",\n borderColor: \"white\",\n borderWidth: 0,\n shadowColor: \"#00000020\",\n shadowOpacity: 1,\n shadowRadius: 4,\n elevation: 4,\n shadowOffset: {\n width: 0,\n height: 4,\n },\n },\n text: { color: \"white\", fontSize: 16, fontFamily: \"default-medium\" },\n});\n"]},"metadata":{},"sourceType":"module"}
json
import React, {Component, useContext, useEffect, useRef, useState} from 'react' import axios from "axios"; import Button from "react-bootstrap/Button"; import ButtonGroup from "react-bootstrap/ButtonGroup"; import 'bootstrap/dist/css/bootstrap.min.css'; import {Container,Row,Col} from "react-bootstrap"; import './buttons.css'; import './first_row.css'; import FormatAlignLeftIcon from '@material-ui/icons/FormatAlignLeft'; import FormatAlignCenterIcon from '@material-ui/icons/FormatAlignCenter'; import FormatAlignRightIcon from '@material-ui/icons/FormatAlignRight'; import FormatAlignJustifyIcon from '@material-ui/icons/FormatAlignJustify'; import ToggleButton from '@material-ui/lab/ToggleButton'; import ToggleButtonGroup from '@material-ui/lab/ToggleButtonGroup'; import {AppContext} from "../../App"; import {faFileAlt,faRobot,faUser,faUserFriends,faUserEdit} from "@fortawesome/free-solid-svg-icons"; import {FontAwesomeIcon} from "@fortawesome/react-fontawesome"; function ChangeMemberGT(props){ const { fieldsToAnn,userchosen,finalcount,username,showmember,showmajority,reached,showautoannotation,reportString,fields,annotation,report,usecase,concepts,semanticArea, disButton,labelsToInsert, selectedconcepts,linkingConcepts, radio, checks,save, userLabels, labelsList, mentionsList, action, reports, index, mentionSingleWord, allMentions, tokens, associations } = useContext(AppContext); const [associations_to_show,SetAssociations_to_show] = associations; const [labels, setLabels] = labelsList const [Checks, setChecks] = checks; const [Fields,SetFields] = fields; const [FieldsToAnn,SetFieldsToAnn] = fieldsToAnn; const [SavedGT,SetSavedGT] = save; const [LabToInsert,SetLabToInsert] = labelsToInsert; const [Annotation,SetAnnotation] = annotation const [UseCase,SetUseCase] = usecase; const [reportsString, setReportsString] = reportString; const [FinalCount, SetFinalCount] = finalcount; const [FinalCountReached, SetFinalCountReached] = reached; const 
[ShowAutoAnn,SetShowAutoAnn] = showautoannotation; const [ShowMemberGt,SetShowMemberGt] =showmember const [ShowMajorityGt,SetShowMajorityGt] = showmajority const [Disable_Buttons, SetDisable_Buttons] = disButton; const [labels_to_show, setLabels_to_show] = userLabels; const [RadioChecked, SetRadioChecked] = radio; const [selectedConcepts, setSelectedConcepts] = selectedconcepts; const [Children,SetChildren] = tokens; const [mentions_to_show,SetMentions_to_show] = mentionsList; const [WordMention, SetWordMention] = mentionSingleWord; const [Report, setReport] = report; const [AllMentions, SetAllMentions] = allMentions; const [Reports, setReports] = reports; const [Index, setIndex] = index; const [UserLabels, SetUserLables] = userLabels; const [Action, SetAction] = action; const [Disabled,SetDisabled] = useState(true); //PER CLEAR const [ExaRobot,SetExaRobot] = useState(false) const [Concepts, SetConcepts] = concepts; const [ChangeButton,SetChangeButton] = useState(false) const [Username,SetUsername] = username const [SemanticArea, SetSemanticArea] = semanticArea; const [UserChosen,SetUserChosen] = userchosen const but1 = useRef(null) const but2 = useRef() const but3 = useRef() function order_array(mentions){ var ordered = [] var texts = [] mentions.map((item,i)=>{ texts.push(item.mention_text) }) texts.sort() texts.map((start,ind)=>{ mentions.map((ment,ind1)=>{ if(start === ment.mention_text){ if(ordered.indexOf(ment) === -1){ ordered.push(ment) } } }) }) return ordered } useEffect(()=>{ but1.current.focus() but1.current.className = 'btn btn-primary btn-sm' but2.current.className = 'btn btn-outline-primary btn-sm' but3.current.className = 'btn btn-outline-primary btn-sm' SetChangeButton(false) },[Index,Action]) function UserGT(){ but1.current.className = 'btn btn-primary btn-sm' but2.current.className = 'btn btn-outline-primary btn-sm' but3.current.className = 'btn btn-outline-primary btn-sm' SetShowAutoAnn(false) SetShowMemberGt(false) SetChangeButton(false) } 
function RobotGT(){ but1.current.className = 'btn btn-outline-primary btn-sm' but2.current.className = 'btn btn-primary btn-sm' but3.current.className = 'btn btn-outline-primary btn-sm' SetShowAutoAnn(true) SetShowMemberGt(false) SetChangeButton(false) } function MemberGT(){ but1.current.className = 'btn btn-outline-primary btn-sm' but3.current.className = 'btn btn-primary btn-sm' but2.current.className = 'btn btn-outline-primary btn-sm' SetShowMemberGt(true) SetShowAutoAnn(false) SetChangeButton(false) } useEffect(()=>{ var username_to_call = Username if (Annotation === 'Automatic'){ var ns_id = 'Robot' } else{ var ns_id = 'Human' } if(ShowAutoAnn){ username_to_call = Username ns_id = 'Robot' } else if(ShowMemberGt){ if(UserChosen.endsWith('_auto')){ username_to_call = UserChosen.substring(0,UserChosen.length - 5) ns_id = 'Robot' } else if (UserChosen.endsWith('_manual')){ username_to_call = UserChosen.substring(0,UserChosen.length - 7) ns_id = 'Human' } } axios.get("http://127.0.0.1:8000/report_start_end", {params: {ns_id:ns_id,report_id: Reports[Index].id_report.toString()}}).then(response => { SetFinalCount(response.data['final_count']);SetFinalCountReached(false);SetChangeButton(true) }) axios.get("http://127.0.0.1:8000/get_fields",{params:{ns_id:ns_id}}).then(response => {SetFields(response.data['fields']);SetFieldsToAnn(response.data['fields_to_ann']);}) if(Action === 'labels'){ axios.get("http://127.0.0.1:8000/annotationlabel/all_labels",{params:{ns_id:ns_id}}).then(response => {setLabels(response.data['labels'])}) axios.get("http://127.0.0.1:8000/annotationlabel/user_labels", {params: {ns_id:ns_id,username:username_to_call,report_id: Reports[Index].id_report.toString()}}).then(response => { setLabels_to_show(response.data[Action.toString()]); }) } else if(Action === 'concepts'){ axios.get("http://127.0.0.1:8000/get_semantic_area",{params: {ns_id:ns_id}}).then(response => SetSemanticArea(response.data['area'])) 
axios.get("http://127.0.0.1:8000/conc_view",{params: {ns_id:ns_id}}).then(response => {SetConcepts(response.data['concepts'])}) axios.get("http://127.0.0.1:8000/contains", {params: {ns_id:ns_id,username:username_to_call,report_id: Reports[Index].id_report.toString()}}).then(response => {setSelectedConcepts(response.data);}) } },[ShowAutoAnn,ShowMemberGt]) useEffect(()=>{ var labels = Array.from(document.getElementsByName('labels')) // var tokens = Array.from(document.getElementsByClassName('token')) // var tokens_not = Array.from(document.getElementsByClassName('notSelected')) // var concept_list = document.getElementById('concept_list_id') // var sem_area_list = document.getElementById('semanticAreaSelect') if(ShowMemberGt === true || ShowAutoAnn === true){ console.log('è true') console.log(reportString) //Not modifiable labels.map((val,i)=>{ val.setAttribute('disabled',true) }) } else{ if(Action === 'labels'){ labels.map((val,i)=>{ val.removeAttribute('disabled') }) } } },[ShowAutoAnn,ShowMemberGt]) useEffect(()=>{ if((ShowAutoAnn === true || ShowMemberGt === true) && ChangeButton === true){ if(Action === 'mentions'){ axios.get("http://127.0.0.1:8000/mention_insertion", {params: {ns_id:'Robot',report_id: Reports[Index].id_report.toString()}}).then(response => { var mentions = (response.data[Action.toString()]) var ordered = order_array(mentions) SetMentions_to_show(ordered); }) } else if(Action === 'concept-mention'){ axios.get("http://127.0.0.1:8000/insert_link/linked", {params: {ns_id:'Robot',report_id: Reports[Index].id_report.toString()}}).then(response => { SetAssociations_to_show(response.data['associations']); }) axios.get("http://127.0.0.1:8000/insert_link/mentions", {params: {ns_id:'Robot',report_id: Reports[Index].id_report.toString()}}).then(response => { var mentions = (response.data['mentions1']); var ordered = order_array(mentions) SetAllMentions(ordered) }) } } else{ if(Action === 'mentions'){ axios.get("http://127.0.0.1:8000/mention_insertion", 
{params: {report_id: Reports[Index].id_report.toString()}}).then(response => { var mentions = (response.data[Action.toString()]) var ordered = order_array(mentions) SetMentions_to_show(ordered); }) } else if(Action === 'concept-mention'){ axios.get("http://127.0.0.1:8000/insert_link/linked", {params: {report_id: Reports[Index].id_report.toString()}}).then(response => { SetAssociations_to_show(response.data['associations']); }) axios.get("http://127.0.0.1:8000/insert_link/mentions", {params: {report_id: Reports[Index].id_report.toString()}}).then(response => { var mentions = (response.data['mentions1']); var ordered = order_array(mentions) SetAllMentions(ordered) }) } } },[FinalCount,reportString,ChangeButton]) return( <div className="buttongroup"> <ButtonGroup> <Button ref ={but1} onClick={()=>UserGT()} id='current' size = 'sm' variant="secondary"> <FontAwesomeIcon icon={faUser} /> </Button> <Button ref = {but2} onClick={()=>RobotGT()} id='robot' size = 'sm' variant="secondary"> <FontAwesomeIcon icon={faRobot} /> </Button> <Button ref = {but3} onClick={()=>MemberGT()} id='mate' size = 'sm' variant="secondary"> <FontAwesomeIcon icon={faUserFriends} /> </Button> </ButtonGroup> </div> ); } export default ChangeMemberGT
javascript
{"date":20201027,"state":"CO","positive":98733,"probableCases":6710,"negative":1085021,"pending":null,"totalTestResultsSource":"totalTestEncountersViral","totalTestResults":1892949,"hospitalizedCurrently":648,"hospitalizedCumulative":8778,"inIcuCurrently":null,"inIcuCumulative":null,"onVentilatorCurrently":null,"onVentilatorCumulative":null,"recovered":null,"lastUpdateEt":"10/27/2020 01:59","dateModified":"2020-10-27T01:59:00Z","checkTimeEt":"10/26 21:59","death":2236,"hospitalized":8778,"hospitalizedDischarged":7668,"dateChecked":"2020-10-27T01:59:00Z","totalTestsViral":null,"positiveTestsViral":null,"negativeTestsViral":null,"positiveCasesViral":92023,"deathConfirmed":1850,"deathProbable":386,"totalTestEncountersViral":1892949,"totalTestsPeopleViral":1177044,"totalTestsAntibody":181987,"positiveTestsAntibody":13243,"negativeTestsAntibody":168744,"totalTestsPeopleAntibody":null,"positiveTestsPeopleAntibody":null,"negativeTestsPeopleAntibody":null,"totalTestsPeopleAntigen":null,"positiveTestsPeopleAntigen":null,"totalTestsAntigen":null,"positiveTestsAntigen":null,"fips":"08","positiveIncrease":1433,"negativeIncrease":6971,"total":1183754,"totalTestResultsIncrease":18848,"posNeg":1183754,"dataQualityGrade":null,"deathIncrease":10,"hospitalizedIncrease":120,"hash":"ace50210d3eaccf4f18c467885831e695defa31d","commercialScore":0,"negativeRegularScore":0,"negativeScore":0,"positiveScore":0,"score":0,"grade":""}
json
/* Copyright 2019 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package git import ( "errors" "fmt" "reflect" "testing" "github.com/sirupsen/logrus" "k8s.io/apimachinery/pkg/util/diff" ) func TestInteractor_Clone(t *testing.T) { var testCases = []struct { name string dir string from string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", dir: "/else", from: "/somewhere", responses: map[string]execResponse{ "clone /somewhere /else": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"clone", "/somewhere", "/else"}, }, expectedErr: false, }, { name: "clone fails", dir: "/else", from: "/somewhere", responses: map[string]execResponse{ "clone /somewhere /else": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"clone", "/somewhere", "/else"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, dir: testCase.dir, logger: logrus.WithField("test", testCase.name), } actualErr := i.Clone(testCase.from) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, 
expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_MirrorClone(t *testing.T) { var testCases = []struct { name string dir string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", dir: "/else", remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "clone --mirror someone.com /else": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"clone", "--mirror", "someone.com", "/else"}, }, expectedErr: false, }, { name: "remote resolution fails", dir: "/else", remote: func() (string, error) { return "", errors.New("oops") }, responses: map[string]execResponse{}, expectedCalls: [][]string{}, expectedErr: true, }, { name: "clone fails", dir: "/else", remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "clone --mirror someone.com /else": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"clone", "--mirror", "someone.com", "/else"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, dir: testCase.dir, logger: logrus.WithField("test", testCase.name), } actualErr := i.MirrorClone() if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_Checkout(t *testing.T) { var testCases = []struct { name string commitlike string remote 
RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", commitlike: "shasum", responses: map[string]execResponse{ "checkout shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"checkout", "shasum"}, }, expectedErr: false, }, { name: "checkout fails", commitlike: "shasum", responses: map[string]execResponse{ "checkout shasum": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"checkout", "shasum"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.Checkout(testCase.commitlike) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_RevParse(t *testing.T) { var testCases = []struct { name string commitlike string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedOut string expectedErr bool }{ { name: "happy case", commitlike: "shasum", responses: map[string]execResponse{ "rev-parse shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"rev-parse", "shasum"}, }, expectedOut: "ok", expectedErr: false, }, { name: "rev-parse fails", commitlike: "shasum", responses: map[string]execResponse{ "rev-parse shasum": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"rev-parse", "shasum"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) 
{ e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualOut, actualErr := i.RevParse(testCase.commitlike) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } if actualOut != testCase.expectedOut { t.Errorf("%s: got incorrect output: expected %v, got %v", testCase.name, testCase.expectedOut, actualOut) } }) } } func TestInteractor_BranchExists(t *testing.T) { var testCases = []struct { name string branch string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedOut bool }{ { name: "happy case", branch: "branch", responses: map[string]execResponse{ "ls-remote --exit-code --heads origin branch": { out: []byte(`c165713776618ff3162643ea4d0382ca039adfeb refs/heads/branch`), }, }, expectedCalls: [][]string{ {"ls-remote", "--exit-code", "--heads", "origin", "branch"}, }, expectedOut: true, }, { name: "ls-remote fails", branch: "branch", responses: map[string]execResponse{ "ls-remote --exit-code --heads origin branch": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"ls-remote", "--exit-code", "--heads", "origin", "branch"}, }, expectedOut: false, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualOut := i.BranchExists(testCase.branch) if testCase.expectedOut != actualOut { t.Errorf("%s: got 
incorrect output: expected %v, got %v", testCase.name, testCase.expectedOut, actualOut) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_CheckoutNewBranch(t *testing.T) { var testCases = []struct { name string branch string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", branch: "new-branch", responses: map[string]execResponse{ "checkout -b new-branch": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"checkout", "-b", "new-branch"}, }, expectedErr: false, }, { name: "checkout fails", branch: "new-branch", responses: map[string]execResponse{ "checkout -b new-branch": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"checkout", "-b", "new-branch"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.CheckoutNewBranch(testCase.branch) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_Merge(t *testing.T) { var testCases = []struct { name string commitlike string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedMerge bool expectedErr bool }{ { name: "happy case", commitlike: "shasum", responses: 
map[string]execResponse{ "merge --no-ff --no-stat -m merge shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "merge", "shasum"}, }, expectedMerge: true, expectedErr: false, }, { name: "merge fails but abort succeeds", commitlike: "shasum", responses: map[string]execResponse{ "merge --no-ff --no-stat -m merge shasum": { err: errors.New("oops"), }, "merge --abort": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "merge", "shasum"}, {"merge", "--abort"}, }, expectedMerge: false, expectedErr: false, }, { name: "merge fails and abort fails", commitlike: "shasum", responses: map[string]execResponse{ "merge --no-ff --no-stat -m merge shasum": { err: errors.New("oops"), }, "merge --abort": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "merge", "shasum"}, {"merge", "--abort"}, }, expectedMerge: false, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualMerge, actualErr := i.Merge(testCase.commitlike) if testCase.expectedMerge != actualMerge { t.Errorf("%s: got incorrect output: expected %v, got %v", testCase.name, testCase.expectedMerge, actualMerge) } if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_MergeWithStrategy(t *testing.T) { var testCases = []struct { name string commitlike 
string strategy string opts []MergeOpt remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedMerge bool expectedErr bool }{ { name: "happy merge case", commitlike: "shasum", strategy: "merge", responses: map[string]execResponse{ "merge --no-ff --no-stat -m merge shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "merge", "shasum"}, }, expectedMerge: true, expectedErr: false, }, { name: "happy merge case with options", commitlike: "shasum", strategy: "merge", opts: []MergeOpt{{CommitMessage: "message"}}, responses: map[string]execResponse{ "merge --no-ff --no-stat -m message shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "message", "shasum"}, }, expectedMerge: true, expectedErr: false, }, { name: "happy merge case with multi words message", commitlike: "shasum", strategy: "merge", opts: []MergeOpt{{CommitMessage: "my happy merge message"}}, responses: map[string]execResponse{ "merge --no-ff --no-stat -m my happy merge message shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "my happy merge message", "shasum"}, }, expectedMerge: true, expectedErr: false, }, { name: "happy merge case with multiple options with single/multi words message", commitlike: "shasum", strategy: "merge", opts: []MergeOpt{ {CommitMessage: "my"}, {CommitMessage: "happy merge"}, {CommitMessage: "message"}, }, responses: map[string]execResponse{ "merge --no-ff --no-stat -m my -m happy merge -m message shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "my", "-m", "happy merge", "-m", "message", "shasum"}, }, expectedMerge: true, expectedErr: false, }, { name: "happy squash case", commitlike: "shasum", strategy: "squash", responses: map[string]execResponse{ "merge --squash --no-stat shasum": { out: []byte(`ok`), }, "commit --no-stat -m merge": { out: 
[]byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--squash", "--no-stat", "shasum"}, {"commit", "--no-stat", "-m", "merge"}, }, expectedMerge: true, expectedErr: false, }, { name: "invalid strategy", commitlike: "shasum", strategy: "whatever", responses: map[string]execResponse{}, expectedCalls: [][]string{}, expectedMerge: false, expectedErr: true, }, { name: "merge fails but abort succeeds", commitlike: "shasum", strategy: "merge", responses: map[string]execResponse{ "merge --no-ff --no-stat -m merge shasum": { err: errors.New("oops"), }, "merge --abort": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "merge", "shasum"}, {"merge", "--abort"}, }, expectedMerge: false, expectedErr: false, }, { name: "merge fails and abort fails", commitlike: "shasum", strategy: "merge", responses: map[string]execResponse{ "merge --no-ff --no-stat -m merge shasum": { err: errors.New("oops"), }, "merge --abort": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"merge", "--no-ff", "--no-stat", "-m", "merge", "shasum"}, {"merge", "--abort"}, }, expectedMerge: false, expectedErr: true, }, { name: "squash merge fails but abort succeeds", commitlike: "shasum", strategy: "squash", responses: map[string]execResponse{ "merge --squash --no-stat shasum": { err: errors.New("oops"), }, "reset --hard HEAD": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--squash", "--no-stat", "shasum"}, {"reset", "--hard", "HEAD"}, }, expectedMerge: false, expectedErr: false, }, { name: "squash merge fails and abort fails", commitlike: "shasum", strategy: "squash", responses: map[string]execResponse{ "merge --squash --no-stat shasum": { err: errors.New("oops"), }, "reset --hard HEAD": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"merge", "--squash", "--no-stat", "shasum"}, {"reset", "--hard", "HEAD"}, }, expectedMerge: false, expectedErr: true, }, { name: "squash merge staging succeeds, commit fails and abort 
succeeds", commitlike: "shasum", strategy: "squash", responses: map[string]execResponse{ "merge --squash --no-stat shasum": { out: []byte(`ok`), }, "commit --no-stat -m merge": { err: errors.New("oops"), }, "reset --hard HEAD": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"merge", "--squash", "--no-stat", "shasum"}, {"commit", "--no-stat", "-m", "merge"}, {"reset", "--hard", "HEAD"}, }, expectedMerge: false, expectedErr: false, }, { name: "squash merge staging succeeds, commit fails and abort fails", commitlike: "shasum", strategy: "squash", responses: map[string]execResponse{ "merge --squash --no-stat shasum": { out: []byte(`ok`), }, "commit --no-stat -m merge": { err: errors.New("oops"), }, "reset --hard HEAD": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"merge", "--squash", "--no-stat", "shasum"}, {"commit", "--no-stat", "-m", "merge"}, {"reset", "--hard", "HEAD"}, }, expectedMerge: false, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualMerge, actualErr := i.MergeWithStrategy(testCase.commitlike, testCase.strategy, testCase.opts...) 
if testCase.expectedMerge != actualMerge { t.Errorf("%s: got incorrect output: expected %v, got %v", testCase.name, testCase.expectedMerge, actualMerge) } if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_MergeAndCheckout(t *testing.T) { var testCases = []struct { name string baseSHA string commitlikes []string strategy string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy do nothing case", baseSHA: "base", commitlikes: []string{}, strategy: "merge", responses: map[string]execResponse{ "checkout base": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"checkout", "base"}, }, expectedErr: false, }, { name: "happy merge case", baseSHA: "base", commitlikes: []string{"first", "second"}, strategy: "merge", responses: map[string]execResponse{ "checkout base": { out: []byte(`ok`), }, "merge --no-ff --no-stat -m merge first": { out: []byte(`ok`), }, "merge --no-ff --no-stat -m merge second": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"checkout", "base"}, {"merge", "--no-ff", "--no-stat", "-m", "merge", "first"}, {"merge", "--no-ff", "--no-stat", "-m", "merge", "second"}, }, expectedErr: false, }, { name: "happy squash case", baseSHA: "base", commitlikes: []string{"first", "second"}, strategy: "squash", responses: map[string]execResponse{ "checkout base": { out: []byte(`ok`), }, "merge --squash --no-stat first": { out: []byte(`ok`), }, "commit --no-stat -m merge": { out: []byte(`ok`), }, "merge --squash --no-stat second": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ 
{"checkout", "base"}, {"merge", "--squash", "--no-stat", "first"}, {"commit", "--no-stat", "-m", "merge"}, {"merge", "--squash", "--no-stat", "second"}, {"commit", "--no-stat", "-m", "merge"}, }, expectedErr: false, }, { name: "invalid strategy", commitlikes: []string{"shasum"}, strategy: "whatever", responses: map[string]execResponse{}, expectedCalls: [][]string{}, expectedErr: true, }, { name: "checkout fails", baseSHA: "base", commitlikes: []string{"first", "second"}, strategy: "squash", responses: map[string]execResponse{ "checkout base": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"checkout", "base"}, }, expectedErr: true, }, { name: "merge fails but abort succeeds", baseSHA: "base", commitlikes: []string{"first", "second"}, strategy: "merge", responses: map[string]execResponse{ "checkout base": { out: []byte(`ok`), }, "merge --no-ff --no-stat -m merge first": { err: errors.New("oops"), }, "merge --abort": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"checkout", "base"}, {"merge", "--no-ff", "--no-stat", "-m", "merge", "first"}, {"merge", "--abort"}, }, expectedErr: true, }, { name: "merge fails and abort fails", baseSHA: "base", commitlikes: []string{"first", "second"}, strategy: "merge", responses: map[string]execResponse{ "checkout base": { out: []byte(`ok`), }, "merge --no-ff --no-stat -m merge first": { err: errors.New("oops"), }, "merge --abort": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"checkout", "base"}, {"merge", "--no-ff", "--no-stat", "-m", "merge", "first"}, {"merge", "--abort"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.MergeAndCheckout(testCase.baseSHA, testCase.strategy, testCase.commitlikes...) 
if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_Am(t *testing.T) { var testCases = []struct { name string path string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", path: "my/changes.patch", responses: map[string]execResponse{ "am --3way my/changes.patch": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"am", "--3way", "my/changes.patch"}, }, expectedErr: false, }, { name: "am fails but abort succeeds", path: "my/changes.patch", responses: map[string]execResponse{ "am --3way my/changes.patch": { err: errors.New("oops"), }, "am --abort": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"am", "--3way", "my/changes.patch"}, {"am", "--abort"}, }, expectedErr: true, }, { name: "am fails and abort fails", path: "my/changes.patch", responses: map[string]execResponse{ "am --3way my/changes.patch": { err: errors.New("oops"), }, "am --abort": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"am", "--3way", "my/changes.patch"}, {"am", "--abort"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.Am(testCase.path) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", 
testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_RemoteUpdate(t *testing.T) { var testCases = []struct { name string responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", responses: map[string]execResponse{ "remote update": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"remote", "update"}, }, expectedErr: false, }, { name: "update fails", responses: map[string]execResponse{ "remote update": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"remote", "update"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, logger: logrus.WithField("test", testCase.name), } actualErr := i.RemoteUpdate() if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_Fetch(t *testing.T) { var testCases = []struct { name string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"fetch", "someone.com"}, }, expectedErr: false, }, { name: "remote resolution fails", remote: func() (string, error) { return "", errors.New("oops") 
}, responses: map[string]execResponse{}, expectedCalls: [][]string{}, expectedErr: true, }, { name: "fetch fails", remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"fetch", "someone.com"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.Fetch() if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_FetchRef(t *testing.T) { var testCases = []struct { name string refspec string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", refspec: "shasum", remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com shasum": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"fetch", "someone.com", "shasum"}, }, expectedErr: false, }, { name: "remote resolution fails", refspec: "shasum", remote: func() (string, error) { return "", errors.New("oops") }, responses: map[string]execResponse{}, expectedCalls: [][]string{}, expectedErr: true, }, { name: "fetch fails", refspec: "shasum", remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com shasum": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ 
{"fetch", "someone.com", "shasum"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.FetchRef(testCase.refspec) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_FetchFromRemote(t *testing.T) { var testCases = []struct { name string remote RemoteResolver toRemote RemoteResolver branch string responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "fetch from different remote without token", remote: func() (string, error) { return "someone.com", nil }, toRemote: func() (string, error) { return "https://github.com/kubernetes/test-infra-fork", nil }, branch: "test-branch", responses: map[string]execResponse{ "fetch https://github.com/kubernetes/test-infra-fork test-branch": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"fetch", "https://github.com/kubernetes/test-infra-fork", "test-branch"}, }, expectedErr: false, }, { name: "fetch from different remote with token", remote: func() (string, error) { return "someone.com", nil }, toRemote: func() (string, error) { return "https://user:pass@github.com/kubernetes/test-infra-fork", nil }, branch: "test-branch", responses: map[string]execResponse{ "fetch https://user:pass@github.com/kubernetes/test-infra-fork test-branch": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"fetch", 
"https://user:pass@github.com/kubernetes/test-infra-fork", "test-branch"}, }, expectedErr: false, }, { name: "passing non-valid remote", remote: func() (string, error) { return "someone.com", nil }, toRemote: func() (string, error) { return "", fmt.Errorf("non-valid URL") }, branch: "test-branch", expectedCalls: [][]string{}, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.FetchFromRemote(testCase.toRemote, testCase.branch) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_CheckoutPullRequest(t *testing.T) { var testCases = []struct { name string number int remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", number: 1, remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com pull/1/head": { out: []byte(`ok`), }, "checkout FETCH_HEAD": { out: []byte(`ok`), }, "checkout -b pull1": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"fetch", "someone.com", "pull/1/head"}, {"checkout", "FETCH_HEAD"}, {"checkout", "-b", "pull1"}, }, expectedErr: false, }, { name: "remote resolution fails", number: 1, remote: func() (string, error) { return "", errors.New("oops") }, responses: map[string]execResponse{}, expectedCalls: [][]string{}, expectedErr: true, }, { name: "fetch fails", number: 1, 
remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com pull/1/head": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"fetch", "someone.com", "pull/1/head"}, }, expectedErr: true, }, { name: "checkout fails", number: 1, remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com pull/1/head": { out: []byte(`ok`), }, "checkout FETCH_HEAD": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"fetch", "someone.com", "pull/1/head"}, {"checkout", "FETCH_HEAD"}, }, expectedErr: true, }, { name: "branch fails", number: 1, remote: func() (string, error) { return "someone.com", nil }, responses: map[string]execResponse{ "fetch someone.com pull/1/head": { out: []byte(`ok`), }, "checkout FETCH_HEAD": { out: []byte(`ok`), }, "checkout -b pull1": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"fetch", "someone.com", "pull/1/head"}, {"checkout", "FETCH_HEAD"}, {"checkout", "-b", "pull1"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.CheckoutPullRequest(testCase.number) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_Config(t *testing.T) { var testCases = []struct { name string key, value string remote RemoteResolver responses map[string]execResponse expectedCalls 
[][]string expectedErr bool }{ { name: "happy case", key: "key", value: "value", responses: map[string]execResponse{ "config key value": { out: []byte(`ok`), }, }, expectedCalls: [][]string{ {"config", "key", "value"}, }, expectedErr: false, }, { name: "config fails", key: "key", value: "value", responses: map[string]execResponse{ "config key value": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"config", "key", "value"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualErr := i.Config(testCase.key, testCase.value) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_Diff(t *testing.T) { var testCases = []struct { name string head, sha string remote RemoteResolver responses map[string]execResponse expectedCalls [][]string expectedOut []string expectedErr bool }{ { name: "happy case", head: "head", sha: "sha", responses: map[string]execResponse{ "diff head sha --name-only": { out: []byte(`prow/git/v2/client_factory.go prow/git/v2/executor.go prow/git/v2/executor_test.go prow/git/v2/fakes.go prow/git/v2/interactor.go prow/git/v2/publisher.go prow/git/v2/publisher_test.go prow/git/v2/remote.go prow/git/v2/remote_test.go`), }, }, expectedCalls: [][]string{ {"diff", "head", "sha", "--name-only"}, }, expectedOut: []string{ "prow/git/v2/client_factory.go", "prow/git/v2/executor.go", "prow/git/v2/executor_test.go", 
"prow/git/v2/fakes.go", "prow/git/v2/interactor.go", "prow/git/v2/publisher.go", "prow/git/v2/publisher_test.go", "prow/git/v2/remote.go", "prow/git/v2/remote_test.go", }, expectedErr: false, }, { name: "config fails", head: "head", sha: "sha", responses: map[string]execResponse{ "diff head sha --name-only": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"diff", "head", "sha", "--name-only"}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, remote: testCase.remote, logger: logrus.WithField("test", testCase.name), } actualOut, actualErr := i.Diff(testCase.head, testCase.sha) if !reflect.DeepEqual(actualOut, testCase.expectedOut) { t.Errorf("%s: got incorrect output: %v", testCase.name, diff.ObjectReflectDiff(actualOut, testCase.expectedOut)) } if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_MergeCommitsExistBetween(t *testing.T) { var testCases = []struct { name string target, head string responses map[string]execResponse expectedCalls [][]string expectedOut bool expectedErr bool }{ { name: "happy case and merges exist", target: "target", head: "head", responses: map[string]execResponse{ "log target..head --oneline --merges": { out: []byte(`8df5654e6 Merge pull request #14911 from mborsz/etcd 96cbeee23 Merge pull request #14755 from justinsb/the_life_changing_magic_of_tidying_up`), }, }, expectedCalls: [][]string{ {"log", "target..head", "--oneline", "--merges"}, }, expectedOut: true, 
expectedErr: false, }, { name: "happy case and merges don't exist", target: "target", head: "head", responses: map[string]execResponse{ "log target..head --oneline --merges": { out: []byte(``), }, }, expectedCalls: [][]string{ {"log", "target..head", "--oneline", "--merges"}, }, expectedOut: false, expectedErr: false, }, { name: "log fails", target: "target", head: "head", responses: map[string]execResponse{ "log target..head --oneline --merges": { err: errors.New("oops"), }, }, expectedCalls: [][]string{ {"log", "target..head", "--oneline", "--merges"}, }, expectedOut: false, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, logger: logrus.WithField("test", testCase.name), } actualOut, actualErr := i.MergeCommitsExistBetween(testCase.target, testCase.head) if testCase.expectedOut != actualOut { t.Errorf("%s: got incorrect output: expected %v, got %v", testCase.name, testCase.expectedOut, actualOut) } if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } } func TestInteractor_ShowRef(t *testing.T) { const target = "some-branch" var testCases = []struct { name string responses map[string]execResponse expectedCalls [][]string expectedErr bool }{ { name: "happy case", responses: map[string]execResponse{ "show-ref -s some-branch": {out: []byte("32d3f5a6826109c625527f18a59f2e7144a330b6\n")}, }, expectedCalls: [][]string{ {"show-ref", "-s", target}, }, expectedErr: false, }, { name: "unhappy case", responses: map[string]execResponse{ 
"git show-ref -s some-undef-branch": {err: errors.New("some-err")}, }, expectedCalls: [][]string{ {"show-ref", "-s", target}, }, expectedErr: true, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { e := fakeExecutor{ records: [][]string{}, responses: testCase.responses, } i := interactor{ executor: &e, logger: logrus.WithField("test", testCase.name), } _, actualErr := i.ShowRef(target) if testCase.expectedErr && actualErr == nil { t.Errorf("%s: expected an error but got none", testCase.name) } if !testCase.expectedErr && actualErr != nil { t.Errorf("%s: expected no error but got one: %v", testCase.name, actualErr) } if actual, expected := e.records, testCase.expectedCalls; !reflect.DeepEqual(actual, expected) { t.Errorf("%s: got incorrect git calls: %v", testCase.name, diff.ObjectReflectDiff(actual, expected)) } }) } }
go
{"name":"Wakayama","lastUpdate":"2020-10-23T13:00:00","ncurrentpatients":16,"nexits":245,"ndeaths":4,"npatients":265,"src_url":"https://www.pref.wakayama.lg.jp/prefg/000200/covid19_d/fil/kansensuii.xlsx","url_opendata":"https://www.pref.wakayama.lg.jp/prefg/041200/d00203387.html"}
json
<gh_stars>0 --- UID: NF:fltkernel.FltRetainSwappedBufferMdlAddress title: FltRetainSwappedBufferMdlAddress function (fltkernel.h) description: FltRetainSwappedBufferMdlAddress prevents the Filter Manager from freeing the memory descriptor list (MDL) for a buffer that was swapped in by a minifilter driver. old-location: ifsk\fltretainswappedbuffermdladdress.htm tech.root: ifsk ms.assetid: 80498410-9617-414d-997c-0d55f891ba3c ms.date: 04/16/2018 ms.keywords: FltApiRef_p_to_z_3832baaa-37bc-47cc-9df4-12c92fd0ddd8.xml, FltRetainSwappedBufferMdlAddress, FltRetainSwappedBufferMdlAddress function [Installable File System Drivers], fltkernel/FltRetainSwappedBufferMdlAddress, ifsk.fltretainswappedbuffermdladdress ms.topic: function f1_keywords: - "fltkernel/FltRetainSwappedBufferMdlAddress" req.header: fltkernel.h req.include-header: Fltkernel.h req.target-type: Universal req.target-min-winverclnt: req.target-min-winversvr: req.kmdf-ver: req.umdf-ver: req.ddi-compliance: req.unicode-ansi: req.idl: req.max-support: req.namespace: req.assembly: req.type-library: req.lib: FltMgr.lib req.dll: Fltmgr.sys req.irql: Any level topic_type: - APIRef - kbSyntax api_type: - DllExport api_location: - fltmgr.sys api_name: - FltRetainSwappedBufferMdlAddress product: - Windows targetos: Windows req.typenames: --- # FltRetainSwappedBufferMdlAddress function ## -description <b>FltRetainSwappedBufferMdlAddress</b> prevents the Filter Manager from freeing the memory descriptor list (MDL) for a buffer that was swapped in by a minifilter driver. ## -parameters ### -param CallbackData [in] Pointer to the callback data structure for the operation. 
## -returns None ## -remarks When a minifilter driver swaps in a new buffer in a preoperation callback (<a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/fltkernel/nc-fltkernel-pflt_pre_operation_callback">PFLT_PRE_OPERATION_CALLBACK</a>) routine, the Filter Manager automatically frees the buffer's MDL when the corresponding postoperation (<a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/fltkernel/nc-fltkernel-pflt_post_operation_callback">PFLT_POST_OPERATION_CALLBACK</a>) callback routine returns. The minifilter driver can prevent the Filter Manager from freeing the MDL by calling <b>FltRetainSwappedBufferMdlAddress</b> from the postoperation callback routine. After calling <b>FltRetainSwappedBufferMdlAddress</b>, the caller is responsible for freeing the MDL by calling a routine such as <a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/wdm/nf-wdm-iofreemdl">IoFreeMdl</a>. <b>FltRetainSwappedBufferMdlAddress</b> can only be called from a postoperation callback routine. ## -see-also <a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/fltkernel/nf-fltkernel-fltdecodeparameters">FltDecodeParameters</a> <a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/fltkernel/nf-fltkernel-fltgetswappedbuffermdladdress">FltGetSwappedBufferMdlAddress</a> <a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/wdm/nf-wdm-iofreemdl">IoFreeMdl</a> <a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/fltkernel/nc-fltkernel-pflt_post_operation_callback">PFLT_POST_OPERATION_CALLBACK</a> <a href="https://docs.microsoft.com/windows-hardware/drivers/ddi/content/fltkernel/nc-fltkernel-pflt_pre_operation_callback">PFLT_PRE_OPERATION_CALLBACK</a>    
markdown
<filename>translations/texts/quests/outpost/penguin1.questtemplate.json [ { "DeniedAlternatives": [], "Files": { "quests/outpost/penguin1.questtemplate": [ "/scriptConfig/descriptions/findGoal" ] }, "Texts": { "Eng": "Defeat ^orange;Dreadwing^reset;", "Rus": "Одолейте ^orange;Ужаснокрыла^reset;" } }, { "DeniedAlternatives": [], "Files": { "quests/outpost/penguin1.questtemplate": [ "/text" ] }, "Texts": { "Eng": "Help me out, would ya? Business around here hasn't been so great since all my regulars are on the run from ^orange;Dreadwing^white; and his gang of crooks! Can you ^green;go and defeat Dreadwing^white; for me? Come speak when you're ready, I'll send ya right to him!", "Rus": "Помоги мне, а? Мой бизнес катится ко дну, поскольку все мои постоянные клиенты находятся в бегах, скрываясь от ^orange;Ужаснокрыла^white; и его бандитов! Не мог бы ты ^green;убить Ужаснокрыла^white;? Скажи, когда будешь готов, и я отправлю тебя прямо к нему!" } }, { "DeniedAlternatives": [], "Files": { "quests/outpost/penguin1.questtemplate": [ "/scriptConfig/descriptions/turnIn" ] }, "Texts": { "Eng": "Return to the ^orange;Bartender^reset; at the ^orange;Outpost^reset;", "Rus": "Вернитесь к ^orange;Бармену^reset; на ^orange;Форпосте^reset;" } }, { "DeniedAlternatives": [], "Files": { "quests/outpost/penguin1.questtemplate": [ "/scriptConfig/descriptions/enterInstance" ] }, "Texts": { "Eng": "Talk to the ^orange;Bartender^reset; to go to ^orange;Dreadwing^reset;", "Rus": "Поговорите с ^orange;Барменом^reset;, чтобы отправиться к ^orange;Ужаснокрылу^reset;" } }, { "DeniedAlternatives": [], "Files": { "quests/outpost/penguin1.questtemplate": [ "/completionText" ] }, "Texts": { "Eng": "You sorted him out for me? Thanks, now I'm back in business! ^green;Come speak to me if you want a ^orange;Dubloon^green; to hire a penguin merc^white;.", "Rus": "Ты с ним разобрался? Спасибо, теперь я снова в деле! 
^green;Если захочешь воспользоваться услугами наёмника-пингвина, то приходи, я дам тебе ^orange;Дублон^white;." } }, { "DeniedAlternatives": [], "Files": { "quests/outpost/penguin1.questtemplate": [ "/title" ] }, "Texts": { "Eng": "^green;Dreadwing the Penguin", "Rus": "Пингвин Ужаснокрыл" } } ]
json
{"nom":"Saint-Médard-la-Rochette","circ":"1ère circonscription","dpt":"Creuse","inscrits":475,"abs":275,"votants":200,"blancs":17,"nuls":8,"exp":175,"res":[{"nuance":"REM","nom":"<NAME>","voix":89},{"nuance":"LR","nom":"<NAME>","voix":86}]}
json
James has written that God stands ready and willing to give away wisdom—for free and without finding fault—to all who ask Him for it. But they must ask believing that God is trustworthy to give wisdom. They must be ready to act on the wisdom He gives. However, those who see God's way as only one of many options will spend their lives being driven back and forth by whatever wind blows strongest. A person who comes to God, asking for wisdom, but without faith—continuing to shop around for sources of wisdom contrary to God—will be like a wave tossed around by the wind. Here, in verse 7, James makes it clear that the faithless person shouldn't expect to receive any wisdom from God. God cares deeply that His children, believers in Jesus, will trust Him exclusively. He has no interest in being one booth we visit in the marketplace of theological ideas. He wants to be our first and final stop. If we don't trust Him exclusively, we will receive no wisdom from Him, at all. God needs to be the center of our life. We have to have the courage and confidence to ask God for what we want. We also have to believe it and have child-like faith. You remember when you were a kid and you believed you were a superhero, doctor, singer or professional sports player? You really believed it and lived it. Where is that child-like faith? What are these gifts you need to spend more time working on? Seek progress and not perfection. The gifts are inside of you. They are God sent. Remove the glass ceiling and live out your legacy. Prayer of the Day: God, thank You for being my strength and my defense. You are always reliable and Your promises to me are always secure. I have nothing to fear because You are with me. You protect me, strengthen me, and go before me. You are all I need because You are my salvation. In Jesus’ name, Amen. Don’t I Need to Be Fixed Up?
english
<filename>source/cordwood-core/src/test/java/ren/hankai/cordwood/core/test/http/TinyHttpServer.java /******************************************************************************* * Copyright (C) 2018 hankai * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
******************************************************************************/ package ren.hankai.cordwood.core.test.http; import org.simpleframework.http.Request; import org.simpleframework.http.Response; import org.simpleframework.http.core.Container; import org.simpleframework.http.core.ContainerServer; import org.simpleframework.transport.Server; import org.simpleframework.transport.connect.Connection; import org.simpleframework.transport.connect.SocketConnection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ren.hankai.cordwood.core.util.HttpSslUtil; import java.net.InetSocketAddress; import java.net.SocketAddress; /** * 微型HTTP服务器。 * * @author hankai * @version 1.0.0 * @since Dec 3, 2018 9:42:05 AM */ public class TinyHttpServer implements Container { private static final Logger logger = LoggerFactory.getLogger(TinyHttpServer.class); private final HttpRequestHandler handler; private final Connection connection; private final Server server; /** * 构建 Web 容器。 * @param handler 请求处理器 * @throws Exception 异常 */ public TinyHttpServer(HttpRequestHandler handler) throws Exception { this.server = new ContainerServer(this, 10); this.connection = new SocketConnection(server); this.handler = handler; } /** * 启动web容器。 * * @author hankai * @since Dec 3, 2018 9:12:10 AM */ public void start(int port, boolean secure) { try { final SocketAddress address = new InetSocketAddress(port); if (secure) { HttpSslUtil.trustAllHostnames(); connection.connect(address, HttpSslUtil.trustAllHttpsCertificates()); } else { connection.connect(address); } } catch (final Exception ex) { logger.error("Failed to start web container.", ex); } } /** * 停止web容器。 * * @author hankai * @since Dec 3, 2018 9:10:36 AM */ public void stop() { try { connection.close(); server.stop(); } catch (final Exception ex) { logger.error("Failed to stop web container.", ex); } } @Override public void handle(Request req, Response resp) { try { handler.handle(req, resp); } catch (final Throwable ex) { 
logger.error("Failed to handle web request.", ex); } finally { try { resp.close(); } catch (final Exception expected) { } } } }
java
[ ["java.lang.reflect.ParameterizedType","org.springframework.core.SerializableTypeWrapper$SerializableTypeProxy","java.io.Serializable"], ["java.lang.reflect.WildcardType","org.springframework.core.SerializableTypeWrapper$SerializableTypeProxy","java.io.Serializable"], ["java.sql.Connection"], ["org.springframework.batch.core.configuration.JobRegistry","org.springframework.aop.SpringProxy","org.springframework.aop.framework.Advised","org.springframework.core.DecoratingProxy"], ["org.springframework.batch.core.explore.JobExplorer","org.springframework.aop.SpringProxy","org.springframework.aop.framework.Advised","org.springframework.core.DecoratingProxy"], ["org.springframework.batch.core.launch.JobLauncher","org.springframework.aop.SpringProxy","org.springframework.aop.framework.Advised","org.springframework.core.DecoratingProxy"], ["org.springframework.batch.core.repository.JobRepository","org.springframework.aop.SpringProxy","org.springframework.aop.framework.Advised","org.springframework.core.DecoratingProxy"], ["org.springframework.boot.context.properties.ConfigurationProperties","org.springframework.core.annotation.SynthesizedAnnotation"], ["org.springframework.jdbc.datasource.ConnectionProxy"], ["org.springframework.transaction.PlatformTransactionManager","org.springframework.aop.SpringProxy","org.springframework.aop.framework.Advised","org.springframework.core.DecoratingProxy"] ]
json
Former opener Robin Uthappa has criticised Indian team management for not giving Sanju Samson enough chances to prove his talent at a high level. Samson, who made his debut in 2015, was touted as the next big thing in Indian cricket but he failed to live up to the expectations. He hasn’t been given a long rope in the Indian team which has also not worked in his favour. Samson, recently, sustained a knee injury and missed T20Is against Sri Lanka and New Zealand. Uthappa feels team management should give Samson enough opportunities in whatever position they want to use him. “He should be given a long rope. He is a high-quality player with a lot of potential. He has not been given a long opportunity. If you want to use him at No. 3, give him at least five opportunities. If you want to use him at No. 5, give him the opportunities at least. But give him a chance," Uthappa told Times of India. Samson was injured during the first T20I against Sri Lanka while fielding near the boundary rope as India gave chance to Rahul Tripathi in the XI after that and he took advantage of it by playing a couple of crucial knocks. Uthappa furter talked about KL Rahul’s role in the ODI team and he suggested that India should not do experiment with his batting-order as he has been doing well at number 5. “He should be wicketkeeping and bat in the middle order. He has been performing well in the middle order. His average is also above 50 at 5. He should keep and bat at 5 in the World Cup. He has a phenomenal average of above 50. When he is doing well in this position, why experiment then? " Talking about Rishabh Pant’s absence, Uthappa said that Rahul should be the ideal choice as a wicketkeeper batter in the ODI team. “Whatever happened with Rishabh is sad. If he (Rishabh) was there, he would have been the first choice of course. In Rishabh’s absence, Rahul becomes the first choice right now. India should stick with Rahul as a keeper and batsman at No. 5. "
english
Ei SMS Ta Je meye porbe she amar"PREMIKA"Na porle"NANI"Delete korle"SHALI"Na dekhle"DADI"Hashle"BEYAIN"Ragkorle "BOU" - AMI EXAM VALOBASI,IF U AGREE ...TAHOLE NICHER BLANK E TOMAR NAM LIKHO R SOBAI K PATHO- ___,CHANAKYA,BIRBAL, KALIDAS,VIDYA SAGAR. - SOB SOMAY MONE RAKHBE....CREAM BISCUIT E CREAM THAKE,KINTU TIGER BISCUIT E TIGER THAKE NA.
english
Islamabad, April 16: Former Pakistan Prime Minister Imran Khan received 58 gifts of more than Rs 140 million from the world leaders during his three-and-a-half-year stint and retained all of them either by paying a negligible amount or even without any payment, The News reported. The most expensive among them were, according to Prime Minister Shehbaz Sharif, sold in Dubai. Information obtained by The News suggests that Imran had to pay in order to retain 15 expensive gifts. He paid Rs 38 million for the gifts valued at Rs 140 million and other gifts worth Rs 8,00,200 were retained without making any payment. Among the most expensive was one set of gifts he had received after his inauguration as the prime minister in August 2018. It included the Graff watch of Rs 85 million that was received together with cufflinks of Rs 5. 67 million, a pen of Rs 1. 5 million and a ring of Rs 8. 75 million. Their price assessment was made by the evaluation committee set up by him. All these gifts which had a total value of around Rs 100 million were retained by Imran Khan in September 2018 by paying 20 per cent (Rs 20 million) of their estimated value, The News reported. They were subsequently sold in Dubai earning Rs 155 million, alleged Pak Prime Minister Shehbaz Sharif. It has to be determined who paid for their retention, whether Imran Khan paid any capital gains tax or not. According to the rules, a gift received by a government functionary from a leader of another country is deposited with the treasury. Those interested to retain the gift can do so by paying a certain amount of the value, which was 20 per cent at the time Imran Khan had retained the above-mentioned gifts. The rules were revised in December 2018 that required the payment of 50 per cent to retain these gifts. 
As far as the other gifts retained by Imran Khan are concerned, a set of gifts containing a Rolex watch, a pair of cufflinks, one ring and one box containing necklace, bracelet, and a pair of earrings was valued at Rs 23. 5 million and it was retained through Rs 11. 5 million. By that time, the rules were revised and retention could be done through payment of 50 per cent of the total value. The other gifts included a Rolex watch worth Rs 3. 8 million, which he had retained in October 2018 by paying around Rs 7,54,000. Another Rolex watch of Rs 1. 5 million was retained in return of Rs 2,94,000. Another set of gifts included a couple of Rolex watches, iPhone and other items worth Rs 1. 73 million, which was retained for Rs 3,38,600.
english
Indian aviation regulator DGCA has confirmed that a missing washer was found in one of the 39 under-fire Boeing 737MAX fleets during an inspection. Boeing recommended a one-time inspection of all 737MAX aircraft before January 10, 2024. The DGCA has confirmed that checks on operational Boeing 737-8 Max planes in India have been completed satisfactorily. Alaska Airlines decided to restrict the aircraft from long flights over water so that if the warning light reappeared, the plane "could return very quickly to an airport," National Transportation Safety Board Chair Jennifer Homendy said Sunday night. U.S. regulators grounded 171 MAX 9 planes after a panel blew off an Alaska Airlines-operated flight not long after taking off from a Portland, Oregon, airport on Friday, forcing pilots to scramble to land the plane safely. United, one of the two U.S. carriers that fly this Boeing model with the panels, said its own preliminary checks found bolts that needed tightening on several panels. Megacaps advanced, lifting stocks such as Amazon.com which closed up 2.66% and Alphabet, which rose 2.29%, as Treasury yields fell ahead of readings on inflation and a new supply of government debt this week, with the benchmark 10-year U.S. Treasury yield hitting a low of 3.966% on the session. The NTSB highlighted that pilots had reported pressurization warning lights in three prior flights involving the specific Alaska Airlines Max 9. The decision to restrict flights over water aimed to ensure a quick return to an airport in case of repeated warnings. Boeing slid 9.1% after the U.S. Federal Aviation Administration ordered the temporary grounding of some 737 MAX 9 jets fitted with a panel that blew off an Alaska Air Group jet in midair on Friday. Alaska Air Group slumped 3.9% after the carrier canceled more than 200 flights following the FAA order. 
A piece of fuselage tore off an Alaska Airlines 737 MAX 9 jet on Friday following takeoff from Portland, Oregon, forcing pilots to turn back. The U.S. Federal Aviation Administration (FAA) subsequently ordered the temporary grounding of 171 narrowbody MAX 9 jets. NTSB chair Jennifer Homendy said the flight data recorder and cockpit voice recorder were sent to NTSB labs on Sunday to be read, but no data was available on the cockpit voice recorder because it was not retrieved by the two-hour mark, when recording restarts and previous data is erased. For Boeing Chief Executive Officer Dave Calhoun, the Alaska Air episode is another blow to his efforts to stabilize the company after half a decade of upheaval, coming just a few days into a new year he had heralded as crucial to a turnaround. Boeing is still feeling the reverberations of two deadly 737 Max crashes almost five years ago that shook confidence in the company. Now Boeing’s fraught relationship with its biggest supplier — Spirit AeroSystems Holdings Inc. — stands to face fresh scrutiny. "We are very fortunate this didn't end up in something more tragic," Jennifer Homendy, the head of the National Transportation Safety Board, said at a press conference in Portland, Oregon, on Saturday. Turkish Airlines said its country's civil aviation authority asked it to examine its small fleet of five 737 Max 9 planes. Until the technical review is complete, the carrier has withdrawn the jets from service. India's aviation regulator has confirmed that domestic airlines have successfully performed checks on Boeing 737 Max 8 aircraft following a cabin panel blowout on a new Alaska Airlines 737 Max 9 aircraft. The DGCA has ordered all Indian operators to check the operation and proper closing of over-wing emergency exits as a precautionary measure. 
The Federal Aviation Administration (FAA) on Saturday ordered the temporary grounding of 171 Boeing MAX 9 jets installed with the same panel after the Alaskan Airlines plane was forced to make an emergency landing with a gap in the fuselage. A piece of fuselage tore off an Alaska Airlines jet on Friday following takeoff from Portland, Oregon, forcing pilots to turn back. Jefferies said in a note that the latest Boeing incident could slow aircraft production if manufacturing and installation processes are subject to further regulatory probes. Indian air carrier Vistara expects to receive its last 787 wide-body jet from Boeing by March or April, despite a recent incident involving a narrow-body 737 MAX 9 that lost part of the fuselage. The Federal Aviation Administration has grounded 171 Boeing 737 MAX 9 planes. The Federal Aviation Administration "is requiring immediate inspections of certain Boeing 737 MAX 9 planes before they can return to flight," the agency said on X, the former Twitter. The agency said around 171 aircraft worldwide would be affected, with each inspection taking four to eight hours. Passenger photos appeared to show that a section of the fuselage sometimes used for an optional rear mid-cabin exit door had vanished, leaving a neat door-shaped gap. The extra door is typically installed by low-cost airlines using extra seats that require more paths for evacuation. However, those doors are permanently "plugged," or deactivated, on some jets including those of Alaska Airlines. The head of the U.S. National Transportation Safety Board (NTSB), Jennifer Homendy, reported that no passengers were seated next to the cabin panel that blew out on an Alaska Airlines Boeing 737 MAX 9 during an emergency landing. The incident occurred shortly after takeoff from Portland, Oregon, en route to Ontario, California. 
The DGCA said on Saturday, 'Following the Alaska Airlines incident involving a Boeing 737-9 Max aircraft, there have been no inputs/guidance from Boeing so far. None of the Indian air operators have Boeing 737-9 Max as part of their fleet yet.' 'However, as an abundant precautionary measure, the DGCA has directed all Indian air operators to carry out a one-time inspection of the emergency exits immediately on all Boeing 737-8 Max aircraft currently operating as part of their fleet,' the regulator added. The FAA on Saturday ordered the temporary grounding of 171 Boeing jets installed with the same panel after an eight-week-old Alaska Airlines jet was forced to make an emergency landing with a gap in the fuselage. The door plug tore off the left side of the jet following takeoff from Portland, Oregon, en route to Ontario, California, forcing pilots to turn back and land safely with all 171 passengers and six crew on board. Two crashes involving Boeing 737 Max 8 aircraft killed a total of 346 people in less than five months in 2018 and 2019. Both crashes were later associated with a malfunctioning system that overrode pilot commands.
english
////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript.cpp // // The routines shown here for creating encapsulated postscript // figures were adapted from a modification of PlotRNA by // <NAME>. ////////////////////////////////////////////////////////////////////// #include "EncapsulatedPostScript.hpp" ////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript::EncapsulatedPostScript() // // Constructor. Write PostScript prolog. ////////////////////////////////////////////////////////////////////// EncapsulatedPostScript::EncapsulatedPostScript(std::ostream &out, double image_width, double image_height, int font_size) : out(out), image_width(image_width), image_height(image_height), font_size(font_size), done(false) { out << "%!PS-Adobe-3.0 EPSF-3.0" << std::endl << "%%BoundingBox: 0 0 " << int(image_width) << " " << int(image_height) << std::endl << "1.0 1.0 scale" << std::endl << "0 0 translate" << std::endl << "/HelveticaBold findfont" << std::endl << font_size << " scalefont" << std::endl << "setfont" << std::endl; } ////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript::SetRGBColor() // // Set current color explicitly. ////////////////////////////////////////////////////////////////////// void EncapsulatedPostScript::SetRGBColor(double r, double g, double b) { Assert(0.0 <= r && r <= 1.0, "Out-of-range."); Assert(0.0 <= g && g <= 1.0, "Out-of-range."); Assert(0.0 <= b && b <= 1.0, "Out-of-range."); out << std::setprecision(3) << std::setiosflags(std::ios::showpoint | std::ios::fixed) << r << " " << g << " " << b << " setrgbcolor" << std::endl; } ////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript::SetColorBlack() // // Set current color back to black. 
////////////////////////////////////////////////////////////////////// void EncapsulatedPostScript::SetColorBlack() { out << "0 0 0 setrgbcolor" << std::endl; } ////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript::DrawString() // // Write a text string. Adapted from: // // http://www.nipr.ac.jp/~uap-mon/uapm/src.bak/pltSyowaMag_save.c // // Text alignment: // pos_x : x-align 0:left 1:center 2:right // pos_y : y-align 0:bottom 1:center 2:top ////////////////////////////////////////////////////////////////////// void EncapsulatedPostScript::DrawString(double x, double y, const std::string &s, int pos_x, int pos_y) { if (done) Error("EPS file already closed."); int kx = 0, ky = 0; out << std::setprecision(3) << std::setiosflags(std::ios::showpoint | std::ios::fixed) << x << " " << image_height - y << " moveto" << std::endl << "(" << s << ")" << std::endl; if (pos_x == 1) kx = 2; if (pos_x == 2) kx = 1; if (pos_y == 1) ky = 2; if (pos_y == 2) ky = 1; if (pos_x == 1 || pos_x == 2) { out << "dup stringwidth pop " << kx << " div neg 0 rmoveto" << std::endl; } if( pos_y == 1 || pos_y == 2 ) { out << "gsave" << std::endl << "newpath" << std::endl << "0 0 moveto" << std::endl << "(" << s << ") true charpath flattenpath" << std::endl << "pathbbox /charheight exch def pop pop pop" << std::endl << "closepath" << std::endl << "grestore" << std::endl << "0 charheight " << ky << " div neg rmoveto" << std::endl; } out << "show" << std::endl; } ////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript::DrawLine() // // Draw a line from (sx,sy) to (ex,ey) with given width. 
////////////////////////////////////////////////////////////////////// void EncapsulatedPostScript::DrawLine(double sx, double sy, double ex, double ey, double width) { if (done) Error("EPS file already closed."); out << std::setprecision(3) << std::setiosflags(std::ios::showpoint | std::ios::fixed) << width << " setlinewidth" << std::endl << sx << " " << image_height - sy << " moveto " << ex << " " << image_height - ey << " lineto stroke" << std::endl; } ////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript::DrawCircle() // // Draw a circle at (x,y) with given radius. ////////////////////////////////////////////////////////////////////// void EncapsulatedPostScript::DrawCircle(double x, double y, double r) { if (done) Error("EPS file already closed."); out << std::setprecision(3) << std::setiosflags(std::ios::showpoint | std::ios::fixed) << x << " " << image_height - y << " moveto" << std::endl << x << " " << image_height - y << " " << r << " 0 360 arc closepath fill" << std::endl; } ////////////////////////////////////////////////////////////////////// // EncapsulatedPostScript::Close() // // Finish EPS file. ////////////////////////////////////////////////////////////////////// void EncapsulatedPostScript::Close() { if (done) Error("EPS file already closed."); out << "showpage" << std::endl << "%EOF" << std::endl; done = true; }
cpp
/*
 * Copyright <NAME> 2007 - 2015.
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 */

// Boost.Log example: registering a custom filter factory so that a
// user-defined attribute value type ("point") can be referenced from
// textual filter expressions, including a custom relation
// ("is_in_rectangle").  The "//[" ... "//]" and "//<-" ... "//->"
// markers are Boost quickbook snippet delimiters for the docs.

#include <string>
#include <iostream>
#include <stdexcept>
#include <boost/smart_ptr/shared_ptr.hpp>
#include <boost/smart_ptr/make_shared_object.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/phoenix.hpp>
#include <boost/log/core.hpp>
#include <boost/log/expressions.hpp>
#include <boost/log/attributes/attribute_name.hpp>
#include <boost/log/attributes/scoped_attribute.hpp>
#include <boost/log/sources/logger.hpp>
#include <boost/log/sources/record_ostream.hpp>
#include <boost/log/utility/value_ref.hpp>
#include <boost/log/utility/formatting_ostream.hpp>
#include <boost/log/utility/manipulators/add_value.hpp>
#include <boost/log/utility/setup/filter_parser.hpp>
#include <boost/log/utility/setup/common_attributes.hpp>
#include <boost/log/utility/setup/console.hpp>

namespace logging = boost::log;
namespace attrs = boost::log::attributes;
namespace src = boost::log::sources;
namespace expr = boost::log::expressions;
namespace sinks = boost::log::sinks; // note: unused in this example
namespace keywords = boost::log::keywords;

// A simple 2-D point: the attribute value type this example filters on.
struct point
{
    float m_x, m_y;

    point() : m_x(0.0f), m_y(0.0f) {}
    point(float x, float y) : m_x(x), m_y(y) {}
};

// Comparison and stream operators the filter parser relies on
// (equality/inequality relations and lexical_cast-based parsing).
bool operator== (point const& left, point const& right);
bool operator!= (point const& left, point const& right);

template< typename CharT, typename TraitsT >
std::basic_ostream< CharT, TraitsT >& operator<< (std::basic_ostream< CharT, TraitsT >& strm, point const& p);

template< typename CharT, typename TraitsT >
std::basic_istream< CharT, TraitsT >& operator>> (std::basic_istream< CharT, TraitsT >& strm, point& p);

// Tolerance for the approximate float comparison below.
const float epsilon = 0.0001f;

// Points compare equal when both coordinates match within epsilon.
bool operator== (point const& left, point const& right)
{
    return (left.m_x - epsilon <= right.m_x && left.m_x + epsilon >= right.m_x) &&
        (left.m_y - epsilon <= right.m_y && left.m_y + epsilon >= right.m_y);
}

bool operator!= (point const& left, point const& right)
{
    return !(left == right);
}

// Formats a point as "(x, y)".
template< typename CharT, typename TraitsT >
std::basic_ostream< CharT, TraitsT >& operator<< (std::basic_ostream< CharT, TraitsT >& strm, point const& p)
{
    if (strm.good())
        strm << "(" << p.m_x << ", " << p.m_y << ")";
    return strm;
}

// Parses the "(x, y)" form produced above; sets failbit on any
// punctuation mismatch so lexical_cast reports the error.
template< typename CharT, typename TraitsT >
std::basic_istream< CharT, TraitsT >& operator>> (std::basic_istream< CharT, TraitsT >& strm, point& p)
{
    if (strm.good())
    {
        CharT left_brace = static_cast< CharT >(0), comma = static_cast< CharT >(0), right_brace = static_cast< CharT >(0);
        strm.setf(std::ios_base::skipws);
        strm >> left_brace >> p.m_x >> comma >> p.m_y >> right_brace;
        if (left_brace != '(' || comma != ',' || right_brace != ')')
            strm.setstate(std::ios_base::failbit);
    }
    return strm;
}

//[ example_extension_filter_parser_rectangle_definition
// An axis-aligned rectangle, used as the argument of the custom
// "is_in_rectangle" relation.
struct rectangle
{
    point m_top_left, m_bottom_right;
};

template< typename CharT, typename TraitsT >
std::basic_ostream< CharT, TraitsT >& operator<< (std::basic_ostream< CharT, TraitsT >& strm, rectangle const& r);

template< typename CharT, typename TraitsT >
std::basic_istream< CharT, TraitsT >& operator>> (std::basic_istream< CharT, TraitsT >& strm, rectangle& r);
//]

// Formats a rectangle as "{(x1, y1) - (x2, y2)}".
template< typename CharT, typename TraitsT >
std::basic_ostream< CharT, TraitsT >& operator<< (std::basic_ostream< CharT, TraitsT >& strm, rectangle const& r)
{
    if (strm.good())
        strm << "{" << r.m_top_left << " - " << r.m_bottom_right << "}";
    return strm;
}

// Parses the "{(x1, y1) - (x2, y2)}" form produced above.
template< typename CharT, typename TraitsT >
std::basic_istream< CharT, TraitsT >& operator>> (std::basic_istream< CharT, TraitsT >& strm, rectangle& r)
{
    if (strm.good())
    {
        CharT left_brace = static_cast< CharT >(0), dash = static_cast< CharT >(0), right_brace = static_cast< CharT >(0);
        strm.setf(std::ios_base::skipws);
        strm >> left_brace >> r.m_top_left >> dash >> r.m_bottom_right >> right_brace;
        if (left_brace != '{' || dash != '-' || right_brace != '}')
            strm.setstate(std::ios_base::failbit);
    }
    return strm;
}

//[ example_extension_custom_filter_factory_with_custom_rel
// The function checks if the point is inside the rectangle.  An empty
// value_ref (attribute missing in the record) never matches.
bool is_in_rectangle(logging::value_ref< point > const& p, rectangle const& r)
{
    if (p)
    {
        return p->m_x >= r.m_top_left.m_x && p->m_x <= r.m_bottom_right.m_x &&
            p->m_y >= r.m_top_left.m_y && p->m_y <= r.m_bottom_right.m_y;
    }
    return false;
}

// Custom point filter factory: teaches the textual filter parser how
// to build filters over attributes of type "point".
class point_filter_factory :
    public logging::filter_factory< char >
{
public:
    // "%Name%" with no relation: test the attribute's presence.
    logging::filter on_exists_test(logging::attribute_name const& name)
    {
        return expr::has_attr< point >(name);
    }

    // "%Name% = <arg>": the argument text is parsed via operator>>.
    logging::filter on_equality_relation(logging::attribute_name const& name, string_type const& arg)
    {
        return expr::attr< point >(name) == boost::lexical_cast< point >(arg);
    }

    logging::filter on_inequality_relation(logging::attribute_name const& name, string_type const& arg)
    {
        return expr::attr< point >(name) != boost::lexical_cast< point >(arg);
    }

    // Any other relation keyword; only "is_in_rectangle" is supported.
    logging::filter on_custom_relation(logging::attribute_name const& name, string_type const& rel, string_type const& arg)
    {
        if (rel == "is_in_rectangle")
        {
            return boost::phoenix::bind(&is_in_rectangle, expr::attr< point >(name), boost::lexical_cast< rectangle >(arg));
        }
        throw std::runtime_error("Unsupported filter relation: " + rel);
    }
};

void init_factories()
{
//<-
    logging::register_simple_formatter_factory< point, char >("Coordinates");
//->
    logging::register_filter_factory("Coordinates", boost::make_shared< point_filter_factory >());
}
//]

// Sets up a console sink whose filter and format are given as text;
// both reference the "Coordinates" attribute handled by the factories
// registered above.
void init_logging()
{
    init_factories();

    logging::add_console_log
    (
        std::clog,
        keywords::filter = "%Coordinates% is_in_rectangle \"{(0, 0) - (20, 20)}\"",
        keywords::format = "%TimeStamp% %Coordinates% %Message%"
    );

    logging::add_common_attributes();
}

int main(int, char*[])
{
    init_logging();

    src::logger lg;

    // We have to use scoped attributes in order coordinates to be passed to filters
    {
        BOOST_LOG_SCOPED_LOGGER_TAG(lg, "Coordinates", point(10, 10));
        BOOST_LOG(lg) << "Hello, world with coordinates (10, 10)!";
    }
    {
        BOOST_LOG_SCOPED_LOGGER_TAG(lg, "Coordinates", point(50, 50));
        BOOST_LOG(lg) << "Hello, world with coordinates (50, 50)!"; // this message will be suppressed by filter
    }

    return 0;
}
cpp
<reponame>andrewscofield/acf-recaptcha<filename>css/settings-page.css<gh_stars>10-100
/* Blue informational banner shown on the ACF reCAPTCHA settings page.
   Laid out as a wrapping flex row with white headline/body text and a
   lighter tint for descriptions and links. */
.acf-recaptcha-settings-info-box {
    background-color: #568EE8;
    display: flex;
    flex: 0 1 auto;
    flex-direction: row;
    flex-wrap: wrap;
    padding: 1.5rem 0.5rem;
    margin-bottom: 0.5rem;
    border-radius: 0.5rem;
}

/* Primary text inside the banner is white for contrast. */
.acf-recaptcha-settings-info-box h1,
.acf-recaptcha-settings-info-box p {
    color: #ffffff;
}

/* Secondary text (descriptions, links) uses a softer light gray. */
.acf-recaptcha-settings-info-box p.description,
.acf-recaptcha-settings-info-box a {
    color: #d7dde6;
}

/* Each direct child is a padded flex column starting at zero basis. */
.acf-recaptcha-settings-info-box > * {
    padding: 0 1rem;
    flex-basis: 0;
}

/* On small screens the illustration takes a full row of its own. */
@media only screen and (max-width: 47.995em) {
    .acf-recaptcha-settings-info-box .message-img {
        max-width: 100%;
        flex-basis: 100%;
    }
}

/* The text column absorbs the remaining horizontal space. */
.acf-recaptcha-settings-info-box .message-text {
    flex-grow: 1;
}

.acf-recaptcha-settings-info-box .message-text h1 {
    padding-top: 0;
}
css
Original with: Punjab Vidhan Sabha Digitized by: Panjab Digital Library [ Shri Ram Kishan ] (ii) whether Chak No. 52/2L is included in this list; if so, the list of owners of Chak No. 52/2L, Tehsil Okara, who are entitled to get sub-urban area in Punjab (India); Punjab Legislative Assembly (iii) the serial number of squares as recorded in revenue papers of Chak No. 52/2L. referred to above, which have been included in this list of suburban areas for allotment purposes ; (iv) the list of land owners of Chak No. 52/2L, who have been selected for the allotment in sub urban areas in the State ; (v) the distance of each of the squares from Nos. 40 to 51 of Chak No. 52/2L from the Municipal Boundary of Okara ; (c) (i) the list of the villages around Kasur town the owners of which are eligible for allotment of sub-urban area in the Punjab (India); (ii) whether the name of village Khara is included in this list; if so, the area declared as sub-urban for allotment purposes ; (iii) the list of the land owners of village Khara who will be entitled to sub-urban lands in the State and the sub-urban area to which each of them is entitled? Sardar Ujjal Singh: (a) Sub-urban maps of Okara town have not so far been received. Only a few maps of Kasur town were received from Pakistan Government on 19th May 1952; (b) (i) In the absence of sub-urban maps of Okara Town verification was made on the basis of the instructions contained in R-13. Chapter VII. A list (Annexure "A") giving the required information is given below; (ii) On the verification made in accordance with the instruction R-13, this Chak is declared partially sub-urban. A list (Annexure B) showing the names of the persons who are entitled to sub-urban allotment is given below; (iii) A list (Annexure B) drawn on the basis of instruction R-13, is given below; (iv) A list (Annexure B) is given below; (v) Squares Nos. 
41 and 47 are on the basis of verification under instruction R-13, within one mile from the Municipal limits of Okara and the rest are outside one mile distance ;
english
Wasim Jaffer and Hiken Shah hit twin half-centuries to help Mumbai reach 414 for nine at stumps against Uttar Pradesh on the second day of their Ranji Trophy Elite Group A match on Wednesday. Jaffer scored 55 in 104 balls with the help of five fours, while Shah came up with a 114-ball 56 studded with six boundaries to take Mumbai close to the 400-mark at Dr Akhilesh Das Stadium. Murtaza Hussain (11) and Aavishkar Salvi (2) were at the crease at the draw of stumps here. Resuming at 203 for 3, night watchman Dhawal Kulkarni scored 38 and added 100 runs with Jaffer for the fourth wicket before being removed by Imtiyaz Ahmed. Wickets tumbled in between, but Shah and Hussain ensured they did not spoil the hard work, stitching 53 runs for the ninth wicket to consolidate Mumbai's position. For UP, Ahmed was the pick of the bowlers with three wickets, while Bhuvneshwar Kumar, Piyush Chawla and Arish Alam took two scalps each. (PTI)
english
<gh_stars>1-10 {"web":[{"value":["运河","纳尔","沟渠"],"key":"canal"},{"value":["运河街车站","坚尼街","坚尼街车站 (IRT百老汇-第七大道线)"],"key":"Canal Street"},{"value":["伊利运河","伊利运河","伊利运河"],"key":"Erie Canal"}],"query":"canal","translation":["运河"],"errorCode":"0","dict":{"url":"yddict://m.youdao.com/dict?le=eng&q=canal"},"webdict":{"url":"http://m.youdao.com/dict?le=eng&q=canal"},"basic":{"us-phonetic":"kə'næl","phonetic":"kə'næl","uk-phonetic":"kə'næl","explains":["n. 运河;[地理] 水道;[建] 管道;灌溉水渠","vt. 在\u2026开凿运河","n. (Canal)人名;(英、法、西、意、葡)卡纳尔;(土)贾纳尔"]},"l":"EN2zh-CHS"}
json
Tessa Blanchard, the former IMPACT! World Champion, has signed with Women of Wrestling (WOW). Blanchard and the current WOW World Champion The Beast are the only two women signed so far. Her participation was announced in a relaunch video released by WOW. With Blanchard gone for more than a year, reports of her returning swirled over the past few days. Two days ago, Sean Ross Sapp of Fightful Select reported that Blanchard will be part of the relaunch of WOW. Shortly after winning the IMPACT World Championship, Blanchard took a leave of absence due to the COVID-19 pandemic. It was reported that Blanchard was released from IMPACT in June 2020 after failing to prove she was self-isolating in Mexico ahead of a title defense. IMPACT Wrestling officials reportedly thought that Blanchard "would no show" her title defense at that year's Slammiversary event. Tessa Blanchard has been rumored to return to wrestling throughout the past year. Blanchard was also rumored to be interested in working with both AEW and WWE. However, it was reported that neither company wanted to work with Blanchard, specifically citing her backstage issues with other wrestlers. Blanchard became a pioneer for intergender wrestling in 2019 and early 2020. In July 2019, the Diamond Standard faced fellow IMPACT star Sami Callihan at Slammiversary XVII. This was the first intergender match to ever headline a pro-wrestling pay-per-view. While Blanchard lost, she went on to face Callihan again at Hard to Kill 2020, where she became the first woman to win the IMPACT World Championship. Her last appearance on IMPACT came in early March 2020, just at the start of the pandemic. Along with Blanchard, AJ Lee has been announced for Women of Wrestling as an executive producer. The former WWE Divas Champion hasn't been seen in pro-wrestling since retiring following WrestleMania 31. What will Tessa Blanchard do in WOW? Will she be well received by the fans? Tell us your thoughts in the comment section below!
english
/*
 * Copyright 2015-2017 floragunn GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 *
 * Modifications Copyright OpenSearch Contributors. See
 * GitHub history for details.
 */

package org.opensearch.security.securityconf.impl.v7;

import java.util.Collections;
import java.util.List;

import com.fasterxml.jackson.annotation.JsonProperty;

import org.opensearch.security.securityconf.Hideable;
import org.opensearch.security.securityconf.StaticDefinable;
import org.opensearch.security.securityconf.impl.v6.ActionGroupsV6;

/**
 * v7 configuration model of a security action group: a named bundle of
 * allowed actions, plus flags controlling visibility and mutability.
 * Instances are created either by Jackson deserialization or by
 * migrating a v6 {@link ActionGroupsV6} entry.
 */
public class ActionGroupsV7 implements Hideable, StaticDefinable {

    // True when the group is reserved and must not be modified by users.
    private boolean reserved;
    // True when the group is hidden from listings.
    private boolean hidden;
    // True when the group is statically defined; serialized as "static"
    // because "static" is a reserved word in Java.
    @JsonProperty(value = "static")
    private boolean _static;
    // Actions granted by this group; defaults to an empty list, never null.
    private List<String> allowed_actions = Collections.emptyList();
    // Group category, e.g. "cluster" or "index" ("unknown" for legacy input).
    private String type;
    // Human-readable description of the group's origin/purpose.
    private String description;

    /** No-arg constructor for deserialization. */
    public ActionGroupsV7() {
        super();
    }

    /**
     * Migrates a v6 action group.  The category is inferred from the
     * group name: names containing "cluster" become cluster groups,
     * everything else is treated as an index group.
     */
    public ActionGroupsV7(String agName, ActionGroupsV6 ag6) {
        this.reserved = ag6.isReserved();
        this.hidden = ag6.isHidden();
        this.allowed_actions = ag6.getPermissions();
        if (agName.toLowerCase().contains("cluster")) {
            this.type = "cluster";
        } else {
            this.type = "index";
        }
        this.description = "Migrated from v6";
    }

    /**
     * Migrates a bare permission list (legacy v6 format).  The key is
     * accepted for signature compatibility but carries no information
     * usable here, so the type is recorded as "unknown".
     */
    public ActionGroupsV7(String key, List<String> allowed_actions) {
        this.allowed_actions = allowed_actions;
        this.type = "unknown";
        this.description = "Migrated from v6 (legacy)";
    }

    public String getType() {
        return this.type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public boolean isReserved() {
        return this.reserved;
    }

    public void setReserved(boolean reserved) {
        this.reserved = reserved;
    }

    public boolean isHidden() {
        return this.hidden;
    }

    public void setHidden(boolean hidden) {
        this.hidden = hidden;
    }

    public List<String> getAllowed_actions() {
        return this.allowed_actions;
    }

    public void setAllowed_actions(List<String> allowed_actions) {
        this.allowed_actions = allowed_actions;
    }

    @JsonProperty(value = "static")
    public boolean isStatic() {
        return this._static;
    }

    @JsonProperty(value = "static")
    public void setStatic(boolean _static) {
        this._static = _static;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("ActionGroupsV7 [reserved=");
        sb.append(reserved)
                .append(", hidden=").append(hidden)
                .append(", _static=").append(_static)
                .append(", allowed_actions=").append(allowed_actions)
                .append(", type=").append(type)
                .append(", description=").append(description)
                .append("]");
        return sb.toString();
    }
}
java
<reponame>singh55preetika/simplifyi-phase1
package com.simplifyi.simplify_phase1.Activities;

import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;

import com.simplifyi.simplify_phase1.R;

import me.anwarshahriar.calligrapher.Calligrapher;

/**
 * Screen backed by the {@code launchcall} layout.  Its only setup work
 * is applying the app-wide custom font to the inflated view hierarchy.
 */
public class LaunchCallActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.launchcall);

        // Apply the Roboto font (declared in assets as roboto.xml) to
        // every view in this activity; "true" also covers toolbars.
        final Calligrapher fontApplier = new Calligrapher(this);
        fontApplier.setFont(this, "roboto.xml", true);
    }
}
java
Bommai Nayagi was released in theaters on 03 Feb 2023. Movies like Demonte Colony 2, Vidaamuyarchi, Thangalaan and others in a similar vein share the same genre but have quite different stories. The soundtracks and background music were composed by Sundaramurthy KS for the movie Bommai Nayagi. The cinematography for Bommai Nayagi was shot by Athisayaraj R. You can watch the Bommai Nayagi movie on Zee5. On 10 Mar 2023 Bommai Nayagi was released on the Zee5 platform. The movie Bommai Nayagi belongs to the Drama and Musical genres.
english
/* Page footer: flex-centered horizontally, pushed off the content
   above it, and rendered half-transparent so it reads as secondary. */
.footer {
    display: flex;
    justify-content: center;
    margin-top: 15px;
    opacity: 0.5;
}
css
/* Copyright (C) 2013-2021 TU Dortmund
 * This file is part of LearnLib, http://www.learnlib.de/.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.learnlib.util.mealy;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import de.learnlib.api.oracle.MembershipOracle;
import de.learnlib.api.query.Query;
import net.automatalib.words.Word;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * Word-to-Symbol-Oracle adapter.
 * <p>
 * Wraps an oracle which uses {@link Word}s as its output to an oracle which only yields the last symbol of each
 * output.  Queries whose output word is empty are answered with {@code null}.
 *
 * @param <I>
 *         input symbol type
 * @param <O>
 *         output symbol type
 *
 * @author <NAME>
 */
final class SymbolOracleWrapper<I, O> implements MembershipOracle<I, @Nullable O> {

    // The underlying oracle that answers with full output words.
    private final MembershipOracle<I, Word<O>> wordOracle;

    /**
     * Constructor.
     *
     * @param wordOracle
     *         the {@link MembershipOracle} returning output words.
     */
    SymbolOracleWrapper(MembershipOracle<I, Word<O>> wordOracle) {
        this.wordOracle = wordOracle;
    }

    @Override
    public void processQueries(Collection<? extends Query<I, @Nullable O>> queries) {
        // Wrap every symbol-level query in an adapter that extracts the
        // last output symbol, then answer them all in one batch through
        // the word-level oracle.
        List<LastSymbolQuery<I, O>> lsQueries = new ArrayList<>(queries.size());
        for (Query<I, @Nullable O> qry : queries) {
            lsQueries.add(new LastSymbolQuery<>(qry));
        }

        wordOracle.processQueries(lsQueries);
    }

    /**
     * Adapter query: forwards prefix/suffix to the wrapped query and
     * translates the word answer into its last symbol.
     */
    private static final class LastSymbolQuery<I, O> extends Query<I, Word<O>> {

        private final Query<I, @Nullable O> originalQuery;

        LastSymbolQuery(Query<I, @Nullable O> originalQuery) {
            this.originalQuery = originalQuery;
        }

        @Override
        public void answer(Word<O> output) {
            // A null word indicates a broken oracle; reject it rather
            // than silently answering the wrapped query with null.
            if (output == null) {
                throw new IllegalArgumentException("Query answer words must not be null");
            }
            // The empty word has no last symbol, so the symbol-level
            // answer is null in that case.
            originalQuery.answer(output.isEmpty() ? null : output.lastSymbol());
        }

        @Override
        public Word<I> getPrefix() {
            return originalQuery.getPrefix();
        }

        @Override
        public Word<I> getSuffix() {
            return originalQuery.getSuffix();
        }

        @Override
        public String toString() {
            return originalQuery.toString();
        }
    }
}
java
Chennai, Oct 29: The Madras High Court on Tuesday made strong observations asking whether the government needs a corpse to act on implementing each and every statute, hours after the decomposed body of a two-year-old boy was pulled out from an unused borewell. Rescuers pulled out the decomposed and mangled body of Sujith Wilson early Tuesday from deep inside the borewell, after a futile 80-hour attempt to save the child who had fallen in while playing near his house in a village in Tamil Nadu's Tiruchirappalli district. In oral observations, the court also rapped the media, saying it was not streaming anything constructive in creating public awareness on the implementation of rules and regulation on borewells and tubewells, except giving a live relay on the rescue operations that eventually ended on a tragic note. It further observed that everybody has a social responsibility to bring awareness and it is more on the media in publishing the guidelines issued by the state government based on the Supreme Courts order. RIP Sujith: Borewell was supposed to be closed. . . Who should be held responsible? A division bench of Justices M Sathyanararayanan and N Seshasayee was hearing a petition seeking a direction to authorities for strict implementation of guidelines issued by the Supreme Court and for enforcement of The Tamil Nadu Panchayats (Regulations of Sinking of wells and Safety Measures) Rules 2015. The court impleaded the civic authorities and directed them to produce details on the action taken against violators who had failed to comply with the conditions stipulated for digging borewells and tubewells. The plea was filed by V Ponraj, a scientist who had worked with former president late APJ Abdul Kalam and was the Director, Technology Interface, at the Rashtrapathi Bhavan. 
The court said the counter-affidavit to be filed by November 21 should have details on the number of permissions granted and the list of contractors and entities who were accorded permission to dig borewells and tubewells and list of unused and abandoned borewells and tubewells as well as the penal action taken against the violators. The court has impleaded principal secretaries to Municipal Administration and Water Supply Department and Rural Development Panchayat Raj as parties. When the plea was taken up, the bench orally observed whether the government needs a "dead body" for implementation of each and every rule. The petitioner, citing the Supreme Court order in 2009 and guidelines issued by the Ministry of Women and Child Development in 2013, alleged that the Tamil Nadu government did not comply with the guidelines which led to a similar tragedy in Tiruvannamalai district in 2012. Immediately after this incident, a plea was filed before the High Court and on the basis of the court direction, a special Act to prevent such incidents was enacted and the Tamil Nadu Panchayats ( Regulations of Sinking of wells and Safety Measures) Rules 2015 came into force, the petitioner said. This was also not complied with by authorities this time around, he said. The petitioner, referring to the death of Sujith Wilson, said the main cause of the death was failure in the implementation of both the apex court and the High Court orders and non-compliance of the guidelines. The petitioner, pointing out to the rescue operations and the methods adopted, said "The methods adopted by government authorities one after another clearly shows that the government is not in a position to ascertain the ground reality and not prepared or equipped with the rescue operations. This is not the first incident, there were many that had taken place in Tamil Nadu, the petitioner submitted. 
The petitioner claimed the government did not have any solution or any technology when a disaster occurs and if rescue services were not equipped with latest technologies, the life and liberty of the people of Tamil Nadu would be at peril. Sujith had fallen into the disused farm borewell while playing near his house in Nadukattupatti. Initially, he was stuck at a depth of about 30 feet but subsequently slipped further down, and the body was finally pulled out from a depth of 88 feet. After post-mortem, the body was handed over to his parents who buried him in a heart-wrenching funeral conducted by the local parish priest at a nearby graveyard.
english
Mumbai: Ahead of the start of the much-awaited season of the Indian Premier League (IPL), Royal Challengers Bangalore (RCB) batter Virat Kohli is all excited for the cash-rich league to commence this weekend. IPL 2022 will begin with the Chennai Super Kings (CSK) taking on the Kolkata Knight Riders (KKR) at the Wankhede Stadium on Saturday. And RCB will lock horns with Punjab Kings in their first match of the tournament on Sunday. Kohli cannot wait to set the ball rolling as he gears up for the match with “buzz and excitement”. “Heading into matchday weekend. All the buzz and excitement of IPL (are) in the air. Love this anticipation and butterflies in the stomach before the tournament starts,” Kohli shared pictures from his training session on the KOO app. Kohli, free from the responsibilities as the captain, is focused on helping RCB lift the IPL trophy this season. “So, my focus is so clear now, it’s so precise what I want to do. I want to just have a lot of fun and enjoy myself on the field and give myself fully to this team, to this franchise like I have for so many years, without any load. So, I’m absolutely ready,” he added. Coming back to IPL, the games will be played across stadiums in Mumbai, Navi Mumbai, and Pune with an audience occupancy rate of 25 percent as per Covid-19 protocols. In all, 20 matches each will be held at Wankhede Stadium & DY Patil Stadium, 15 matches each at Brabourne and MCA International Stadium, Pune.
english
<filename>erlang/hello-world/.exercism/metadata.json {"track":"erlang","exercise":"hello-world","id":"043ab3c19e7c4134a76cc3f7ece7623d","url":"https://exercism.io/my/solutions/043ab3c19e7c4134a76cc3f7ece7623d","handle":"herminiotorres","is_requester":true,"auto_approve":true}
json
package com.github.wkennedy.pubsubly.config;

import com.github.wkennedy.pubsubly.api.Processor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Spring configuration that exposes the plugin {@link Processor} registry.
 */
@Configuration
public class PluginConfig {

    /**
     * Indexes every discovered {@link Processor} bean by its name.
     * If two processors report the same name, the one encountered last wins
     * (plain {@link HashMap} put semantics).
     *
     * @param processors all {@code Processor} beans found in the context
     * @return a map from processor name to processor instance
     */
    @Bean
    public Map<String, Processor> processorMap(@Autowired List<Processor> processors) {
        HashMap<String, Processor> byName = new HashMap<>();
        processors.forEach(processor -> byName.put(processor.getName(), processor));
        return byName;
    }
}
java
#!/usr/bin/env python
"""Export supplier "about you" information for suppliers who applied to a framework.

This report includes registered company information and contact details.

Usage:
    scripts/framework-applications/export-framework-applicant-details.py <stage> <framework_slug> <output_dir>

Options:
    --verbose       Show debug log messages
    -h, --help      Show this screen

Example:
    scripts/framework-applications/export-framework-applicant-details.py dev g-cloud-12 SCRIPT_OUTPUTS
"""
import datetime
from multiprocessing.pool import ThreadPool
import os
import sys

sys.path.insert(0, '.')

from docopt import docopt

from dmscripts.helpers.auth_helpers import get_auth_token
from dmscripts.helpers.logging_helpers import configure_logger, get_logger
from dmscripts.helpers.logging_helpers import INFO as loglevel_INFO, DEBUG as loglevel_DEBUG
from dmscripts.export_framework_applicant_details import export_supplier_details
from dmapiclient import DataAPIClient
from dmutils.env_helpers import get_api_endpoint_from_stage


if __name__ == '__main__':
    arguments = docopt(__doc__)

    STAGE = arguments['<stage>']
    FRAMEWORK = arguments['<framework_slug>']
    OUTPUT_DIR = arguments['<output_dir>']

    configure_logger({"script": loglevel_DEBUG if arguments["--verbose"] else loglevel_INFO})
    logger = get_logger()

    client = DataAPIClient(get_api_endpoint_from_stage(STAGE), get_auth_token('api', STAGE))

    # Timestamped output file, e.g. g-cloud-12-supplier-about-you-data-2020-01-01_12.00-dev.csv
    now = datetime.datetime.now()
    filename = FRAMEWORK + "-supplier-about-you-data-" + now.strftime("%Y-%m-%d_%H.%M-") + STAGE + ".csv"
    filepath = os.path.join(OUTPUT_DIR, filename)

    # Create the output directory if it doesn't already exist.
    # exist_ok makes this atomic with respect to concurrent runs, replacing
    # the old exists()/makedirs()/errno.EEXIST dance.
    os.makedirs(OUTPUT_DIR, exist_ok=True)

    framework_lot_slugs = tuple([lot['slug'] for lot in client.get_framework(FRAMEWORK)['frameworks']['lots']])

    # Small thread pool: the export is dominated by API round-trips, so a few
    # concurrent requests are enough.
    pool = ThreadPool(3)

    export_supplier_details(
        client,
        FRAMEWORK,
        filepath,
        framework_lot_slugs=framework_lot_slugs,
        map_impl=pool.imap,
        logger=logger,
    )
python
If you think watching porn is something which is strictly an individual choice and has nothing to do with your business, hold on. Researchers now reveal that employees who view pornography at work aren't only costing companies millions of rupees in wasted time but also causing harm to the organisations. Since unethical employee behaviour is linked to a number of negative organisational outcomes like fraud and collusion employee pornography consumption is putting organisations at risk. "Pornography is often framed as an issue affecting only individuals and relationships outside of a business context," said study co-author Melissa Lewis-Western, a professor of accountancy at the Utah-based Brigham Young University (BYU). But businesses are made up of people, and people make decisions, and businesses function off the decisions people make. "If you have a societal phenomenon that a lot of people are participating in and it negatively impacts individuals' decisions, it has the potential to impact organisational-level outcomes," she added in a paper published in the Journal of Business Ethics. The study included an experiment with 200 participants and a nationally-representative survey of 1,000 other individuals. One group was tasked with recalling and recording their last experience viewing pornography. The researchers chose not to expose participants directly to pornography due to ethical concerns and concerns of selection and demand effects. Meanwhile, members of the control group were asked to recall and record their most recent experience exercising. Both groups were then employed to watch the entirety of a boring 10-minute video consisting of a blue background with a monotone voice speaking with subtitles. Researchers found 21 per cent of those who had recalled their last experience viewing porn did not finish viewing the video, but lied about it. Only 8 per cent of those in the control group did not finish the video and lied about it. 
This represented a statistically significant 163 per cent increase in shirking work and lying for those who view pornography. Similar evidence was obtained from the survey. The experiment also found that the rise in unethical behaviour is caused by an increased propensity to dehumanize others; pornography consumption increases the viewer's propensity to view others as objects or less than human. Since porn consumption causes dehumanization, the incidence of sexual harassment or hostile work environments is likely to increase with increases in employee pornography consumption. "Organisations should be mindful of those risks," said former BYU graduate student Nathan Mecham, now a PhD student at the University of Pittsburgh. If you have a larger portion of your employees that are consuming pornography at work, it's likely changing their behaviours and those changes are likely negative. Companies should have preventative controls such as Internet filters and blocking devices, policies that prohibit porn consumption at work with penalties and hire employees who are less likely to view pornography than others.
english
use std::fs::File;
use std::io::BufReader;

use serde::{Deserialize, Serialize};
use serde_json::from_reader as read_json;

use crate::lib::constant::conf::*;

/// Application configuration, deserialisable from a JSON file.
///
/// Every field is `#[serde(default)]`, so a partial config file is accepted;
/// missing or empty values are later replaced by the crate defaults in
/// [`Conf::fill_from`].
#[derive(Serialize, Deserialize, Debug)]
pub struct Conf {
    #[serde(default)]
    pub back_folder: String,
    #[serde(default)]
    pub back_path: String,
    #[serde(default)]
    pub front_path: String,
    #[serde(default)]
    pub output_path: String,
    #[serde(default)]
    pub template_path: String,
    #[serde(default)]
    pub img_max_length: u32,
}

impl Default for Conf {
    /// Builds a configuration from the crate-level `DEFAULT_*` constants.
    fn default() -> Self {
        Conf {
            back_folder: DEFAULT_BACK_FOLDER.into(),
            back_path: DEFAULT_BACK_PATH.into(),
            front_path: DEFAULT_FRONT_PATH.into(),
            output_path: DEFAULT_OUTPUT_PATH.into(),
            template_path: DEFAULT_TEMPLATE_PATH.into(),
            img_max_length: DEFAULT_IMG_MAX_LENGTH,
        }
    }
}

impl Conf {
    /// Loads configuration from `path`, falling back to defaults.
    ///
    /// Any I/O or JSON parse error is silently ignored and the defaults are
    /// returned; fields present in the file override defaults only when they
    /// are non-empty (strings) or non-zero (`img_max_length`).
    pub fn load(path: &str) -> Self {
        let mut config = Self::default();
        if let Ok(file) = File::open(path) {
            if let Ok(json) = read_json(BufReader::new(file)) {
                config.fill_from(json);
            }
        }
        config
    }

    /// Copies each meaningfully-set field of `other` over `self`.
    /// Empty strings and a zero `img_max_length` are treated as "not set".
    fn fill_from(&mut self, other: Self) {
        if !other.back_folder.is_empty() {
            self.back_folder = other.back_folder;
        }
        if !other.back_path.is_empty() {
            self.back_path = other.back_path;
        }
        if !other.front_path.is_empty() {
            self.front_path = other.front_path;
        }
        if !other.output_path.is_empty() {
            self.output_path = other.output_path;
        }
        if !other.template_path.is_empty() {
            self.template_path = other.template_path;
        }
        if other.img_max_length > 0 {
            self.img_max_length = other.img_max_length;
        }
    }
}
rust
import * as React from "react";
import { JsonLd } from "../src/index";
import { Person } from "schema-dts";

// Compile-time smoke tests for the JsonLd component's generic typing.
// These components are never rendered; they exist so `tsc` checks the props.

// T1: minimal usage — a Person item with only required fields type-checks.
const T1: React.FunctionComponent = () => (
  <JsonLd<Person>
    item={{
      "@context": "https://schema.org",
      "@type": "Person",
    }}
  />
);

// T2: the optional `space` prop (JSON.stringify indentation) is accepted.
const T2: React.FunctionComponent = () => (
  <JsonLd<Person>
    item={{
      "@context": "https://schema.org",
      "@type": "Person",
    }}
    space={2}
  />
);

// T3: negative test — passing a mismatched "@type" must be a type error,
// which @ts-expect-error asserts (compilation fails if the error vanishes).
const T3 = () => (
  <JsonLd<Person>
    item={{
      "@context": "https://schema.org",
      // @ts-expect-error
      "@type": "Organization",
    }}
  />
);
typescript
# -*- coding: utf-8 -*-

"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

# Field-extraction helpers for turning ENA/EMBL flat-file records into
# RNAcentral ``Entry`` objects.  Most functions take a Biopython SeqRecord
# (``record``) and/or a SeqFeature (``feature``) and compute one field of the
# final entry built by ``as_entry`` at the bottom of the module.

import re
import attr
import logging
import collections as coll
import typing as ty

from Bio.Seq import Seq

import rnacentral_pipeline.databases.helpers.embl as embl
from rnacentral_pipeline.databases.helpers.phylogeny import UnknownTaxonId
import rnacentral_pipeline.databases.helpers.publications as pubs

from rnacentral_pipeline import ribovore
from rnacentral_pipeline.databases.data import Entry
from rnacentral_pipeline.databases.data import IdReference

LOGGER = logging.getLogger(__name__)

# Ontology prefixes recognised inside /note qualifiers; notes starting with
# one of these are routed into the "ontology" bucket by note_data().
ONTOLOGIES = set(
    [
        "ECO",
        "SO",
        "GO",
    ]
)

# NOTE(review): not referenced anywhere in this module — presumably taxon ids
# consulted by callers; confirm before removing.
MAY_SKIP = set(
    [
        1169740,
        1202446,
        1325974,
        408170,
        410658,
        506600,
        77133,
        939928,
    ]
)

# Database names (lower-cased) that comment_xrefs() will accept as
# cross-reference sources from the record's free-text comment block.
KNOWN_DBS = set(
    [
        "srpdb",
        "mirbase",
        "tmrna-website",
        "snopydb",
        "plncdb",
        "wormbase",
        "tair",
        "sgd",
        "rgd",
        "mgi",
        "pombase",
        "dictybase",
        "flybase",
        "silva-ssu",
        "silva-lsu",
        "lncrnadb",
        "gtrnadb",
    ]
)


def source_qualifier_value(record, qualifier, pattern=r"^(.+)$", **kwargs):
    """Extract a qualifier value from the record's source feature."""
    source = embl.source_feature(record)
    return embl.qualifier_value(source, qualifier, pattern, **kwargs)


def chromosome(record):
    """Chromosome name from the source feature.

    Falls back to the raw first /chromosome qualifier when the pattern-based
    lookup raises (e.g. more than one value present).
    """
    try:
        return source_qualifier_value(record, "chromosome")
    except ValueError:
        source = embl.source_feature(record)
        chromosomes = source.qualifiers["chromosome"]
        return chromosomes[0]


def primary_id(_):
    """ENA entries have no external primary id; always the empty string."""
    return ""


def sequence(record):
    """The record's sequence as a plain string."""
    return str(record.seq)


def extract_experiment_refs(feature, known):
    """Pull PMID references out of the /experiment qualifier.

    Returns reference objects for any 'PMID:...' ids found, skipping those
    already present in ``known``.
    """
    experiment = embl.experiment(feature)
    if not experiment:
        return []

    match = re.search(r"PMID:([\d, ]+)", experiment)
    if not match:
        return []

    found = []
    pmids = match.group(1).split(",")
    for pmid in pmids:
        pmid = pmid.strip()
        if not pmid:
            continue
        data = pubs.reference(int(pmid))
        if data in known:
            continue
        found.append(data)
    return found


def references(record, feature):
    """All literature references: record-level plus /experiment PMIDs."""
    refs = embl.references(record)
    # Only IdReference entries can be compared against experiment PMIDs.
    known = {ref for ref in refs if isinstance(ref, IdReference)}
    experiment_refs = extract_experiment_refs(feature, known)
    refs.extend(experiment_refs)
    return refs


def rna_type(feature):
    """Best-effort RNA type for a feature.

    ncRNA features use their /ncRNA_class; misc_RNA features whose product
    encodes a known gene class ('gene:<class>.<...>') are promoted to that
    class; rRNA features whose product mentions tRNA are re-labelled tRNA.
    Everything else keeps the raw feature type.
    """
    if feature.type == "ncRNA":
        return embl.qualifier_value(feature, "ncRNA_class", r"^(.+)$")
    if feature.type == "misc_RNA":
        prod = product(feature) or ""
        if prod.startswith("gene:"):
            gene = prod.split(":")[1].split(".")[0]
            if gene in {"rRNA", "snoRNA", "tRNA", "snRNA"}:
                return gene
        return "misc_RNA"
    if feature.type == "rRNA":
        found = product(feature) or ""
        if "tRNA" in found:
            return "tRNA"
    return feature.type


def mol_type(record):
    """The /mol_type qualifier of the source feature."""
    return source_qualifier_value(record, "mol_type")


def product(feature):
    """All /product qualifiers joined into one '; '-separated string."""
    return embl.qualifier_string(feature, "product", separator="; ")


def note_data(feature):
    """Split /note qualifiers into free text and ontology terms.

    Notes beginning with a known ontology prefix (ECO/SO/GO) go under
    'ontology' (sorted, de-duplicated); everything else under 'text'.
    """
    data = {}
    text = []
    onts = set()
    for note in feature.qualifiers.get("note", []):
        for ontology in ONTOLOGIES:
            if note.startswith(ontology + ":"):
                onts.add(note)
                break
        else:
            text.append(note)

    if text:
        data["text"] = text
    if onts:
        data["ontology"] = sorted(onts)
    return data


def url(record):
    """
    Gets the standard url for this record.
    """
    return "https://www.ebi.ac.uk/ena/data/view/Non-coding:%s" % accession(record)


def exons(record, feature):
    """Exons of the feature, stamped with the record's chromosome if known."""
    data = []
    chrom = chromosome(record)
    if not chrom:
        return embl.exons(feature)
    for exon in embl.exons(feature):
        data.append(attr.evolve(exon, chromosome=chrom))
    return data


def accession(record):
    """
    Uses the record id as the accession for a feature.
    """
    return record.id


def is_composite(_):
    """
    Always returns 'N'
    """
    return "N"


def function(feature):
    """The /function qualifier with spacing normalised, or None."""
    value = embl.qualifier_string(feature, "function")
    if not value:
        return None
    # Tidy ' :' artifacts and collapse whitespace runs.
    value = re.sub(r" :", ":", value)
    value = re.sub(r"\s+", " ", value)
    return value


def allele(record):
    """The /allele qualifier of the source feature."""
    return source_qualifier_value(record, "allele")


def anticodon(record, feature):
    """Extract a tRNA anticodon, trying several locations in order.

    1. 'seq:XXX' inside the /anticodon qualifier;
    2. the gene name, in either 'tRNA-Xxx (NNN)' or 'tRNA-Xxx-NNN' form;
    3. a 'codon recognized:NNN' note, reverse-complemented to the anticodon.
    Falls back to the raw /anticodon string (possibly None).
    """
    raw_anti = embl.qualifier_string(feature, "anticodon")
    if raw_anti:
        match = re.search("seq:([ACGUT]{3})", raw_anti)
        if match:
            return match.group(1).upper()

    gene = embl.gene(feature)
    if gene:
        match = re.search(r"tRNA-\w+ \(([ACGU]{3})\)$", gene)
        if match:
            return match.group(1)
        match = re.search(r"tRNA-\w{3}[-_]([ACGUT]{3})", gene)
        if match:
            return match.group(1)

    note = " ".join(note_data(feature).get("text", []))
    if note:
        match = re.search(r"codon recognized:(\s*[ACGUT]{3}\s*)", note)
        if match:
            raw = match.group(1).strip()
            try:
                # The note gives the codon; the anticodon is its
                # reverse complement.
                return str(Seq(raw).reverse_complement())
            except Exception as err:
                LOGGER.warn("Error getting reverse_complement")
                LOGGER.exception(err)
                return raw_anti
    return raw_anti


def keywords(record):
    """Non-empty record keywords joined with '; ', or None."""
    keys = [k for k in record.annotations["keywords"] if k]
    if not keys:
        return None
    return "; ".join(keys)


def parent_accession(record):
    """Record id without its trailing '.version' suffix."""
    return record.id.split(".", 1)[0]


def organelle(record):
    """The /organelle qualifier(s) merged into a single string.

    With exactly two values where one prefixes the other (e.g. 'plastid' and
    'plastid:chloroplast') the more specific one wins.
    """
    values = source_qualifier_value(record, "organelle", max_allowed=None)
    if not values:
        return None
    if len(values) == 1:
        return values.pop()

    if len(values) == 2:
        # Seems strange but there are cases where the value is
        # ['plastid', 'plastid:chloroplast'] and in that case we want
        # 'plastid:chloroplast' as that is more specific.
        first, second = sorted(values, key=len)
        if second.startswith(first):
            return second

    return " ".join(sorted(values))


def operon(feature):
    """The /operon qualifier as a string."""
    return embl.qualifier_string(feature, "operon")


def is_pseudogene(feature):
    """True when the feature carries a /pseudogene or /pseudo qualifier."""
    return "pseudogene" in feature.qualifiers or "pseudo" in feature.qualifiers


def gene_synonyms(feature):
    """All /gene_synonym values, with '; '-joined entries split apart."""
    result = []
    synonyms = feature.qualifiers.get("gene_synonym", [])
    for synonym in synonyms:
        result.extend(synonym.split("; "))
    return result


def taxid(record):
    """NCBI taxon id, defaulting to 32644 (unclassified) when unknown."""
    try:
        return embl.taxid(record)
    except UnknownTaxonId:
        return 32644  # Unclassified sequence


def organism(record):
    """The raw organism annotation, or None."""
    return record.annotations.get("organism", None)


def species(record):
    """Species name parsed from the organism annotation.

    Strips a trailing parenthesised common name and re-balances a dangling
    opening quote left behind by that stripping.
    """
    # try:
    #     return embl.species(record)
    # except UnknownTaxonId:
    org = organism(record)

    # Strip out the common name if present
    if re.search(r"\([^()]+\)\s*$", org):
        return re.sub(r"\s*\(.+$", "", org)

    # Add a closing quote if needed
    match = re.search(r"([\"'])\w+$", org)
    if match:
        org += match.group(1)
    return org


def common_name(record):
    """Common name: the trailing parenthesised part of the organism, if any."""
    # try:
    #     return embl.common_name(record)
    # except UnknownTaxonId:
    org = organism(record)
    if org:
        match = re.search(r"\(([^()]+)\)$", org)
        if match:
            return match.group(1)
    return None


def lineage(record) -> ty.Optional[str]:
    """Taxonomic lineage ending in the species name, '; '-joined, or None."""
    # try:
    #     return embl.lineage(record)
    # except UnknownTaxonId:
    taxonomy = record.annotations.get("taxonomy", [])
    if taxonomy:
        taxonomy.append(species(record))
        return "; ".join(taxonomy)
    return None


def description(record) -> str:
    """Human-readable description.

    Pipe-delimited descriptions keep only the first segment (minus any
    'gene:' prefix and parenthetical); otherwise a leading 'TPA:' is removed.
    """
    raw = embl.description(record)
    if "|" in raw:
        first = raw.split("|")[0].replace("gene:", "")
        return first.split("(")[0]
    return re.sub(r"^TPA:\s*", "", raw)


def comment_xrefs(comments):
    """Parse 'DB; id[; id...]' cross-references out of comment lines.

    Only databases listed in KNOWN_DBS are kept; returns a mapping of
    db name -> list of ids.
    """
    xrefs = coll.defaultdict(list)
    for line in comments:
        match = re.match(r"^\s*(.+?)\s*;\s*(.+?)\s*\.?$", line)
        if match:
            db_name = match.group(1).lower()
            if db_name not in KNOWN_DBS:
                continue
            rest = match.group(2)
            if ";" in rest:
                xrefs[db_name].extend(re.split(r"\s*;\s*", rest))
            else:
                xrefs[db_name].append(rest)
    return xrefs


def xref_data(record, feature, refs):
    """Merge feature db_xrefs, comment-block xrefs and non-MD5 ENA refs."""
    xrefs = {}
    xrefs.update(embl.xref_data(feature))

    comment = record.annotations.get("comment", "")
    if comment:
        xrefs.update(comment_xrefs(comment.split("\n")))

    ena_refs = {}
    for ref in refs:
        if ref.database != "MD5":
            ena_refs[ref.database.upper()] = (ref.primary_id, ref.secondary_id)
    if ena_refs:
        xrefs["ena_refs"] = ena_refs

    return xrefs


def is_protein(feature) -> bool:
    """True when the product marks this feature as an uncharacterized protein."""
    if product(feature) == "uncharacterized protein":
        return True
    return False


def is_skippable_sequence(
    entry: Entry, status: ty.Optional[ribovore.RibovoreResult]
) -> bool:
    """Decide whether a metagenomic rRNA entry should be skipped.

    Only applies to SO:0000252 (rRNA) entries from metagenome lineages; such
    entries are skipped when ribovore failed them outright or their model
    coverage is at most 90%.
    """
    if entry.rna_type != "SO:0000252" or "metagenome" not in entry.lineage:
        return False
    if not status or status.status == "FAIL":
        return True
    model_coverage = status.model_coverage
    if model_coverage is None:
        return False
    return model_coverage <= 0.90


def as_entry(ctx, record, feature) -> Entry:
    """Assemble the final Entry for one (record, feature) pair.

    ``ctx.dr`` is expected to map record ids to their ENA reference lines
    (assumption from usage — confirm against the caller).
    """
    prod = product(feature)
    if prod:
        # Entry.product is capped at 500 characters.
        prod = prod[0:500]
    record_refs = ctx.dr[record.id]
    return Entry(
        primary_id=primary_id(feature),
        accession=accession(record),
        ncbi_tax_id=taxid(record),
        database="ENA",
        sequence=sequence(record),
        regions=[],
        rna_type=rna_type(feature),
        url=url(record),
        seq_version=embl.seq_version(record),
        note_data=note_data(feature),
        xref_data=xref_data(record, feature, record_refs),
        chromosome=chromosome(record),
        species=species(record),
        common_name=common_name(record),
        lineage=lineage(record),
        gene=embl.gene(feature),
        locus_tag=embl.locus_tag(feature),
        product=prod,
        parent_accession=parent_accession(record),
        project=embl.project(record),
        keywords=keywords(record),
        organelle=organelle(record),
        anticodon=anticodon(record, feature),
        experiment=embl.experiment(feature),
        function=function(feature),
        inference=embl.inference(feature),
        old_locus_tag=embl.old_locus_tag(feature),
        operon=operon(feature),
        standard_name=embl.standard_name(feature),
        description=description(record),
        mol_type=mol_type(record),
        is_composite=is_composite(feature),
        gene_synonyms=gene_synonyms(feature),
        references=references(record, feature),
    )
python
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import * as types from '../actionTypes';
import * as actions from '../profile';
import { getProfile, patchProfile } from '../../services';

// Replace the real service layer so no network requests are made.
jest.mock('../../services', () => ({
  getProfile: jest.fn(),
  patchProfile: jest.fn()
}));

// Mock store with thunk middleware so async action creators can be dispatched
// and their dispatched actions inspected via store.getActions().
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);

describe('profile fetch actions', () => {
  // Plain action creators: each should return the exact action object.
  test('action:start', () => {
    const expectedAction = {
      type: types.PROFILE_FETCH_START
    };
    expect(actions.profileFetchStart()).toEqual(expectedAction);
  });

  test('action:success', () => {
    const mockProfile = { id: 1, first_name: 'first_name' };
    const expectedAction = {
      type: types.PROFILE_FETCH_SUCCESS,
      profile: mockProfile
    };
    expect(actions.profileFetchSuccess(mockProfile)).toEqual(expectedAction);
  });

  test('action:fail', () => {
    const error = { data: { message: 'Error message: Profile fetch Fail!!!' } };
    const expectedAction = {
      type: types.PROFILE_FETCH_FAIL,
      error: error
    };
    expect(actions.profileFetchFail(error)).toEqual(expectedAction);
  });

  // Thunk: a successful fetch should dispatch START then SUCCESS,
  // calling the (mocked) service exactly once.
  test('action:fetchProfile', () => {
    const mockProfile = { id: 1, first_name: 'first_name' };
    const expectedActions = [
      { type: types.PROFILE_FETCH_START },
      { type: types.PROFILE_FETCH_SUCCESS, profile: mockProfile }
    ];
    getProfile.mockReturnValueOnce(Promise.resolve({ ...mockProfile }));
    const store = mockStore({});
    return store.dispatch(actions.fetchProfile())
      .then(() => {
        expect(store.getActions()).toEqual(expectedActions);
        expect(getProfile).toHaveBeenCalledTimes(1);
      });
  });
});

describe('profile update actions', () => {
  test('action:start', () => {
    const expectedAction = {
      type: types.PROFILE_PUT_START
    };
    expect(actions.profilePutStart()).toEqual(expectedAction);
  });

  test('action:success', () => {
    const mockProfile = { id: 1, first_name: 'first_name' };
    const expectedAction = {
      type: types.PROFILE_PUT_SUCCESS,
      profile: mockProfile
    };
    expect(actions.profilePutSuccess(mockProfile)).toEqual(expectedAction);
  });

  test('action:fail', () => {
    const error = { data: { message: 'Error message: Profile fetch Fail!!!' } };
    const expectedAction = {
      type: types.PROFILE_PUT_FAIL,
      error: error
    };
    expect(actions.profilePutFail(error)).toEqual(expectedAction);
  });

  // Thunk: a successful update dispatches START then SUCCESS and forwards
  // the profile payload to the (mocked) patch service.
  test('action:patchProfile', () => {
    const mockProfile = { id: 1, first_name: 'first_name' };
    const expectedActions = [
      { type: types.PROFILE_PUT_START },
      { type: types.PROFILE_PUT_SUCCESS, profile: mockProfile }
    ];
    patchProfile.mockReturnValueOnce(Promise.resolve({ ...mockProfile }));
    const store = mockStore({});
    return store.dispatch(actions.updateProfile(mockProfile))
      .then(() => {
        expect(store.getActions()).toEqual(expectedActions);
        expect(patchProfile).toHaveBeenCalledTimes(1);
        expect(patchProfile).toHaveBeenCalledWith(mockProfile);
      });
  });
});
javascript
<gh_stars>10-100 { "directions": [ "Beat cream cheese, egg yolk, and 1/4 cup sugar together in a bowl until smooth. Pour melted butter into a separate shallow bowl. Mix remaining 3/4 cup sugar and cinnamon together in a third shallow bowl.", "Spread even amounts of the cream cheese mixture onto one side of each bread slice. Roll the bread into a cylinder around the cream cheese filling. Brush melted butter around the outside of each cylinder and roll the cylinders in the cinnamon-sugar mixture to coat.", "Arrange the rolls in a dish, cover with plastic wrap, and freeze for at least 2 hours.", "Preheat oven to 400 degrees F (200 degrees C).", "Transfer frozen rolls to a baking dish and bake in preheated oven until hot in the center, 10 to 12 minutes." ], "ingredients": [ "1 (8 ounce) package cream cheese, softened", "1 egg yolk", "1 cup white sugar, divided", "24 slices white bread, crusts removed", "1 tablespoon ground cinnamon", "3 tablespoons butter, melted" ], "language": "en-US", "source": "allrecipes.com", "tags": [], "title": "French Toast Roll-Ups", "url": "http://allrecipes.com/recipe/236210/french-toast-roll-ups/" }
json
Hyderabad: Veteran Congress leader S Jaipal Reddy's last rites will be performed with full state honours. Telangana Chief Minister K Chandrashekhar Rao directed Chief Secretary SK Joshi to make necessary arrangements to this effect. Reddy breathed his last in Hyderabad past midnight on Sunday. He was undergoing treatment for pneumonia at a private hospital in Hyderabad. He was 77 and is survived by his wife, two sons, and a daughter. The Congress leader will be laid to rest at Necklace Road in Hyderabad on Monday. The funeral procession will begin from his residence in Jubilee Hills on Monday and the mortal remains will be kept in Gandhi Bhavan, the Congress headquarters, for public viewing.
english
import { Component, Input } from '@angular/core';
import { FormControl, FormGroup, Validators } from '@angular/forms';
import { Router } from '@angular/router';
import { NbToastrService } from '@nebular/theme';
import { TranslateService } from '@ngx-translate/core';
import {
  Questionare,
  QuestionareService,
} from '../../data/questionare.service';
import { UserService } from '../../data/user.service';

/**
 * Onboarding questionnaire form: loads the current user's existing answers
 * (if any), lets them edit, and upserts the document on submit.
 */
@Component({
  selector: 'ngx-questionare',
  templateUrl: './questionare.component.html',
  styleUrls: ['./questionare.component.scss'],
})
export class QuestionareComponent {
  // Whether the "skip" action is offered by the template.
  @Input() skipEnabled = true;

  // All four answers are required before submit is possible.
  form = new FormGroup({
    typ: new FormControl(null, [Validators.required]),
    online: new FormControl(null, [Validators.required]),
    apps: new FormControl(null, [Validators.required]),
    experience: new FormControl(null, [Validators.required]),
  });

  // The user's existing questionnaire document, if one was found on init.
  private currentDoc: Questionare;

  constructor(
    private readonly questionareService: QuestionareService,
    private readonly userService: UserService,
    private readonly router: Router,
    private readonly toast: NbToastrService,
    private readonly translate: TranslateService,
  ) {}

  // NOTE(review): class does not declare `implements OnInit`; Angular still
  // calls ngOnInit by name, but the interface would catch signature typos.
  async ngOnInit() {
    // Load the (at most one) questionnaire belonging to the current user
    // and pre-fill the form with its values (undefined-safe via ?.).
    this.currentDoc = (
      await this.questionareService.getData(
        await this.questionareService
          .getCollection()
          .where('user', '==', await this.createCurrentUserRef())
          .get(),
      )
    )[0];
    this.form.get('typ').setValue(this.currentDoc?.typ);
    this.form.get('online').setValue(this.currentDoc?.online);
    this.form.get('apps').setValue(this.currentDoc?.apps);
    this.form.get('experience').setValue(this.currentDoc?.experience);
  }

  /** Document reference for the currently signed-in user. */
  private async createCurrentUserRef() {
    return this.userService.createRef((await this.userService.currentUser).id);
  }

  /**
   * Persists the answers (insert or update), shows a translated success
   * toast, then navigates to the main pages after a short delay.
   */
  async submit() {
    const documentData = {
      id: this.currentDoc?.id,
      typ: this.form.get('typ').value,
      online: this.form.get('online').value,
      apps: this.form.get('apps').value,
      experience: this.form.get('experience').value,
      user: await this.createCurrentUserRef(),
    };
    const snapshot = await this.questionareService.upsert(documentData);
    if (snapshot) {
      // A snapshot is returned on insert; remember the new id so a second
      // submit updates instead of creating a duplicate.
      this.currentDoc = {
        id: snapshot.id,
        ...documentData,
      };
    }
    this.toast.success(
      await this.translate.get('questionare.toast.success.message').toPromise(),
      await this.translate.get('questionare.toast.success.title').toPromise(),
    );
    // Brief delay so the toast is visible before navigation.
    setTimeout(() => {
      this.router.navigate(['pages']);
    }, 500);
  }

  /** Leaves the questionnaire without saving. */
  skip() {
    this.router.navigate(['pages']);
  }
}
typescript
<gh_stars>1-10 {"word":"lacturamic","definition":"Pertaining to, or designating, an organic amido acid, which is regarded as a derivative of lactic acid and urea."}
json
""" Name : __init__.py boxes module This import path is important to allow importing correctly as package """ import os, sys sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '.')))
python
Microsoft Copilot has a new tool that allows you to create an AI song using a single line of text. The tool is an integration with Suno AI, an app made by "musicians and artificial intelligence experts" that's geared around simple AI music generation. The tool can be used by signing in to Copilot using your Microsoft account, then enabling the Suno plugin. As with all artificial intelligence used for creative purposes, it's worth noting that becoming a musician using a single text prompt might not be as straightforward as it sounds. According to Suno's FAQs, copyright of the generated music belongs to Suno if users are using the app's free version, but belongs to the user if they're subscribed to Pro or Premier. Even then, though, it's not clear on Suno's site exactly how its AI creates music, or what music it's been trained using. Mashable has reached out to Suno for clarification, but one of their FAQs makes it clear that ownership is, at best, a slightly grey area. "The availability and scope of copyright protection for content generated (in whole or in part) using artificial intelligence is a complex and dynamic area of law, which is rapidly evolving and varies among countries," reads the FAQ. "We encourage you to consult a qualified attorney to advise you about the latest development and the degree of copyright protection available for the output you generate using Suno." The issue of AI and copyright has been in the spotlight this year — and not just in the music industry. A few months ago a number of high profile authors banded together to sue OpenAI, the owner of ChatGPT, claiming that the company had infringed their copyright by using their works to train its AI.
english
/* ===== Reset ===== */
body,h1,ul,li {
  margin: 0;
  padding: 0;
}
body {
  font-family: Meiryo,"メイリオ","MS Pゴシック";
}

/* ===== Site header ===== */
header {
  margin: 0;
  padding: 0;
  background-color: #BF0000;
}
header h1 a {
  margin: 0;
  padding: 10px;
  font-size: 32px;
  font-weight: bold;
  font-family: cursive;
  color: #FFFFFF;
  text-shadow: 1px 1px 1px #000000;
  text-decoration: none;
}

/* ===== Keyword search form ===== */
div.search-form {
  margin: 10px 0;
  text-align: center;
}
div.search-form input.keyword {
  font-size: 24px;
  width: 500px;
}
div.search-form input.search-button {
  font-size: 24px;
}

/* ===== Search-result item grid ===== */
#itemarea {
  max-width:980px;
  margin: 0 auto;
  clear: both;
}
#itemlist {
  margin: 0 auto;
  display: block;
  width: 840px;
  padding: 0;
}
.item {
  display: block;
  width: 250px;
  float: left;
  margin: 9px;
  padding: 5px;
  background-color: #FFEEFF;
  height: 300px;
  overflow: hidden;
  border: 1px solid #CCCCCC;
  border-radius: 5px;
  -webkit-border-radius: 5px;
  -moz-border-radius: 5px;
  box-shadow: 5px 5px 5px #DDD;
}
.item ul {
  padding: 0;
}
.item ul li {
  display: block;
  margin: 3px;
  padding: 0;
}
.item ul li.description {
  overflow: auto;
  font-size: 10px;
  max-height: 100px;
}

/* Centered red pill used for status messages. */
.notice {
  text-align: center;
  font-weight: bold;
  background-color: #BF0000;
  color: #FFFFFF;
  border-radius: 10px;
  padding: 5px;
  width: 300px;
  margin: 10px auto;
}

/* ===== Pagination ===== */
.pager {
  clear: both;
}
.pager ul {
  text-align: center;
}
.pager ul li {
  display: inline-block;
  padding-right: 1em;
}

/* Primary call-to-action button. */
button.button {
  /* Font size 1.4em */
  font-size: 1.4em;
  /* Bold text */
  font-weight: bold;
  /* 10px vertical and 30px horizontal padding */
  padding: 10px 30px;
  /* White text */
  color: #fff;
  /* Remove the border */
  border-style: none;
  /* Button drop shadow:
   * 2px horizontal offset,
   * 2px vertical offset,
   * 3px blur,
   * 1px spread,
   * grey (#666) */
  box-shadow: 2px 2px 3px 1px #666;
  -moz-box-shadow: 2px 2px 3px 1px #666;
  -webkit-box-shadow: 2px 2px 3px 1px #666;
  /* Text shadow:
   * 1px horizontal offset,
   * 1px vertical offset,
   * 2px blur,
   * black (#000) */
  text-shadow: 1px 1px 2px #000;
  /* Vertical gradient (with vendor-prefixed fallbacks) */
  background: -moz-linear-gradient(bottom, #36d, #248 50%, #36d);
  background: -webkit-gradient(linear, left bottom, left top, from(#36d), color-stop(0.5, #248), to(#36d));
  /* Rounded corners */
  -moz-border-radius: 5px;
  -webkit-border-radius: 5px;
  border-radius: 5px;
}
button.button:hover {
  /* 20% transparent on hover */
  opacity: 0.8;
}
.text{
  text-align:center;
}

/* navbar */
.navbar {
  padding-top: 8px;
  background-color: red;
}
.navbar-header img {
  margin-top: 5px;
  height: 40px;
}

/* cover */
.cover {
  margin-top: -20px;
  margin-bottom: 20px;
  width: 100%;
  height: 300px;
  background: url("/images/cover-bg.jpg") center center no-repeat;
  background-size: cover;
}
.cover .cover-inner {
  height: 100%;
  margin: auto;
  display: table;
}
.cover .cover-inner .cover-contents {
  display: table-cell;
  vertical-align: middle;
  text-align: center;
  position: relative;
}
.cover .cover-inner .cover-contents h1 {
  margin: 0 0 20px 0;
  color: #fff;
  font-weight: bold;
  letter-spacing: 0.15em;
}
.cover .btn-success {
  background-color: #ed486f;
  border-color: #ed486f;
}

/* search */
.search {
  margin-bottom: 20px;
}

/* item */
.item .panel-body {
  height: 70px;
}
.item .panel-heading {
  height: 250px;
}
.item .panel-heading img {
  height: 100%;
  max-width: 100%;
}
p.item-title {
  /* Clamp the title to two lines, appending "..." at the end. */
  display: -webkit-box;
  -webkit-box-orient: vertical;
  -webkit-line-clamp: 2;
  overflow: hidden;
}

/* item */
/* NOTE(review): this later .item .panel-body rule overrides the 70px height
 * declared above (last rule wins at equal specificity). */
.item .panel-body {
  height: 120px;
}
.item .buttons form {
  display: inline-block;
  margin: 0 5px;
}

/* users */
.user-profile .name {
  margin-bottom: 20px;
}
.user-profile .name h1 {
  text-shadow: 1px 1px 1px #fff;
  font-weight: bold;
}
.user-profile .status ul {
  margin: 0 auto 10px;
  padding: 0;
  display: table;
}
.user-profile .status ul li {
  width: 120px;
  padding: 5px 0;
  border-right: 1px solid #ddd;
  display: table-cell;
  vertical-align: middle;
  text-align: center;
  color: #777;
}
.user-profile .status ul li:last-child {
  border: none;
}
.user-profile .status ul li .status-label {
  font-size: 12px;
}
.user-profile .status ul li .status-value {
  font-size: 18px;
}
css
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.breadcrumbItemStyles = void 0; var tslib_1 = require("tslib"); exports.breadcrumbItemStyles = { root: function (_a) { var p = _a.props, v = _a.variables; return (tslib_1.__assign(tslib_1.__assign(tslib_1.__assign(tslib_1.__assign(tslib_1.__assign(tslib_1.__assign({ display: 'inline-flex', alignItems: 'center', verticalAlign: 'middle' }, (p.active && { fontWeight: v.itemCurrentFontWeight, })), (p.disabled && { color: v.disabledColor, })), (p.size === 'smaller' && { paddingLeft: v.linkPaddingLeftSmaller, paddingRight: v.linkPaddingRightSmaller, gap: v.linkSmallerGap, })), (p.size === 'small' && { paddingLeft: v.linkPaddingLeftSmall, paddingRight: v.linkPaddingRightSmall, gap: v.linkSmallGap, })), (p.size === 'medium' && { paddingLeft: v.linkPaddingLeftMedium, paddingRight: v.linkPaddingRightMedium, gap: v.linkMediumGap, })), (p.size === 'large' && { paddingLeft: v.linkPaddingLeftLarge, paddingRight: v.linkPaddingRightLarge, gap: v.linkLargeGap, }))); }, };
javascript
No doubt mathematics is more challenging than you think it is! And when it comes to Calculus, it is a mathematical subject that many students mostly hate. Here, you will know the reasons to study calculus. The reason is the complicated techniques and elongated formulas you must remember verbally. If you do not do that, you will likely hit yourself in calculus. And who would like that even if the examination is about to come? In such circumstances, you must give a try to the integral calculator. This tool will certainly quench your thirst for integration calculations. This is because integration is the heart of calculus. And once you control your heartbeat, you will certainly be able to understand and solve calculus problems. Let us delve a little bit further and discuss the importance of calculus! Calculus is often used in several STEM domains, including engineering, autonomous driving, building, and robots. It’s crucial to examine both the changes being made and their effects. As you will see later, everyday events and interactions may be examined and portrayed using a calculus lens. Calculus has many applications, and motion is crucial to robotics. Robots can compute their location, speed, and acceleration using calculus. And to introduce precision in the computations, the Integral Calculator provides substantial assistance. This is particularly crucial in autonomous cars for tasks like obstacle avoidance. Calculus develops your ability to think conceptually by dealing with invisible ideas to the human eye or touch. You may benefit from having this crucial talent in all facets of your life. Thinking conceptual has several advantages, including thinking more creatively and seeing things from a greater perspective. It is one of the reasons to Study Calculus. Additionally, you will be much better at solving problems in math and other courses because of your ability to think critically, which is built on your capacity for abstract thought. 
And if you involve an integral calculator and other such calculus tools in your computations, you will get a firm grip on many calculus queries and techniques. The critical thinking abilities you build when mastering a difficult topic like calculus are useful even if you need to look at the arithmetic involved. It’s vital to acquire these critical abilities and to exercise your brain. Adding more, trying the integration calculator will also aid you more here. You will learn how different calculus problems are solved by this tool. Later, you can learn the techniques and add them to your manual practice. For instance, knowing the several equations and approaches used in calculus to solve problems is crucial. However, these abilities are developed through comprehending the principles underlying these techniques and the reasons you could use them to solve a differential equation in one case and the tangent line of an equation in another. You may start to push your abilities to the maximum and improve as a student by challenging yourself to comprehend some of these concepts rather than merely remembering formulae. Calculus may help you build useful analytical abilities that you can use in various real-world situations. These abilities will also improve as you advance in your study of mathematics and related fields. And one way to do so is by using the integral calculator that helps you to calculate integral free of cost. It is one of the reasons to study calculus. These abilities allow you to dissect complicated issues into their parts, locate the source of the issue, and develop workable solutions. Calculus is a subject that is often taught at colleges. Therefore only some high school students get the chance to study it. Calculus will be used more frequently in college if you continue studying. You should become familiar with it to achieve it without stress and also with the reasons to study calculus. 
This complicated subject may appear nearly impossible to study in college due to the quicker pace at which material is presented. But using an integral calculator will make everything manageable, thanks to the fast and accurate calculations it yields. It will help you understand how to solve many calculus problems in less time. You’ll be better prepared for this new learning environment if you arrive with a basic understanding of calculus.
english
package org.mockserver.serialization.model;

import org.mockserver.model.ParameterBody;
import org.mockserver.model.Parameters;

import java.util.Objects;

/**
 * Serialization DTO mirroring {@link ParameterBody}: captures the body's
 * parameters, negation flag and optional flag so the body can be round-tripped
 * through the serializer and rebuilt via {@link #buildObject()}.
 *
 * @author jamesdbloom
 */
public class ParameterBodyDTO extends BodyDTO {

    private final Parameters parameters;

    /** Convenience constructor for a non-negated body. */
    public ParameterBodyDTO(ParameterBody parameterBody) {
        this(parameterBody, null);
    }

    /**
     * @param parameterBody the model object to copy state from
     * @param not           negation flag carried on the base DTO (may be null)
     */
    public ParameterBodyDTO(ParameterBody parameterBody, Boolean not) {
        super(parameterBody.getType(), not);
        this.parameters = parameterBody.getValue();
        withOptional(parameterBody.getOptional());
    }

    public Parameters getParameters() {
        return parameters;
    }

    /** Rebuilds the model object, re-applying the optional flag. */
    public ParameterBody buildObject() {
        ParameterBody body = new ParameterBody(parameters);
        return (ParameterBody) body.withOptional(getOptional());
    }
}
java
<gh_stars>0
/* Root application container: zero outer margin/padding except a 30px left inset. */
.App{
  margin: 0%;
  padding: 0%;
  margin-left: 30px;
}

/* Page heading text size. */
.heading{
  font-size: 40px;
}
css
<gh_stars>0
#include <cstdlib>
#include <iostream>

using namespace std;

// Reads an integer n from stdin and prints 2^n by repeated doubling.
int main(int argc, char* argv[]) {
  int n = 0;
  int s = 1;

  cin >> n;

  // Fix: the original decremented n until it reached 0, so a negative n
  // never terminated (and drove n into signed-overflow UB). Reject it.
  if (n < 0) {
    return EXIT_FAILURE;
  }

  // NOTE(review): s overflows int for n >= 31; acceptable for exercise-sized
  // input, otherwise widen to a 64-bit type.
  while (n != 0) {
    s *= 2;
    n -= 1;
  }

  cout << s << endl;
  return EXIT_SUCCESS;
}
cpp
/*
 * Copyright 2021 The Go Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style
 * license that can be found in the LICENSE file.
 */

/* .SubRepo-links: give the links block a 1.5rem line height. */
.SubRepo-links {
  line-height: 1.5rem;
}
css
{ "name": "picture-thing", "version": "0.0.1", "description": "A picture thing game", "main": "index.js", "scripts": { "build": "browserify front-end/js/main.js -o public/js/main.js -t [ babelify --presets [ latest ] --plugins [ transform-object-rest-spread ] ]" }, "repository": { "type": "git", "url": "git+https://github.com/NerdcoreSteve/picture-thing.git" }, "keywords": [ "game" ], "author": "<NAME>", "license": "Apache-2.0", "bugs": { "url": "https://github.com/NerdcoreSteve/picture-thing/issues" }, "homepage": "https://github.com/NerdcoreSteve/picture-thing#readme", "dependencies": { "express": "^4.14.0", "pug": "^2.0.0-beta6" }, "devDependencies": { "babel-plugin-transform-object-rest-spread": "^6.20.2", "babel-preset-latest": "^6.16.0", "babelify": "^7.3.0" } }
json
<reponame>hhdevelopment/ev import {Injectable} from '@angular/core'; import {User} from 'firebase'; import {AngularFireAuth} from '@angular/fire/auth'; import {map} from 'rxjs/operators'; import {Observable} from 'rxjs'; import {AngularFirestore} from '@angular/fire/firestore'; import IdTokenResult = firebase.auth.IdTokenResult; @Injectable({ providedIn: 'root' }) export class UserService { constructor( private afs: AngularFirestore, private afAuth: AngularFireAuth, ) { } getUser(): Observable<User | null> { return this.afAuth.user; } getClaims(): Observable<Partial<Claims>> { return this.afAuth.idTokenResult.pipe( map((idTokenResult: IdTokenResult) => { return !!idTokenResult ? idTokenResult.claims as Partial<Claims> : {}; }) ); } getEmail(): Observable<string | null> { return this.getUser().pipe( map((user: User | null) => { return !!user ? user.email : null; }) ); } getUid(): Observable<string | null> { return this.getUser().pipe( map((user: User | null) => { return !!user ? user.uid : null; }) ); } }
typescript
Chhattisgarh News | हिंदू संगठनों की कड़ी चेतावनी के बाद उदयनिधि बने भीगी बिल्ली! Today Express 24X7 provides comprehensive up-to-date coverage of the Breaking News, Politics, Latest News, Entertainment News, Business News, and Sports News. Stay tuned for all the News in Hindi. Today Express 24X7 पर आप ट्रेंडिंग न्यूज, राजनीति, मनोरंजन, बॉलीवुड, बिजनेस, क्रिकेट और अन्य खेलों की लेटेस्ट खबरों के साथ-साथ विस्तृत विश्लेषण पा सकते हैं। We do NOT own the video materials and all credits belong to respectful owner. In case of copyright issues, please contact us immediately for further credit or clip delete. Under Section 107 of the Copyright Act 1976, allowance is made for "fair use" for purposes such as criticism, commenting, news reporting, teaching, scholarship, and research. Fair use is a use permitted by copyright statute that might otherwise be infringing. Non-profit, educational or personal use tips the balance in favour of fair use. Contact Us for Copyright Related issues, Credit addition and deletion: Chhattisgarh News | हिंदू संगठनों की कड़ी चेतावनी के बाद उदयनिधि बने भीगी बिल्ली! Special Briefing on the Visit of President of Maldives to India (August 02, 2022) Ayushmann Khurrana and Ananya Panday are all set to tickle the audience's funny bones with Dream Girl 2 that releases next week in theatres. The two buddies joined Bollywood Bubble hosts Rashita Sahni and Nayandeep Rakshit on the second episode of Reel Buddies 3. From revealing how Ayushmann cracks some of the funniest bad and dad jokes to how they bonded over their common love for cricket, movies, food and music, Ananya and AK discuss it all. They also address the constant targeting that happens with anyone who's effeminate and shares if the fight is more about feminity than sexuality. Not just that, AP and AK discuss their idea of privilege and how they handle trolls and social media hate on a daily basis. 
In the end, they open up about their idea of love & Ayushmann reveals he would love to see Ananya with Ranbir Kapoor or Aditya Roy Kapur. Ahem ahem.. watch the full video to know more. Check out the video to know more. SUBSCRIBE To Bollywood Bubble: Also, Visit - https://www.bollywoodbubble.com . One stop Destination for Latest Bollywood Updates. Click on the Subscribe Button NOW and Stay Tuned. Sheldon Jackson is one of the most prolific and consistent batsmen in the Indian domestic circuit. He has scored over 6600 runs in first-class cricket, with 20 centuries and 36 half-centuries. He has also played in the Indian Premier League (IPL) for Kolkata Knight Riders and Royal Challengers Bangalore. He is a right-handed wicket-keeper-batter who can also bowl off-spin. In this exclusive interview with CricTracker, Sheldon Jackson answers 20 FreeHit questions about his cricketing career, his personal life, his hobbies, his favourites, his idols, his goals, and his challenges. He also shares some interesting anecdotes, insights, tips, and trivia about the game he loves. Stay tuned to Crictracker for more cricket updates, and don't forget to like, share, and subscribe to our channel. Follow us on: Hi friends..Today i will show you how to make thorana beautiful..Thorana decorations.. Follow me : जवान का धमाका: 10 मिनट में दीपिका ने लूटी महफिल, सबसे बड़ा सरप्राइज है 'मुन्ना भाई' Follow Us On: Jawan का धमाका: 10 मिनट में Deepika ने लूटी महफिल, सबसे बड़ा Surprise है 'Munna Bhai' About Channel: Navtej Tv National News Channel. Navtej Tv a broadcasting company and one of the leading news channels in Rajasthan. Navtej Tv highly reliable and most trusted for political news. Navtej Tv Rajasthan is people's channel, your channel. The most honest and growing national news channel that covers the latest trending Hindi news, Hindi Bulletin, in-depth coverage of news stories, the Indian film industry, and the latest Bollywood updates. 
We primarily focus on ground-level reporting and serious news. कांग्रेस से घबराई तेलंगाना सरकार, रैली रोकने की तैयारी ! Top Telugu TV is one of the leading Digital Media channel with 1M plus Subscribers. Top Telugu tv is India's news & entertainment headquarters for Telugu around the world. We operates from Hyderabad, Warangal, Karimnagar, Vijayawada, Vizag, Kurnool. Top Telugu tv over the period become most watched, credible and respected news network in Telugu States. We Have 20 Different Channels and launching Constituency Channels soon. Pls Subscribe Top Telugu TV Now. Telugu, Language Channel owned by Bhavitha Sri Media House Pvt Ltd. For more such videos, subscribe to our YouTube channel ► https://bit.ly/2Omfzlb Don't forget to push the Bell ???? icon to never miss an update. We're always excited to hear from you! If you have any feedback, questions, or concerns, please Connect with us on: Link for registration of the Nature Cure Health Workshop : For any additional information please feel free to reach us below: To know more about us visit our official website : Connect with us on other Social Media platforms : Before adopting this Natural Lifestyle you can learn it thoroughly in the following ways : 1) Attend our upcoming video training workshops to know in detail about the science of nature cure which aims to cure all diseases. Subscribe to our channel to get notifications about the same and register for our next session via the link given below: 2) Watch more videos in this channel especially the science of naturopathy through the following link : 3) Read our books which are also available online under the following link. 4) Attend our 4 days Residential camp which takes place primarily in Delhi and the details of which can be obtained from contact no. (Phone no. 011-27510771, 9870291634/5/6). a)- The next tentative schedule of 4- Days Camp has been mentioned below : 5) Contact our health care no. 
011- 40846380 , 9870291634/5/6 for any health-related queries. Link for registration of the Nature Cure Health Workshop : For any additional information please feel free to reach us below: To know more about us visit our official website : Connect with us on other Social Media platforms : Before adopting this Natural Lifestyle you can learn it thoroughly in the following ways : 1) Attend our upcoming video training workshops to know in detail about the science of nature cure which aims to cure all diseases. Subscribe to our channel to get notifications about the same and register for our next session via the link given below: 2) Watch more videos in this channel especially the science of naturopathy through the following link : 3) Read our books which are also available online under the following link. 4) Attend our 4 days Residential camp which takes place primarily in Delhi and the details of which can be obtained from contact no. (Phone no. 011-27510771, 9870291634/5/6). a)- The next tentative schedule of 4- Days Camp has been mentioned below : 5) Contact our health care no. 011- 40846380 , 9870291634/5/6 for any health-related queries. Copyright Disclaimer Under Section 107 of the Copyright Act 1976, allowance is made for "fair use" for purposes such as criticism, comment, news reporting, teaching, scholarship, and research. Fair use is a use permitted by copyright statute that might otherwise be infringing. The opinion being delivered in the video it's personal. Non-profit, educational or personal use tips the balance in favour of fair use. Thanks for watching my video. Thank You For Watching ! This video explains about how to cook paneer kheer recipe . Thank You For Watching ! This Video is about how to cook mysore pak sweet recipe . Thank You For Watching ! Thank You For Watching ! Thank You For Watching ! This video explains about how to cook chandrakanthalu sweet recipe . Thank You For Watching ! 
This video explains about how to make sunnundalu ., Thank You For Watching ! This video explains about how to make nuvvula laddu recipe . This video explains about how to make rava laddu . Thank You For Watching ! This video explains about how to cook brinjal bajji recipe. Thank You For Watching ! Thank You For Watching !
english
<filename>src/main/java/com/gutmox/ioc/IoC.java
package com.gutmox.ioc;

import com.gutmox.handlers.HealthHandler;
import com.gutmox.handlers.HelloHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Spring-managed registry exposing the application's HTTP handlers.
 * NOTE(review): public field injection is used here; callers appear to read
 * these fields directly, so the shape is kept — confirm before switching to
 * constructor injection.
 */
@Component
public class IoC {

    // Handler for the hello endpoint; injected by Spring.
    @Autowired
    public HelloHandler helloHandler;

    // Handler for the health-check endpoint; injected by Spring.
    @Autowired
    public HealthHandler healthHandler;
}
java
<filename>test/unit/math/prim/meta/disjunction_test.cpp<gh_stars>1-10
#include <stan/math/prim/meta.hpp>
#include <gtest/gtest.h>

// Verifies stan::math::disjunction, a compile-time logical OR over a pack of
// bool-valued type traits. The result is stored in a local first because the
// commas inside the template argument list would otherwise confuse the
// EXPECT_* macros.
TEST(MathMetaPrim, or_type) {
  // All true -> true.
  bool temp = stan::math::disjunction<std::true_type, std::true_type,
                                      std::true_type>::value;
  EXPECT_TRUE(temp);

  // All false -> false.
  temp = stan::math::disjunction<std::false_type, std::false_type,
                                 std::false_type>::value;
  EXPECT_FALSE(temp);

  // Mixed, true not in first position -> true.
  temp = stan::math::disjunction<std::false_type, std::true_type,
                                 std::true_type>::value;
  EXPECT_TRUE(temp);

  // Mixed, false in last position -> still true.
  temp = stan::math::disjunction<std::true_type, std::true_type,
                                 std::false_type>::value;
  EXPECT_TRUE(temp);
}
cpp
<filename>pinot-common/src/main/java/com/linkedin/pinot/common/metadata/segment/SegmentZKMetadata.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.linkedin.pinot.common.metadata.segment;

import com.linkedin.pinot.common.metadata.ZKMetadata;
import com.linkedin.pinot.common.utils.CommonConstants;
import com.linkedin.pinot.common.utils.CommonConstants.Segment.SegmentType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nonnull;
import org.apache.helix.ZNRecord;
import org.joda.time.Duration;
import org.joda.time.Interval;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.linkedin.pinot.common.utils.EqualityUtils.*;

/**
 * Base class for per-segment metadata stored in ZooKeeper. Converts between
 * this bean and a Helix {@link ZNRecord} (see {@link #toZNRecord()} and the
 * ZNRecord constructor) and to a plain string map (see {@link #toMap()}).
 * Numeric fields use -1 as the "unset" sentinel throughout.
 */
public abstract class SegmentZKMetadata implements ZKMetadata {
  private static final Logger LOGGER = LoggerFactory.getLogger(SegmentZKMetadata.class);

  // String written to the ZNRecord when the time unit is absent.
  protected static final String NULL = "null";

  private String _segmentName;
  private String _tableName;
  private SegmentType _segmentType;
  private long _startTime = -1;
  private long _endTime = -1;
  private TimeUnit _timeUnit;
  // Derived from _timeUnit in setTimeUnit(); not serialized.
  private Duration _timeGranularity;
  // Derived from _startTime/_endTime/_timeUnit in setTimeUnit(); not serialized.
  private Interval _timeInterval;
  private String _indexVersion;
  private long _totalRawDocs = -1;
  private long _crc = -1;
  private long _creationTime = -1;
  private SegmentPartitionMetadata _partitionMetadata;
  private long _segmentUploadStartTime = -1;
  private Map<String, String> _customMap;
  private String _crypterName;

  public SegmentZKMetadata() {
  }

  /**
   * Populates this bean from a ZNRecord previously produced by
   * {@link #toZNRecord()}. A malformed partition-metadata JSON field is
   * logged and dropped rather than failing the whole load.
   */
  public SegmentZKMetadata(ZNRecord znRecord) {
    _segmentName = znRecord.getSimpleField(CommonConstants.Segment.SEGMENT_NAME);
    _tableName = znRecord.getSimpleField(CommonConstants.Segment.TABLE_NAME);
    _crypterName = znRecord.getSimpleField(CommonConstants.Segment.CRYPTER_NAME);
    _segmentType = znRecord.getEnumField(CommonConstants.Segment.SEGMENT_TYPE, SegmentType.class, SegmentType.OFFLINE);
    _startTime = znRecord.getLongField(CommonConstants.Segment.START_TIME, -1);
    _endTime = znRecord.getLongField(CommonConstants.Segment.END_TIME, -1);
    // Time unit is only applied when present and not the literal "null"
    // written by toZNRecord(); setTimeUnit() also derives granularity/interval,
    // which is why start/end time are read first.
    if (znRecord.getSimpleFields().containsKey(CommonConstants.Segment.TIME_UNIT) && !znRecord.getSimpleField(
        CommonConstants.Segment.TIME_UNIT).equals(NULL)) {
      setTimeUnit(znRecord.getEnumField(CommonConstants.Segment.TIME_UNIT, TimeUnit.class, TimeUnit.DAYS));
    }
    _indexVersion = znRecord.getSimpleField(CommonConstants.Segment.INDEX_VERSION);
    _totalRawDocs = znRecord.getLongField(CommonConstants.Segment.TOTAL_DOCS, -1);
    _crc = znRecord.getLongField(CommonConstants.Segment.CRC, -1);
    _creationTime = znRecord.getLongField(CommonConstants.Segment.CREATION_TIME, -1);
    try {
      String partitionMetadataJson = znRecord.getSimpleField(CommonConstants.Segment.PARTITION_METADATA);
      if (partitionMetadataJson != null) {
        _partitionMetadata = SegmentPartitionMetadata.fromJsonString(partitionMetadataJson);
      }
    } catch (IOException e) {
      LOGGER.error(
          "Exception caught while reading partition info from zk metadata for segment '{}', partition info dropped.",
          _segmentName, e);
    }
    _segmentUploadStartTime = znRecord.getLongField(CommonConstants.Segment.SEGMENT_UPLOAD_START_TIME, -1);
    _customMap = znRecord.getMapField(CommonConstants.Segment.CUSTOM_MAP);
  }

  public String getSegmentName() {
    return _segmentName;
  }

  public void setSegmentName(String segmentName) {
    _segmentName = segmentName;
  }

  public String getTableName() {
    return _tableName;
  }

  public void setTableName(String tableName) {
    _tableName = tableName;
  }

  public long getStartTime() {
    return _startTime;
  }

  public void setStartTime(long startTime) {
    _startTime = startTime;
  }

  public long getEndTime() {
    return _endTime;
  }

  public void setEndTime(long endTime) {
    _endTime = endTime;
  }

  public TimeUnit getTimeUnit() {
    return _timeUnit;
  }

  /**
   * NOTE: should be called after setting start and end time.
   * Also derives the time granularity and, when the start/end times form a
   * valid range, the time interval (a consuming segment may not yet have an
   * end time, in which case the interval is left unset).
   */
  public void setTimeUnit(@Nonnull TimeUnit timeUnit) {
    _timeUnit = timeUnit;
    _timeGranularity = new Duration(_timeUnit.toMillis(1));
    // For consuming segment, end time might not be set
    if (_startTime >= 0 && _startTime <= _endTime) {
      _timeInterval = new Interval(_timeUnit.toMillis(_startTime), _timeUnit.toMillis(_endTime));
    }
  }

  public Duration getTimeGranularity() {
    return _timeGranularity;
  }

  public Interval getTimeInterval() {
    return _timeInterval;
  }

  public String getIndexVersion() {
    return _indexVersion;
  }

  public void setIndexVersion(String indexVersion) {
    _indexVersion = indexVersion;
  }

  public SegmentType getSegmentType() {
    return _segmentType;
  }

  public void setSegmentType(SegmentType segmentType) {
    _segmentType = segmentType;
  }

  public String getCrypterName() {
    return _crypterName;
  }

  public void setCrypterName(String crypterName) {
    _crypterName = crypterName;
  }

  public long getTotalRawDocs() {
    return _totalRawDocs;
  }

  public void setTotalRawDocs(long totalRawDocs) {
    _totalRawDocs = totalRawDocs;
  }

  public long getCrc() {
    return _crc;
  }

  public void setCrc(long crc) {
    _crc = crc;
  }

  public long getCreationTime() {
    return _creationTime;
  }

  public void setCreationTime(long creationTime) {
    _creationTime = creationTime;
  }

  public void setPartitionMetadata(SegmentPartitionMetadata partitionMetadata) {
    _partitionMetadata = partitionMetadata;
  }

  public SegmentPartitionMetadata getPartitionMetadata() {
    return _partitionMetadata;
  }

  public long getSegmentUploadStartTime() {
    return _segmentUploadStartTime;
  }

  public void setSegmentUploadStartTime(long segmentUploadStartTime) {
    _segmentUploadStartTime = segmentUploadStartTime;
  }

  public Map<String, String> getCustomMap() {
    return _customMap;
  }

  public void setCustomMap(Map<String, String> customMap) {
    _customMap = customMap;
  }

  /**
   * Equality over all serialized fields (derived granularity/interval are
   * intentionally excluded). Keep in sync with {@link #hashCode()}.
   */
  @Override
  public boolean equals(Object segmentMetadata) {
    if (isSameReference(this, segmentMetadata)) {
      return true;
    }
    if (isNullOrNotSameClass(this, segmentMetadata)) {
      return false;
    }
    SegmentZKMetadata metadata = (SegmentZKMetadata) segmentMetadata;
    return isEqual(_segmentName, metadata._segmentName) && isEqual(_crypterName, metadata._crypterName)
        && isEqual(_tableName, metadata._tableName) && isEqual(_indexVersion, metadata._indexVersion)
        && isEqual(_timeUnit, metadata._timeUnit) && isEqual(_startTime, metadata._startTime)
        && isEqual(_endTime, metadata._endTime) && isEqual(_segmentType, metadata._segmentType)
        && isEqual(_totalRawDocs, metadata._totalRawDocs) && isEqual(_crc, metadata._crc)
        && isEqual(_creationTime, metadata._creationTime) && isEqual(_partitionMetadata, metadata._partitionMetadata)
        && isEqual(_segmentUploadStartTime, metadata._segmentUploadStartTime) && isEqual(_customMap, metadata._customMap);
  }

  @Override
  public int hashCode() {
    int result = hashCodeOf(_segmentName);
    result = hashCodeOf(result, _tableName);
    result = hashCodeOf(result, _crypterName);
    result = hashCodeOf(result, _segmentType);
    result = hashCodeOf(result, _startTime);
    result = hashCodeOf(result, _endTime);
    result = hashCodeOf(result, _timeUnit);
    result = hashCodeOf(result, _indexVersion);
    result = hashCodeOf(result, _totalRawDocs);
    result = hashCodeOf(result, _crc);
    result = hashCodeOf(result, _creationTime);
    result = hashCodeOf(result, _partitionMetadata);
    result = hashCodeOf(result, _segmentUploadStartTime);
    result = hashCodeOf(result, _customMap);
    return result;
  }

  /**
   * Serializes this bean to a ZNRecord keyed by the segment name. Optional
   * fields (crypter name, partition metadata, upload start time, custom map)
   * are only written when set; a missing time unit is written as the literal
   * "null" string so the reading side can distinguish it.
   */
  @Override
  public ZNRecord toZNRecord() {
    ZNRecord znRecord = new ZNRecord(_segmentName);
    znRecord.setSimpleField(CommonConstants.Segment.SEGMENT_NAME, _segmentName);
    znRecord.setSimpleField(CommonConstants.Segment.TABLE_NAME, _tableName);
    if (_crypterName != null) {
      znRecord.setSimpleField(CommonConstants.Segment.CRYPTER_NAME, _crypterName);
    }
    znRecord.setEnumField(CommonConstants.Segment.SEGMENT_TYPE, _segmentType);
    if (_timeUnit == null) {
      znRecord.setSimpleField(CommonConstants.Segment.TIME_UNIT, NULL);
    } else {
      znRecord.setEnumField(CommonConstants.Segment.TIME_UNIT, _timeUnit);
    }
    znRecord.setLongField(CommonConstants.Segment.START_TIME, _startTime);
    znRecord.setLongField(CommonConstants.Segment.END_TIME, _endTime);
    znRecord.setSimpleField(CommonConstants.Segment.INDEX_VERSION, _indexVersion);
    znRecord.setLongField(CommonConstants.Segment.TOTAL_DOCS, _totalRawDocs);
    znRecord.setLongField(CommonConstants.Segment.CRC, _crc);
    znRecord.setLongField(CommonConstants.Segment.CREATION_TIME, _creationTime);
    if (_partitionMetadata != null) {
      try {
        String partitionMetadataJson = _partitionMetadata.toJsonString();
        znRecord.setSimpleField(CommonConstants.Segment.PARTITION_METADATA, partitionMetadataJson);
      } catch (IOException e) {
        // Best effort: a non-serializable partition metadata is dropped, not fatal.
        LOGGER.error(
            "Exception caught while writing partition metadata into ZNRecord for segment '{}', will be dropped",
            _segmentName, e);
      }
    }
    if (_segmentUploadStartTime > 0) {
      znRecord.setLongField(CommonConstants.Segment.SEGMENT_UPLOAD_START_TIME, _segmentUploadStartTime);
    }
    if (_customMap != null) {
      znRecord.setMapField(CommonConstants.Segment.CUSTOM_MAP, _customMap);
    }
    return znRecord;
  }

  /**
   * Flattens this bean into a string map. Unlike {@link #toZNRecord()}, a
   * missing time unit or custom map is represented by a null VALUE under the
   * key, and the custom map is embedded as a JSON string.
   */
  public Map<String, String> toMap() {
    Map<String, String> configMap = new HashMap<>();
    configMap.put(CommonConstants.Segment.SEGMENT_NAME, _segmentName);
    configMap.put(CommonConstants.Segment.TABLE_NAME, _tableName);
    configMap.put(CommonConstants.Segment.SEGMENT_TYPE, _segmentType.toString());
    if (_timeUnit == null) {
      configMap.put(CommonConstants.Segment.TIME_UNIT, null);
    } else {
      configMap.put(CommonConstants.Segment.TIME_UNIT, _timeUnit.toString());
    }
    configMap.put(CommonConstants.Segment.START_TIME, Long.toString(_startTime));
    configMap.put(CommonConstants.Segment.END_TIME, Long.toString(_endTime));
    configMap.put(CommonConstants.Segment.INDEX_VERSION, _indexVersion);
    configMap.put(CommonConstants.Segment.TOTAL_DOCS, Long.toString(_totalRawDocs));
    configMap.put(CommonConstants.Segment.CRC, Long.toString(_crc));
    configMap.put(CommonConstants.Segment.CREATION_TIME, Long.toString(_creationTime));
    if (_partitionMetadata != null) {
      try {
        String partitionMetadataJson = _partitionMetadata.toJsonString();
        configMap.put(CommonConstants.Segment.PARTITION_METADATA, partitionMetadataJson);
      } catch (IOException e) {
        // Same best-effort policy as toZNRecord().
        LOGGER.error(
            "Exception caught while converting partition metadata into JSON string for segment '{}', will be dropped",
            _segmentName, e);
      }
    }
    if (_segmentUploadStartTime > 0) {
      configMap.put(CommonConstants.Segment.SEGMENT_UPLOAD_START_TIME, Long.toString(_segmentUploadStartTime));
    }
    if (_customMap == null) {
      configMap.put(CommonConstants.Segment.CUSTOM_MAP, null);
    } else {
      JSONObject jsonObject = new JSONObject(_customMap);
      configMap.put(CommonConstants.Segment.CUSTOM_MAP, jsonObject.toString());
    }
    return configMap;
  }
}
java