text
stringlengths
1
1.05M
import copy
import json
import os
from datetime import datetime, timedelta
import csv
import pytz
from django.db.models.functions import Lower
from django.http.response import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render

from .forms import SiteForm
from .models import Image, Site, TransitionDate


def is_rising(sitename, date_time):
    """Return True if `date_time` falls in the site's rising (green-up) phase.

    Winter months (Dec-Mar) always count as falling and summer months
    (Jun-Aug) as rising; for the remaining months the nearest entry in the
    site's bundled transition-dates JSON file decides.
    """
    # Unambiguous months need no file lookup.
    if date_time.month in [12, 1, 2, 3]:
        return False
    elif date_time.month in [6, 7, 8]:
        return True
    # Load this site's transition dates from the bundled JSON file.
    module_dir = os.path.dirname(__file__)  # directory of this module
    file_path = os.path.join(
        module_dir,
        'static/transition_dates/{}_transition_dates.json'.format(sitename))
    with open(file_path, 'r') as f:
        site_transitions = json.load(f)['transitions']
    # Find the transition closest in time to `date_time`.
    distances = []
    for i in range(len(site_transitions)):
        trans_date = datetime(site_transitions[i]['year'],
                              site_transitions[i]['month'],
                              site_transitions[i]['day'],
                              tzinfo=pytz.timezone('UTC'))
        distance = date_time - trans_date
        distances.append((i, distance, abs(distance)))
    closest = min(distances, key=lambda x: x[2])
    # On/after a rising transition we are in the rising phase, before it we
    # are still falling — and vice versa for a falling transition.
    if site_transitions[closest[0]]['rising']:
        return closest[1] >= timedelta()
    else:
        return closest[1] < timedelta()


def transition_dates_on_site_update(site, based_off_images=False):
    """Rebuild all TransitionDate rows for `site`.

    With `based_off_images=True` the transitions are derived from the
    per-image `is_rising` flags (after de-noising); otherwise they are read
    from the site's bundled transition-dates JSON file.
    """
    # Delete all previous transition dates before rebuilding.
    site.transitiondate_set.all().delete()
    if based_off_images:
        images = site.image_set.order_by('date_time')
        rising_bools = [im.is_rising for im in images]

        def calc_changes(rising_bools):
            """List phase runs as {'pos', 'phase', 'len'} dicts."""
            phase = rising_bools[0]
            prev_i = 0
            changes = []
            for i in range(len(rising_bools)):
                if rising_bools[i] != phase:
                    changes.append({'pos': prev_i, 'phase': phase, 'len': i - prev_i})
                    phase = not phase
                    prev_i = i
            changes.append({'pos': prev_i, 'phase': phase,
                            'len': len(rising_bools) - prev_i})
            return changes

        def remove_noise(rising_bools):
            """Flip short runs in place, with a growing length threshold,
            so tiny blips do not register as real phase transitions."""
            min_len = 62
            for threshold in [j + 2 for j in range(min_len + 2)]:
                changes = calc_changes(rising_bools)
                for change in changes:
                    if change['len'] < threshold:
                        for i in range(change['len']):
                            rising_bools[change['pos'] + i] = \
                                not rising_bools[change['pos'] + i]
            return rising_bools

        processed_rising_bools = remove_noise(rising_bools)
        # Go from indices to transition dates.
        transitions = calc_changes(processed_rising_bools)
        for i in range(len(transitions)):
            date_time = images[transitions[i]['pos']].date_time
            duration = transitions[i]['len']
            if (i == 0) or (i == len(transitions) - 1):
                duration = None  # first/last runs are truncated by the data window
            elif date_time - images[transitions[i]['pos'] - 1].date_time > timedelta(days=31):
                duration = None  # a gap in imagery precedes this transition
            elif transitions[i]['len'] < 30:
                duration = None  # too short to be a trustworthy phase
            TransitionDate(site=site, date_time=date_time,
                           rising_phase=transitions[i]['phase'],
                           duration=duration).save()
        # Recompute durations as day counts between consecutive transitions.
        tdates = site.transitiondate_set.order_by('date_time')
        reference = tdates[0]
        for tdate in tdates[1:]:
            tdelta = tdate.date_time - reference.date_time
            days = int(tdelta.total_seconds() / 86400)
            if days > 280:  # remove large gaps
                days = None
            reference.duration = days
            reference.save()
            reference = tdate
        # Fix image is_rising fields according to the de-noised phases.
        for i in range(len(processed_rising_bools)):
            images[i].is_rising = processed_rising_bools[i]
            # FIX: the original wrote `images[i].save` without the call
            # parentheses, so the corrected flags were never persisted.
            images[i].save()
    else:
        # Load transition dates from the bundled JSON file.
        module_dir = os.path.dirname(__file__)  # directory of this module
        file_path = os.path.join(
            module_dir,
            'static/transition_dates/{}_transition_dates.json'.format(site.sitename))
        with open(file_path, 'r') as f:
            transitions = json.load(f)['transitions']
        transitions.sort(key=lambda x: datetime(x['year'], x['month'], x['day'],
                                                tzinfo=pytz.timezone('UTC')))
        for i in range(len(transitions)):
            date_time = datetime(transitions[i]['year'], transitions[i]['month'],
                                 transitions[i]['day'],
                                 tzinfo=pytz.timezone('UTC'))
            # Duration is filled in below from the gap to the next transition.
            TransitionDate(site=site, date_time=date_time,
                           rising_phase=transitions[i]['rising'],
                           duration=None).save()
        tdates = site.transitiondate_set.order_by('date_time')
        reference = tdates[0]
        for tdate in tdates[1:]:
            tdelta = tdate.date_time - reference.date_time
            days = int(tdelta.total_seconds() / 86400)
            if days > 280 or days < 95:  # remove implausibly long/short spans
                days = None
            reference.duration = days
            reference.save()
            reference = tdate


def save_images(images, sitename):
    """Create Image rows for uploaded files and refresh the site's transitions.

    File names are expected to look like ``<site>_YYYY_MM_DD_HHMMSS.<ext>``;
    the timestamp is parsed out of the name and stored as UTC.
    """
    site = Site.objects.get(sitename=sitename)
    for image in images:
        date_time_list = image.name.split('_')
        del date_time_list[0]           # drop the site-name prefix
        hrminsec = date_time_list[3]    # HHMMSS.<ext>
        date_time_list[3] = hrminsec[:2]
        date_time_list.append(hrminsec[2:4])
        date_time_list.append(hrminsec[4:])
        date_time_list[-1] = date_time_list[-1].split('.')[0]  # strip extension
        date_time_list = [int(i) for i in date_time_list]
        date_time = datetime(date_time_list[0], date_time_list[1],
                             date_time_list[2], date_time_list[3],
                             date_time_list[4], date_time_list[5], 0,
                             tzinfo=pytz.timezone('UTC'))
        # run tensorflow model - DO NOT FORGET ABOUT THIS!!!!
        try:
            img = Image(site=site, date_time=date_time,
                        is_rising=is_rising(sitename, date_time),
                        image_upload=image)
            img.save()
        except Exception:
            # Best-effort: skip files whose names don't parse or that fail
            # to save, rather than aborting the whole upload batch.
            pass
    transition_dates_on_site_update(site)


# Create your views here.
def home(response):
    """Render the landing page."""
    context = {}
    return render(response, 'main/home.html', context)


def settings(response):
    """Settings page; POST with 'delete_all_images' wipes every Image row."""
    if response.method == 'POST':
        if 'delete_all_images' in response.POST:
            Image.objects.all().delete()
    sites = Site.objects.order_by(Lower('sitename'))
    context = {'sites': sites}
    return render(response, 'main/settings.html', context)


def data_management(response):
    """Render the data-management landing page."""
    context = {}
    return render(response, 'main/data_management.html', context)


def sites(response):
    """Site list; POSTing a selection redirects to that site's detail page."""
    if response.method == 'POST':
        post = response.POST
        sitename = post['site_selected']
        return HttpResponseRedirect('{}/'.format(sitename))
    all_sites = Site.objects.all().order_by(Lower('sitename'))
    context = {'sites': all_sites}
    return render(response, 'main/site_list.html', context)


def site_add(response):
    """Create a new Site from the submitted form and ingest its images.

    'save_leave' creates the site and redirects back to the list;
    'save_add_more' creates the site and re-renders the form for another.
    """
    if response.method == 'POST':
        form = SiteForm(response.POST)
        if form.is_valid() and 'save_leave' in response.POST:
            stnm = form.cleaned_data['sitename']
            loc = form.cleaned_data['location']
            lat = form.cleaned_data['latitude']
            lng = form.cleaned_data['longitude']
            elev = form.cleaned_data['elevation']
            dominant_species = form.cleaned_data['dominant_species']
            s = Site(sitename=stnm, location_desc=loc, latitude=lat,
                     longitude=lng, elevation=elev,
                     dominant_species=dominant_species)
            s.save()
            save_images(response.FILES.getlist('images'), s.sitename)
            return HttpResponseRedirect('/data-management/sites/')
        if form.is_valid() and 'save_add_more' in response.POST:
            stnm = form.cleaned_data['sitename']
            loc = form.cleaned_data['location']
            lat = form.cleaned_data['latitude']
            lng = form.cleaned_data['longitude']
            # FIX: the original never read elevation/dominant_species in this
            # branch but still used the names, raising NameError on submit.
            elev = form.cleaned_data['elevation']
            dominant_species = form.cleaned_data['dominant_species']
            s = Site(sitename=stnm, location_desc=loc, latitude=lat,
                     longitude=lng, elevation=elev,
                     dominant_species=dominant_species)
            s.save()
            save_images(response.FILES.getlist('images'), s.sitename)
    context = {}
    return render(response, 'main/site_add.html', context)


def site_view(response, sitename):
    """Read-only detail page for one site."""
    site = Site.objects.all().filter(sitename=sitename)[0]
    ordered = site.image_set.order_by('date_time')
    dates = [img.date_time.strftime("%m/%d/%Y, %H:%M:%S") for img in ordered]
    img_paths = [str(img.image_upload.name) for img in ordered]
    context = {'site': site, 'date_list': dates, 'img_paths': img_paths}
    return render(response, 'main/site_view.html', context)


def site_view_edit(response, sitename):
    """Edit a site's metadata via SiteForm; GET pre-fills from the model."""
    site = get_object_or_404(Site, sitename=sitename)
    if response.method == 'POST':
        form = SiteForm(response.POST)
        if form.is_valid():
            site.sitename = form.cleaned_data['sitename']
            site.location_desc = form.cleaned_data['location']
            site.latitude = form.cleaned_data['latitude']
            site.longitude = form.cleaned_data['longitude']
            site.elevation = form.cleaned_data['elevation']
            site.dominant_species = form.cleaned_data['dominant_species']
            site.save()
            return HttpResponseRedirect(
                '/data-management/sites/{}/'.format(site.sitename))
    else:
        # NOTE(review): the form reads the field as 'location' on POST but
        # the initial data uses the key 'location_desc' — if SiteForm's field
        # is named 'location' this initial value is ignored; confirm against
        # SiteForm's declared fields.
        form = SiteForm(initial={'sitename': site.sitename,
                                 'location_desc': site.location_desc,
                                 'latitude': site.latitude,
                                 'longitude': site.longitude,
                                 'elevation': site.elevation,
                                 'dominant_species': site.dominant_species})
    ordered = site.image_set.order_by('date_time')
    dates = [img.date_time.strftime("%m/%d/%Y, %H:%M:%S") for img in ordered]
    img_paths = [str(img.image_upload.name) for img in ordered]
    context = {'site': site, 'date_list': dates,
               'img_paths': img_paths, 'form': form}
    return render(response, 'main/site_view_edit.html', context)


def site_gallery(response, sitename):
    """Image gallery for one site, ordered by capture time."""
    site = Site.objects.all().filter(sitename=sitename)[0]
    images = site.image_set.order_by('date_time')
    context = {'site': site, 'images': images}
    return render(response, 'main/site_image_gallery.html', context)


def individual_image_view(response, sitename, imagename):
    """Detail page for a single image, matched by name substring."""
    site = Site.objects.all().filter(sitename=sitename)[0]
    image = [im for im in site.image_set.filter()
             if imagename in im.image_upload.name][0]
    context = {'site': site, 'image': image}
    return render(response, 'main/image_individual_view.html', context)
def _yearly_avg_onsets(years, rising_phase):
    """Per-year average onset datetime for one phase; None for empty years.

    Averaging is done on POSIX timestamps, matching the original code.
    """
    avgs = []
    for year in years:
        stamps = [datetime.timestamp(td.date_time)
                  for td in TransitionDate.objects.filter(rising_phase__exact=rising_phase)
                  if td.date_time.year == year]
        if stamps:
            avgs.append(datetime.fromtimestamp(int(sum(stamps) / len(stamps))))
        else:
            avgs.append(None)
    return avgs


def _yearly_avg_durations(years, rising_phase):
    """Per-year average duration in days for one phase; None for empty years."""
    avgs = []
    for year in years:
        durs = [td.duration
                for td in TransitionDate.objects.filter(rising_phase__exact=rising_phase)
                if td.duration is not None and td.date_time.year == year]
        if durs:
            avgs.append(int(sum(durs) / len(durs)))
        else:
            avgs.append(None)
    return avgs


def upload_images(response):
    """Upload images for a selected site (POST) or show the upload form."""
    if response.method == 'POST':
        post = response.POST
        sitename = post['site_selected']
        images = response.FILES.getlist('images')
        save_images(images, sitename)
        return HttpResponseRedirect('#')
    all_sites = Site.objects.all().order_by(Lower('sitename'))
    sitenames = [str(site.sitename) for site in all_sites]
    context = {'sites': sitenames}
    return render(response, 'main/upload_images.html', context)


def analysis(response):
    """Analysis landing page; POSTing a site redirects to its analysis."""
    if response.method == 'POST':
        post = response.POST
        sitename = post['site_selected']
        return HttpResponseRedirect('/analysis/{}/'.format(sitename))
    all_sites = Site.objects.all().order_by(Lower('sitename'))
    sitenames = [str(site.sitename) for site in all_sites]
    context = {'sites': sitenames}
    return render(response, 'main/analysis_home.html', context)


def analysis_site(response, sitename):
    """Per-site phenology analysis page.

    Rebuilds the site's transition dates, then assembles per-year phase
    tables, chart series, and the image timeline for the template.
    """
    if response.method == 'POST':
        post = response.POST
        sitename = post['site_selected']
        return HttpResponseRedirect('/analysis/{}/'.format(sitename))
    site = Site.objects.get(sitename=sitename)
    transition_dates_on_site_update(site)
    all_sites = Site.objects.all().order_by(Lower('sitename'))
    sitenames = [str(s.sitename) for s in all_sites]

    def get_phase_data(site):
        """Per-year [rising, falling] phase dicts plus a padded copy for charts."""
        rising_tds = site.transitiondate_set.filter(
            rising_phase__exact=True).order_by('date_time')
        falling_tds = site.transitiondate_set.filter(
            rising_phase__exact=False).order_by('date_time')
        rising_phases = []
        falling_phases = []
        for i in range(max([rising_tds.count(), falling_tds.count()])):
            # The querysets can differ in length; IndexError just means one
            # of them ran out of entries at this index.
            try:
                rp = rising_tds[i]
                # FIX: the original prefixed a literal '0' to the month for
                # rising phases (October became '010'); pad month and day
                # uniformly to two digits instead.
                rising_phases.append({
                    'year': rp.date_time.year,
                    'year_month_day': '{:02d}/{:02d}/{}'.format(
                        rp.date_time.month, rp.date_time.day, rp.date_time.year),
                    'month': rp.date_time.month,
                    'day': rp.date_time.day,
                    'duration': rp.duration,
                    'percent_change': None})
            except IndexError:
                pass
            try:
                fp = falling_tds[i]
                falling_phases.append({
                    'year': fp.date_time.year,
                    'year_month_day': '{:02d}/{:02d}/{}'.format(
                        fp.date_time.month, fp.date_time.day, fp.date_time.year),
                    'month': fp.date_time.month,
                    'day': fp.date_time.day,
                    'duration': fp.duration,
                    'percent_change': None})
            except IndexError:
                pass
        if not rising_phases and not falling_phases:
            # FIX: the original raised an uncaught IndexError for a site with
            # no transition data at all.
            return [], []
        if rising_phases and falling_phases:
            min_year = min([rising_phases[0]['year'], falling_phases[0]['year']])
            max_year = max([rising_phases[-1]['year'], falling_phases[-1]['year']])
        elif not rising_phases:
            min_year = falling_phases[0]['year']
            max_year = falling_phases[-1]['year']
        else:
            min_year = rising_phases[0]['year']
            max_year = rising_phases[-1]['year']
        tdates = []
        for yr in range(min_year, max_year + 1):
            rising = None
            for rsng in rising_phases:
                if yr == rsng['year'] and rsng['duration'] is not None:
                    rising = rsng  # last qualifying phase of the year wins
            falling = None
            for fllng in falling_phases:
                if yr == fllng['year'] and fllng['duration'] is not None:
                    falling = fllng
            tdates.append([rising, falling])
        # Calculate % change in duration versus the previous year.
        for i in range(len(tdates)):
            for j in range(len(tdates[i])):
                if tdates[i][j] is not None and i != 0:
                    try:
                        prev = tdates[i - 1][j]
                        percent_change = (float(tdates[i][j]['duration'] - prev['duration'])
                                          / (0.01 * prev['duration']))
                        tdates[i][j]['percent_change'] = percent_change
                    except (TypeError, ZeroDivisionError):
                        pass  # previous year missing or zero-duration
                    if tdates[i][j]['percent_change'] is None:
                        # FIX: the original wrote to tdates[1][j] here —
                        # an index typo for tdates[i][j].
                        tdates[i][j]['percent_change'] = 'null'
                elif tdates[i][j] is not None and i == 0:
                    tdates[i][j]['percent_change'] = 'null'
        # Padded copy: missing years become zero-duration placeholder dicts.
        pad_tdates = copy.deepcopy(tdates)
        for i in range(len(pad_tdates)):
            for j in range(len(pad_tdates[i])):
                if pad_tdates[i][j] is None:
                    pad_tdates[i][j] = {'year': i + min_year,
                                        'year_month_day': None,
                                        'month': None, 'day': None,
                                        'duration': 0,
                                        'percent_change': 'null'}
        return tdates, pad_tdates

    def get_phase_years(site):
        """Contiguous list of calendar years spanned by the site's transitions."""
        rising_tds = site.transitiondate_set.filter(
            rising_phase__exact=True).order_by('date_time')
        falling_tds = site.transitiondate_set.filter(
            rising_phase__exact=False).order_by('date_time')
        rising_phases = []
        falling_phases = []
        for i in range(max([rising_tds.count(), falling_tds.count()])):
            try:
                rp = rising_tds[i]
                rising_phases.append({'year': rp.date_time.year,
                                      'month_day': '{}/{}'.format(
                                          rp.date_time.month, rp.date_time.day),
                                      'duration': rp.duration,
                                      'percent_change': None})
            except IndexError:
                pass
            try:
                fp = falling_tds[i]
                falling_phases.append({'year': fp.date_time.year,
                                       'month_day': '{}/{}'.format(
                                           fp.date_time.month, fp.date_time.day),
                                       'duration': fp.duration,
                                       'percent_change': None})
            except IndexError:
                pass
        if not rising_phases and not falling_phases:
            return []  # FIX: original crashed with no data at all
        if rising_phases and falling_phases:
            min_year = min([rising_phases[0]['year'], falling_phases[0]['year']])
            max_year = max([rising_phases[-1]['year'], falling_phases[-1]['year']])
        elif not rising_phases:
            min_year = falling_phases[0]['year']
            max_year = falling_phases[-1]['year']
        else:
            min_year = rising_phases[0]['year']
            max_year = rising_phases[-1]['year']
        return list(range(min_year, max_year + 1))

    def get_transition_dates(site):
        """Yearly (years, transitions) per phase plus gap-free (year, transition) pairs."""
        rising_transitions = list(site.transitiondate_set.filter(
            rising_phase__exact=True).order_by('date_time'))
        falling_transitions = list(site.transitiondate_set.filter(
            rising_phase__exact=False).order_by('date_time'))
        if not rising_transitions and not falling_transitions:
            return ([], []), ([], []), [], []  # FIX: original crashed here
        if rising_transitions and falling_transitions:
            min_year = min([rising_transitions[0].date_time.year,
                            falling_transitions[0].date_time.year])
            max_year = max([rising_transitions[-1].date_time.year,
                            falling_transitions[-1].date_time.year])
        elif not rising_transitions:
            min_year = falling_transitions[0].date_time.year
            max_year = falling_transitions[-1].date_time.year
        else:
            min_year = rising_transitions[0].date_time.year
            max_year = rising_transitions[-1].date_time.year
        years = list(range(min_year, max_year + 1))
        rising_yearly = []
        falling_yearly = []
        for year in years:
            rising = None
            falling = None
            for transition in rising_transitions:
                if transition.date_time.year == year:
                    rising = transition  # last transition in the year wins
            for transition in falling_transitions:
                if transition.date_time.year == year:
                    falling = transition
            rising_yearly.append(rising)
            falling_yearly.append(falling)
        # FIX: the original deleted list entries while iterating by index,
        # which skips the element after every deletion; filter the
        # (year, transition) pairs instead.
        r = [(yr, tr) for yr, tr in zip(years, rising_yearly) if tr is not None]
        f = [(yr, tr) for yr, tr in zip(years, falling_yearly) if tr is not None]
        rising_years = [yr for yr, _ in r]
        rising_yearly = [tr for _, tr in r]
        falling_years = [yr for yr, _ in f]
        falling_yearly = [tr for _, tr in f]
        return ((rising_years, rising_yearly),
                (falling_years, falling_yearly), r, f)

    phases, padded_phases = get_phase_data(site)
    _, _, r_data, f_data = get_transition_dates(site)
    ordered = site.image_set.order_by('date_time')
    dates = [img.date_time.strftime("%m/%d/%Y, %H:%M:%S") for img in ordered]
    img_paths = [str(img.image_upload.name) for img in ordered]
    phenophases = [img.is_rising for img in ordered]
    context = {'sites': sitenames, 'site': site, 'phases': phases,
               'padded_phases': padded_phases,
               'use_spline': len(get_phase_years(site)) >= 3,
               'phase_years': get_phase_years(site),
               'bud_burst_data': r_data, 'senescence_data': f_data,
               'date_list': dates, 'img_paths': img_paths,
               'phenophases': phenophases}
    return render(response, 'main/analysis_site.html', context)


def aggregate_analysis(response):
    """
    phenophase table
    ~~~~~~~~~~~~~~~~~
    One row per year with: year, average budburst onset date and duration,
    average senescence onset date and duration.

    analysis graphs
    ~~~~~~~~~~~~~~~
    [year, value] series for budburst/senescence onset dates and durations
    (onset datetimes are rendered JS-side).
    """
    years = sorted({td.date_time.year for td in TransitionDate.objects.all()})
    # FIX: the original's four averaging loops were inconsistent — the
    # budburst-duration average was computed outside its try block (crashing
    # with ZeroDivisionError on a year with no data) and the onset loops
    # silently skipped empty years, misaligning every per-year list that
    # table_data later indexes in lockstep. The helpers emit a None
    # placeholder for empty years so all lists stay aligned with `years`.
    budburst_onsets = _yearly_avg_onsets(years, True)
    senescence_onsets = _yearly_avg_onsets(years, False)
    budburst_durations = _yearly_avg_durations(years, True)
    senescence_durations = _yearly_avg_durations(years, False)
    budburst_monthdays = [dt.strftime('%m/%d/%Y') if dt is not None else None
                          for dt in budburst_onsets]
    senescence_monthdays = [dt.strftime('%m/%d/%Y') if dt is not None else None
                            for dt in senescence_onsets]
    table_data = []
    for i in range(len(years)):
        table_data.append({'year': years[i],
                           'bb_date': budburst_monthdays[i],
                           'bb_dur': budburst_durations[i],
                           'ls_date': senescence_monthdays[i],
                           'ls_dur': senescence_durations[i]})
    # Graph series skip years without data rather than plotting None.
    budburst_onset_data = [[year, onset] for year, onset
                           in zip(years, budburst_onsets) if onset is not None]
    senescence_onset_data = [[year, onset] for year, onset
                             in zip(years, senescence_onsets) if onset is not None]
    budburst_duration_data = [[year, duration] for year, duration
                              in zip(years, budburst_durations) if duration is not None]
    senescence_duration_data = [[year, duration] for year, duration
                                in zip(years, senescence_durations) if duration is not None]
    context = {'table_data': table_data, 'years': years,
               'budburst_avg_onset': budburst_onsets,
               'budburst_avg_dur': budburst_durations,
               'senescence_avg_onset': senescence_onsets,
               'senescence_avg_dur': senescence_durations,
               'budburst_onset_data': budburst_onset_data,
               'budburst_dur_data': budburst_duration_data,
               'senescence_onset_data': senescence_onset_data,
               'senescence_dur_data': senescence_duration_data}
    return render(response, 'main/analysis_aggregate.html', context)


def site_map(response, view):
    """Render the site map for one metric `view`, with min/max legend values
    read from the precomputed ``mapdata.csv``."""
    str_formats = ['last_{PHASE}_tdate', 'last_{PHASE}_doy', 'last_{PHASE}_dur',
                   'avg_last3yrs_{PHASE}_tdate', 'avg_last3yrs_{PHASE}_doy',
                   'avg_last3yrs_{PHASE}_dur',
                   'avg_diff_yrly_{PHASE}_tdate', 'avg_diff_yrly_{PHASE}_dur',
                   'avg_diff_yrly_{PHASE}_dur_prcnt',
                   'diff_firstlast_{PHASE}_tdate', 'diff_firstlast_{PHASE}_dur',
                   'diff_firstlast_{PHASE}_dur_prcnt']
    # CSV column-name pairs, two per format string (budburst, senescence).
    constant_pairs = [
        ['MIN_BUDBURST_RECENT_TDATE', 'MAX_BUDBURST_RECENT_TDATE'],
        ['MIN_SENESCENCE_RECENT_TDATE', 'MAX_SENESCENCE_RECENT_TDATE'],  # MOST RECENT
        ['MIN_BUDBURST_RECENT_DOY', 'MAX_BUDBURST_RECENT_DOY'],
        ['MIN_SENESCENCE_RECENT_DOY', 'MAX_SENESCENCE_RECENT_DOY'],
        ['MIN_BUDBURST_RECENT_DUR', 'MAX_BUDBURST_RECENT_DUR'],
        ['MIN_SENESCENCE_RECENT_DUR', 'MAX_SENESCENCE_RECENT_DUR'],  # END MOST RECENT
        ['MIN_BUDBURST_AVG3YRS_TDATE', 'MAX_BUDBURST_AVG3YRS_TDATE'],
        ['MIN_SENESCENCE_AVG3YRS_TDATE', 'MAX_SENESCENCE_AVG3YRS_TDATE'],  # 3YR AVERAGE
        ['MIN_BUDBURST_AVG3YRS_DOY', 'MAX_BUDBURST_AVG3YRS_DOY'],
        ['MIN_SENESCENCE_AVG3YRS_DOY', 'MAX_SENESCENCE_AVG3YRS_DOY'],
        ['MIN_BUDBURST_AVG3YRS_DUR', 'MAX_BUDBURST_AVG3YRS_DUR'],
        ['MIN_SENESCENCE_AVG3YRS_DUR', 'MAX_SENESCENCE_AVG3YRS_DUR'],  # END 3YR AVERAGE
        ['MIN_BUDBURST_AVG_DIFF_TDATE', 'MAX_BUDBURST_AVG_DIFF_TDATE'],
        ['MIN_SENESCENCE_AVG_DIFF_TDATE', 'MAX_SENESCENCE_AVG_DIFF_TDATE'],  # AVG DIFF
        ['MIN_BUDBURST_AVG_DIFF_DUR', 'MAX_BUDBURST_AVG_DIFF_DUR'],
        ['MIN_SENESCENCE_AVG_DIFF_DUR', 'MAX_SENESCENCE_AVG_DIFF_DUR'],
        ['MIN_BUDBURST_AVG_DIFF_DUR_PRCNT', 'MAX_BUDBURST_AVG_DIFF_DUR_PRCNT'],
        ['MIN_SENESCENCE_AVG_DIFF_DUR_PRCNT', 'MAX_SENESCENCE_AVG_DIFF_DUR_PRCNT'],  # END AVG DIF
        ['MIN_BUDBURST_TOTAL_DIFF_DOY', 'MAX_BUDBURST_TOTAL_DIFF_DOY'],
        ['MIN_SENESCENCE_TOTAL_DIFF_DOY', 'MAX_SENESCENCE_TOTAL_DIFF_DOY'],  # TOTAL DIFF
        ['MIN_BUDBURST_TOTAL_DIFF_DUR', 'MAX_BUDBURST_TOTAL_DIFF_DUR'],
        ['MIN_SENESCENCE_TOTAL_DIFF_DUR', 'MAX_SENESCENCE_TOTAL_DIFF_DUR'],
        ['MIN_BUDBURST_TOTAL_DIFF_DUR_PRCNT', 'MAX_BUDBURST_TOTAL_DIFF_DUR_PRCNT'],
        ['MIN_SENESCENCE_TOTAL_DIFF_DUR_PRCNT', 'MAX_SENESCENCE_TOTAL_DIFF_DUR_PRCNT']]  # END TOTAL DIFF
    titles = ['Most Recent {PHASE} Onset Date',  # most recent *complete* ...?
              'Most Recent {PHASE} Onset Date',
              'Most Recent {PHASE} Duration',
              'Three Year Average {PHASE} Onset Date',
              'Three Year Average {PHASE} Onset Date',
              'Three Year Average {PHASE} Duration',
              'Average Yearly Change in {PHASE} Onset Date',  # move {PHASE} first??
              'Average Yearly Change in {PHASE} Duration',
              'Average Yearly Change in {PHASE} Duration',
              'Change in {PHASE} Onset Date Since First Year of Data',
              'Change in {PHASE} Duration Since First Year of Data',
              'Change in {PHASE} Duration Since First Year of Data']
    legend_descs = [
        'Earlier onset dates reflected by darker gradients and smaller circles',
        'Earlier onset dates reflected by darker gradients and smaller circles',
        'Lower phenophase durations reflected by darker gradients and smaller circles',
        'Earlier onset dates reflected by darker gradients and smaller circles',
        'Earlier onset dates reflected by darker gradients and smaller circles',
        'Lower phenophase durations reflected by darker gradients and smaller circles',
        'Lower average yearly phenophase onset date differences reflected by darker gradients and smaller circles',
        'Lower average yearly phenophase duration differences reflected by darker gradients and smaller circles',
        'Lower average yearly phenophase duration differences in percentage reflected by darker gradients and smaller circles',
        'Lower differences in phenophase onset date compared to first record reflected by darker gradients and smaller circles',
        'Lower differences in phenophase duration compared to first record reflected by darker gradients and smaller circles',
        'Lower differences in phenophase duration compared to first record in percentage reflected by darker gradients and smaller circles']
    units = ['', '', 'days', '', '', 'days', 'days', 'days', '%',
             'days', 'days', '%']
    # Expand each format string into a budburst row and a senescence row.
    # (`fmt` instead of the original `format`, which shadowed the builtin.)
    fields = []
    for i, fmt in enumerate(str_formats):
        phase = 'budburst'
        phase_title = 'Bud Burst'
        field = fmt.format(PHASE=phase)
        title = titles[i].format(PHASE=phase_title)
        legend_desc = legend_descs[i].format(PHASE=phase_title.lower())
        fields.append([field, constant_pairs[2 * i][0], constant_pairs[2 * i][1],
                       constant_pairs[2 * i][0], constant_pairs[2 * i][1],
                       title, legend_desc, units[i]])
        phase = 'senescence'
        phase_title = 'Leaf Senescence'
        field = fmt.format(PHASE=phase)
        title = titles[i].format(PHASE=phase_title)
        legend_desc = legend_descs[i].format(PHASE=phase_title.lower())
        fields.append([field, constant_pairs[2 * i + 1][0], constant_pairs[2 * i + 1][1],
                       constant_pairs[2 * i + 1][0], constant_pairs[2 * i + 1][1],
                       title, legend_desc, units[i]])
    field_data = []
    with open('./main/static/mapdata.csv') as f:
        data = [dict(row) for row in csv.DictReader(f, skipinitialspace=True)]
    for field_set in fields:
        set_data = {'field': field_set[0],
                    'min_field': field_set[1], 'min': data[0][field_set[1]],
                    'max_field': field_set[2], 'max': data[0][field_set[2]],
                    'min_label': data[0][field_set[3]],
                    'max_label': data[0][field_set[4]],
                    'title': field_set[5],
                    'legend_description': field_set[6],
                    'unit': field_set[7]}
        field_data.append(set_data)
    # Only return the requested view's data.
    idx = 0
    for i, set_data in enumerate(field_data):
        if view == set_data['field']:
            idx = i
            break
    # Day-of-year views reuse the matching tdate view's labels.
    target_field = field_data[idx]['field']
    if ('doy' in target_field) and ('diff' not in target_field):
        field_data[idx]['min_label'] = field_data[idx - 2]['min_label']
        field_data[idx]['max_label'] = field_data[idx - 2]['max_label']
    # Percentage fields are stored as fractions; scale the labels to %.
    if 'PRCNT' in field_data[idx]['min_field'] or 'PRCNT' in field_data[idx]['max_field']:
        minfield = float(field_data[idx]['min_label']) * 100
        maxfield = float(field_data[idx]['max_label']) * 100
        field_data[idx]['min_label'] = f"{minfield:.4f}"
        field_data[idx]['max_label'] = f"{maxfield:.4f}"
    context = {'view': view, 'field': field_data[idx]}
    return render(response, 'main/site-map.html', context)


def site_datamap(response):
    """Render the data-map picker listing every available map metric."""
    str_formats = ['last_{PHASE}_doy', 'last_{PHASE}_dur',
                   'avg_last3yrs_{PHASE}_doy', 'avg_last3yrs_{PHASE}_dur',
                   'avg_diff_yrly_{PHASE}_tdate', 'avg_diff_yrly_{PHASE}_dur',
                   'avg_diff_yrly_{PHASE}_dur_prcnt',
                   'diff_firstlast_{PHASE}_tdate', 'diff_firstlast_{PHASE}_dur',
                   'diff_firstlast_{PHASE}_dur_prcnt']
    titles = ['Most Recent {PHASE} Onset Date',
              'Most Recent {PHASE} Duration',
              'Three Year Average {PHASE} Onset Date',
              'Three Year Average {PHASE} Duration',
              'Average Yearly Change in {PHASE} Onset Date',
              'Average Yearly Change in {PHASE} Duration',
              'Average Yearly Change in {PHASE} Duration',
              'Change in {PHASE} Onset Date Since First Year of Data',
              'Change in {PHASE} Duration Since First Year of Data',
              'Change in {PHASE} Duration Since First Year of Data']
    fields = []
    for i, fmt in enumerate(str_formats):  # fmt: avoid shadowing builtin `format`
        phase = 'budburst'
        phase_title = 'Bud Burst'
        field = fmt.format(PHASE=phase)
        title = titles[i].format(PHASE=phase_title)
        fields.append({'field': field, 'title': title})
        phase = 'senescence'
        phase_title = 'Leaf Senescence'
        field = fmt.format(PHASE=phase)
        title = titles[i].format(PHASE=phase_title)
        fields.append({'field': field, 'title': title})
    context = {'fields': fields}
    return render(response, 'main/site-datamap.html', context)
/** * */ package jframe.pay.alipay; import jframe.core.plugin.DefPlugin; /** * @author dzh * @date Aug 31, 2015 3:41:08 PM * @since 1.0 */ public class AlipayPlugin extends DefPlugin { }
#!/bin/sh # CYBERWATCH SAS - 2017 # # Security fix for DSA-2865-1 # # Security announcement date: 2014-02-20 00:00:00 UTC # Script generation date: 2017-01-01 21:06:50 UTC # # Operating System: Debian 7 (Wheezy) # Architecture: i386 # # Vulnerable packages fix on version: # - postgresql-9.1:9.1.12-0wheezy1 # # Last versions recommanded by security team: # - postgresql-9.1:9.1.24-0+deb7u1 # # CVE List: # - CVE-2014-0060 # - CVE-2014-0061 # - CVE-2014-0062 # - CVE-2014-0063 # - CVE-2014-0064 # - CVE-2014-0065 # - CVE-2014-0066 # - CVE-2014-0067 # - CVE-2014-2669 # # More details: # - https://www.cyberwatch.fr/vulnerabilites # # Licence: Released under The MIT License (MIT), See LICENSE FILE sudo apt-get install --only-upgrade postgresql-9.1=9.1.24-0+deb7u1 -y
<reponame>addcolouragency/craft_storefront<filename>node_modules/ts-toolbelt/out/List/Includes.d.ts import { Match } from '../Any/_Internal'; import { Includes as OIncludes } from '../Object/Includes'; import { ObjectOf } from './ObjectOf'; import { List } from './List'; /** * Check whether `L` has entries that match `M` * @param L to be inspected * @param M to check entry type * @param match (?=`'default'`) to change precision * @returns [[Boolean]] * @example * ```ts * ``` */ export declare type Includes<L extends List, M extends any, match extends Match = 'default'> = OIncludes<ObjectOf<L>, M, match>;
#include "text.h" #include "utility.h" #include "iostream" using namespace std; void help() { string str = "\n" "uniqLines: keep lines with unique value in a given column \n" " - <NAME> (<EMAIL>)\n" "\n" "Usage: uniqLines -i input -o output -c column\n" "\n" "Options:\n" "\n" " -i input input file\n" " -o output output file\n" " -c column column number, 1-based\n" "\n"; cerr << str; exit(0); } int main(int argc, char* argv[]) { if (argc < 2) help(); // default string input="input"; string output="output"; int col=0; // parse arguments string str; for (int i = 1; i < argc; i++) { if (i != argc) { str=argv[i]; if (str == "-i") { input = argv[i + 1]; i=i+1; } else if (str == "-o") { output = argv[i + 1]; i=i+1; } else if (str == "-c") { col = atoi(argv[i + 1]); i=i+1; } else { message("unknown option: "+str); help(); } } } int n = find_unique_lines( input, output, col); message(to_string(n)+" unique lines saved to "+output); return 0; }
def dot_product(v1, v2):
    """Return the dot product of two equal-length vectors."""
    # Refuse mismatched lengths up front.
    assert len(v1) == len(v2), "vectors are of different length"
    return sum(a * b for a, b in zip(v1, v2))


if __name__ == '__main__':
    v1 = [1, 2, 3]
    v2 = [4, 5, 6]
    print(dot_product(v1, v2))  # Output: 32
<reponame>lgoldstein/communitychest
/*
 *
 */
package net.community.chest.math.compare;

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.NoSuchElementException;

import net.community.chest.math.NumbersFunction;
import net.community.chest.math.functions.DualArgumentsCalculator;
import net.community.chest.math.functions.MathFunctions;
import net.community.chest.util.collection.CollectionsUtils;
import net.community.chest.util.compare.AbstractComparator;

/**
 * <P>Copyright GPLv2</P>
 *
 * <P>An {@link Enum}-eration that provides comparison operators for
 * {@link Comparable} objects. Each constant maps the sign of a
 * {@code compare(...)} result to the {@link Boolean} outcome of its
 * operator.</P>
 * @author <NAME>.
 * @since Apr 13, 2009 10:32:00 AM
 */
public enum ComparableOperator implements ComparisonExecutor, NumbersFunction, DualArgumentsCalculator {
    EQ("==") {
        /* @see net.community.chest.ComparisonExecutor#getComparisonResult(int) */
        @Override
        public Boolean getComparisonResult (final int nRes) {
            return Boolean.valueOf(nRes == 0);
        }
    },
    NE("<>") {
        /* @see net.community.chest.ComparisonExecutor#getComparisonResult(int) */
        @Override
        public Boolean getComparisonResult (final int nRes) {
            return Boolean.valueOf(nRes != 0);
        }
    },
    LT("<") {
        /* @see net.community.chest.ComparisonExecutor#getComparisonResult(int) */
        @Override
        public Boolean getComparisonResult (final int nRes) {
            return Boolean.valueOf(nRes < 0);
        }
    },
    LE("<=") {
        /* @see net.community.chest.ComparisonExecutor#getComparisonResult(int) */
        @Override
        public Boolean getComparisonResult (final int nRes) {
            return Boolean.valueOf(nRes <= 0);
        }
    },
    GT(">") {
        /* @see net.community.chest.ComparisonExecutor#getComparisonResult(int) */
        @Override
        public Boolean getComparisonResult (final int nRes) {
            return Boolean.valueOf(nRes > 0);
        }
    },
    GE(">=") {
        /* @see net.community.chest.ComparisonExecutor#getComparisonResult(int) */
        @Override
        public Boolean getComparisonResult (final int nRes) {
            return Boolean.valueOf(nRes >= 0);
        }
    };

    // Compare two Comparable-s and translate the sign of the result.
    /* @see net.community.chest.ComparisonExecutor#invoke(java.lang.Comparable, java.lang.Comparable) */
    @Override
    public <V extends Comparable<V>> Boolean invoke (V o1, V o2) {
        return getComparisonResult(AbstractComparator.compareComparables(o1, o2));
    }

    // Same, but using an explicit Comparator; null comparator yields null.
    /* @see net.community.chest.ComparisonExecutor#invoke(java.util.Comparator, java.lang.Object, java.lang.Object) */
    @Override
    public <V> Boolean invoke (Comparator<? super V> c, V o1, V o2) {
        if (null == c)
            return null;
        return getComparisonResult(c.compare(o1, o2));
    }

    // Numeric adapter: 1.0 for true, 0.0 for false.
    /* @see net.community.chest.math.functions.DualArgumentsCalculator#execute(double, double) */
    @Override
    public final double execute (double v1, double v2) {
        final Boolean res=invoke(Double.valueOf(v1), Double.valueOf(v2));
        if (null == res)
            throw new IllegalArgumentException("Failed to compare " + v1 + " and " + v2);
        return res.booleanValue() ? 1.0d : 0.0d;
    }

    // Numeric adapter: 1L for true, 0L for false.
    /* @see net.community.chest.math.functions.DualArgumentsCalculator#execute(long, long) */
    @Override
    public final long execute (long v1, long v2) {
        final Boolean res=invoke(Long.valueOf(v1), Long.valueOf(v2));
        if (null == res)
            throw new IllegalArgumentException("Failed to compare " + v1 + " and " + v2);
        return res.booleanValue() ? 1L : 0L;
    }

    /* @see net.community.chest.math.NumbersFunction#getFloatingPointExecutionState() */
    @Override
    public final Boolean getFloatingPointExecutionState () {
        return null;
    }

    /* @see net.community.chest.math.NumbersFunction#invoke(java.util.List) */
    @Override
    public Number invoke (final List<? extends Number> args) throws IllegalArgumentException, ClassCastException {
        return MathFunctions.invokeDual(this, args);
    }

    /* @see net.community.chest.math.NumbersFunction#invoke(java.lang.Number[]) */
    @Override
    public Number invoke (Number... args) throws IllegalArgumentException, ClassCastException {
        return MathFunctions.invokeDual(this, args);
    }

    /* @see net.community.chest.math.FunctionInterface#getName() */
    @Override
    public final String getName () {
        return name();
    }

    /* @see net.community.chest.math.FunctionInterface#getNumArguments() */
    @Override
    public final int getNumArguments () {
        return 2;
    }

    // Human-readable operator symbol (e.g. "<=")
    private final String _symbol;
    /* @see net.community.chest.math.FunctionInterface#getSymbol() */
    @Override
    public final String getSymbol () {
        return _symbol;
    }

    ComparableOperator (String sym) {
        _symbol = sym;
    }

    public static final List<ComparableOperator> VALUES=Collections.unmodifiableList(Arrays.asList(values()));
    public static final ComparableOperator fromString (final String s) {
        return CollectionsUtils.fromString(VALUES, s, false);
    }

    public static final ComparableOperator fromSymbol (final String sym) {
        return MathFunctions.fromSymbol(VALUES, sym, false);
    }

    // Return the logical negation of the given operator (EQ <-> NE, etc.).
    public static final ComparableOperator inverse (final ComparableOperator op) {
        if (null == op)
            return null;
        switch(op) {
            case EQ : return NE;
            case GE : return LT;
            case GT : return LE;
            case LE : return GT;
            case LT : return GE;
            case NE : return EQ;
            default : // should not happen
                throw new NoSuchElementException("inverse(" + op + ") unknown operator");
        }
    }

    // negates whatever Boolean invocation result is received from the real comparator
    public static final ComparisonExecutor negate (final ComparisonExecutor c) {
        // some "shortcuts"
        if (c instanceof ComparableOperator)
            return inverse((ComparableOperator) c);
        return ComparatorNegator.negate(ComparisonExecutor.class, c);
    }
}
#! /usr/bin/env bash
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Integration test: update the ratings value in the MySQL DB running on a
# GCE VM and verify the change shows up in the bookinfo web UI served
# through the Istio ingress gateway.

set -e

# Handle different project namings
if [ -z "$GCE_PROJECT" ]; then
  GCE_PROJECT="$PROJECT"
fi

ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"

if [ -f "$ROOT/scripts/istio.env" ]; then
  # shellcheck source=scripts/istio.env
  source "$ROOT/scripts/istio.env"
fi

# Set number of stars for review
# Globals:
#   GCE_VM - Name used for GCE VM
#   GCE_PROJECT - Project hosting GCE VM
#   ZONE - Zone of GCE VM
# Arguments:
#   NUM_STARS - The variable to check (must be 1-5)
# Returns:
#   None
set_ratings() {
  if [[ $1 =~ ^[1-5]$ ]]; then
    COMMAND="mysql -u root --password=password test -e \"update ratings set rating=${1} where reviewid=1\""
    gcloud compute ssh "${GCE_VM}" --project "${GCE_PROJECT}" --zone "${ZONE}" --command "${COMMAND}"
    return 0
  fi
  echo "Passed an invalid value to update the database. Aborting..."
  return 1
}

# Test that changes to db are reflected in web ui
# Globals:
#   None
# Arguments:
#   URL - application URL to test
# Returns:
#   None
test_integration() {
  # Get and store the currently served webpage
  BEFORE="$(curl -s "$1")"
  # Update the MySQL database rating with a two star review to generate a diff
  # proving the MySQL on GCE database is being used by the application
  set_ratings "$2"
  # Get the updated webpage with the updated ratings
  AFTER="$(curl -s "$1")"
  # Check to make sure that changing the rating in the DB generated a diff in the
  # webpage. NOTE: diff exits non-zero when the pages differ, so `! diff`
  # succeeds exactly when a change was observed.
  if ! diff --suppress-common-lines <(echo "${AFTER}") <(echo "${BEFORE}") \
    > /dev/null
  then
    echo "SUCCESS: Web UI reflects DB change"
    return 0
  else
    # No diff: still OK if the page already shows the requested star count.
    if [[ $(echo "${AFTER}" | grep "glyphicon-star" | grep -cv "glyphicon-star-empty") == $((4 + ${2})) ]]; then
      echo "SUCCESS: No changes made to database as new value is same as old value"
      return 0
    fi
    echo "ERROR: DB change wasn't reflected in web UI:"
    diff --suppress-common-lines <(echo "${AFTER}") <(echo "${BEFORE}")
    return 1
  fi
}

# set to jenkins if there is no $USER
USER=$(whoami)
[[ "${USER}" == "root" ]] && export USER=jenkins
echo "user is: $USER"

# Get the IP address and port of the cluster's gateway to run tests against
INGRESS_HOST=$(HTTPS_PROXY=localhost:8888 kubectl -n istio-system get service istio-ingressgateway \
  -o jsonpath='{.status.loadBalancer.ingress[0].ip}')
INGRESS_PORT=$(HTTPS_PROXY=localhost:8888 kubectl -n istio-system get service istio-ingressgateway \
  -o jsonpath='{.spec.ports[?(@.name=="http")].port}')

## Check if port is set or not.
if [ -z "$INGRESS_PORT" ]; then
  GATEWAY_URL="${INGRESS_HOST}"
else
  GATEWAY_URL="${INGRESS_HOST}:${INGRESS_PORT}"
fi

APP_URL="http://${GATEWAY_URL}/productpage"

# Retry up to 30 times, 10s apart, until the integration test passes.
for x in {1..30}
do
  if [ $x == 30 ]; then
    echo "We have exceeded attempts to validate service..."
    exit 1
  fi
  if test_integration "$APP_URL" "${1}"; then
    exit 0
  fi
  sleep 10
done
def calculate_interest_accrual(NT: float) -> float:
    """Return the interest accrued on NT, rounded to 2 decimal places.

    Rate tiers: 5% when NT equals 1, 3% when NT equals 2, 2% otherwise.
    """
    tier_one, tier_two = 1, 2
    if NT == tier_one:
        rate = 0.05
    elif NT == tier_two:
        rate = 0.03
    else:
        rate = 0.02
    return round(rate * NT, 2)
<gh_stars>0
// AngularJS WebRTC questionnaire app: peer-to-peer video call with
// shared canvas annotation, snapshotting and video recording, plus a
// small stopwatch directive/filter.
var localVideo;
var localStream;
var remoteVideo;
var peerConnection;
var socket;       // socket.io signalling channel
var recorder;     // RecordRTC instance for the remote stream
var request;      // id of the remote caller (used to address socket events)

const peerConnectionConfig = {
  iceServers: [
    { urls: "stun:stun.stunprotocol.org:3478" },
    { urls: "stun:stun.l.google.com:19302" }
  ]
};

(function(angular) {
  "use strict";
  var myApp = angular.module("app", ["stopwatch"]);
  myApp.controller("QuestionarioController", [
    "$scope","$timeout",
    function($scope,$timeout) {
      var ctrl = this;
      var points = []; // strokes drawn on the annotation canvas
      ctrl.uuid;
      // ctrl.width = 600;
      // ctrl.height = 340;
      ctrl.width = 300;
      ctrl.height = 400;
      $scope.chamada_iniciada = false;
      ctrl.anexos = [];
      ctrl.questoes = [
        { titulo: "Pergunta A", tipo: "Gravacao", documentos: [] },
        { titulo: "Pergunta B", tipo: "SelecaoMultipla", documentos: [] },
        { titulo: "Pergunta C", tipo: "Gravacao", documentos: [] },
        { titulo: "Pergunta D", tipo: "SelecaoMultipla", documentos: [] }
      ];
      var services = [
        { name: "Resposta A" },
        { name: "Resposta B" },
        { name: "Resposta C" },
        { name: "Resposta D" }
      ];
      ctrl.availability = { services: services };

      //all your init controller goodness in here
      ctrl.onInit = function() {
        localVideo = document.getElementById("localVideo");
        remoteVideo = document.getElementById("remoteVideo");
        ctrl.uuid = createUUID();
        defineCanvasDesenho();
        criarSocket();
        criaChamadaLocal();
      };

      ctrl.encerrarChamada = function() {
        // Notify the peer that the call is ending
        enviarDadosSocket("call:end", { request: Number(request), uuid: ctrl.uuid });
        encerrarChamada();
        $scope.stop_video();
      };

      ctrl.limparPontos = function() {
        // Clear the local canvas
        limparDadosCanvas();
        // Ask the peer to clear its points as well
        enviarDadosSocket("canvas:clean", { request: Number(request), uuid: ctrl.uuid });
      };

      ctrl.abrirDocumentos = function(pergunta) {
        ctrl.anexos = pergunta.documentos;
        $("#anexos-perguntas").modal("show");
      };

      ctrl.abrirLocalizacao = function(pergunta) {
        $("#modal-localizacao").modal("show");
      };

      // Snapshot the remote video into a Blob attached to the question.
      ctrl.tirarPrint = function(questao) {
        var video = document.getElementById("remoteVideo");
        var canvas_print = document.getElementById("canvas-print");
        // Flash animation to signal the snapshot
        angular.element("#remoteVideo").animate(
          { opacity: 0.3 },
          function() {
            //call when the animation is complete
            angular.element("#remoteVideo").animate({ opacity: 1 });
          }
        );
        canvas_print.height = video.height;
        canvas_print.width = video.width;
        // canvas_print.height = 1080;
        // canvas_print.width = 1920;
        var ctx = canvas_print.getContext("2d");
        ctx.drawImage(video, 0, 0, video.width, video.height);
        // ctx.drawImage(video, 0, 0, 1920, 1080);
        var dataURL = ctx.canvas.toBlob(function(blob) {
          questao.documentos.push({ type: "Imagem", src: blob });
          $scope.$apply();
        });
      };

      ctrl.gravarVideo = function(questao) {
        debugger; // NOTE(review): leftover debugger statement
        questao.gravando = true;
        debugger; // NOTE(review): leftover debugger statement
        // $scope.onTimeout();
        mytimeout = $timeout($scope.onTimeout,1000);
        recorder.startRecording();
      };

      // Two independent timers: one for recording, one for the call itself.
      $scope.timer = ""
      $scope.counter = 0;
      $scope.timer_video = ""
      $scope.counter_video = 0;
      var mytimeout ;
      var mytimeout_video ;

      $scope.onTimeout = function(){
        $scope.counter++;
        $scope.timer = fancyTimeFormat($scope.counter);
        mytimeout = $timeout($scope.onTimeout,1000);
      }
      $scope.onTimeout_video = function(){
        $scope.counter_video++;
        $scope.timer_video = fancyTimeFormat($scope.counter_video);
        mytimeout_video = $timeout($scope.onTimeout_video,1000);
      }
      $scope.stop = function(){
        $scope.counter = 0;
        $scope.timer = fancyTimeFormat($scope.counter);
        $timeout.cancel(mytimeout);
      }
      $scope.stop_video = function(){
        $scope.counter_video = 0;
        $scope.timer_video = fancyTimeFormat($scope.counter_video);
        $timeout.cancel(mytimeout_video);
      }

      // Format seconds as h:mm:ss / m:ss.
      function fancyTimeFormat(time) {
        // Hours, minutes and seconds
        var hrs = ~~(time / 3600);
        var mins = ~~((time % 3600) / 60);
        var secs = time % 60;
        // Output like "1:01" or "4:03:59" or "123:03:59"
        var ret = "";
        if (hrs > 0) {
          ret += "" + hrs + ":" + (mins < 10 ? "0" : "");
        }
        ret += "" + mins + ":" + (secs < 10 ? "0" : "");
        ret += "" + secs;
        return ret;
      }

      ctrl.pararGravacao = function(questao) {
        questao.gravando = false;
        $scope.stop();
        recorder.stopRecording(function(url) {
          questao.documentos.push({ type: "Video", src: url });
          $scope.$apply();
        });
      };

      ctrl.downloadVideo = function(url) {
        var xhr = new XMLHttpRequest();
        xhr.responseType = "blob";
        xhr.onload = function() {
          var recoveredBlob = xhr.response;
          var reader = new FileReader();
          saveAs(xhr.response, "teste.webm");
        };
        xhr.open("GET", url);
        xhr.send();
      };

      ctrl.downloadImagem = function(blob) {
        saveAs(blob, "image.png");
      };

      ctrl.start = function() {
        changeCallStatus(true);
        start(true);
      };

      // Create the RTCPeerConnection; as caller, also create the offer.
      function start(isCaller) {
        mytimeout_video = $timeout($scope.onTimeout_video,1000);
        changeCallStatus(true);
        peerConnection = new RTCPeerConnection(peerConnectionConfig);
        peerConnection.onicecandidate = gotIceCandidate;
        peerConnection.ontrack = gotRemoteStream;
        peerConnection.onremovestream = stop;
        peerConnection.addStream(localStream);
        if (isCaller) {
          peerConnection
            .createOffer()
            .then(createdDescription)
            .catch(errorHandler);
        }
      }

      function getUserMediaSuccess(stream) {
        localStream = stream;
        localVideo.srcObject = stream;
      }

      function receivedSDP(signal) {
        if (signal.sdp) {
          peerConnection
            .setRemoteDescription(new RTCSessionDescription(signal.sdp))
            .then(function() {
              // Only create answers in response to offers
              if (signal.sdp.type == "offer") {
                peerConnection
                  .createAnswer()
                  .then(createdDescription)
                  .catch(errorHandler);
              }
            })
            .catch(errorHandler);
        }
      }

      function receivedIceCandidate(signal) {
        if (signal.ice) {
          peerConnection
            .addIceCandidate(new RTCIceCandidate(signal.ice))
            .catch(errorHandler);
        }
      }

      function gotIceCandidate(event) {
        if (event.candidate != null) {
          // Forward the new ICE candidate to the peer
          enviarDadosSocket("call:ICECandidate", {
            request: Number(request),
            ice: event.candidate,
            uuid: ctrl.uuid
          });
        }
      }

      // Send a payload over the signalling socket; warns if no call id yet.
      function enviarDadosSocket(eventName, dados) {
        if (!request) alert("Chamada não encontrada");
        socket.emit(eventName, dados);
      }

      function createdDescription(description) {
        console.log("got description");
        peerConnection
          .setLocalDescription(description)
          .then(function() {
            // Notify the peer that the call is starting
            enviarDadosSocket("call:start", {
              sdp: peerConnection.localDescription,
              uuid: ctrl.uuid,
              request: Number(request)
            });
          })
          .catch(errorHandler);
      }

      function gotRemoteStream(event) {
        console.log("got remote stream");
        var stream = event.streams[0];
        remoteVideo.srcObject = stream;
        // var teste = localStream;
        // teste.addTrack(stream)
        var mixer = new MultiStreamsMixer([stream, localStream]);
        mixer.frameInterval = 1;
        mixer.startDrawingFrames();
        // Mix the streams so the recording keeps both audio tracks
        // recorder = RecordRTC(mixer.getMixedStream(), {
        recorder = RecordRTC(stream, {
          type: "video",
          mimeType: "video/webm",
          //audioBitsPerSecond: 192000,
          video: { width: 1920, height: 1080 },
          ignoreMutedMedia: false,
          recorderType: MediaStreamRecorder || CanvasRecorder || StereoAudioRecorder
        });
      }

      function errorHandler(error) {
        console.log(error);
      }

      // NOTE(review): not a real UUID — a random int in [1, 10000].
      function createUUID() {
        return Math.floor(Math.random() * 10000) + 1;
      }

      // Wire mouse events on the annotation canvas to record strokes and
      // broadcast them on mouseup.
      function defineCanvasDesenho() {
        var canvas_camera = document.getElementById("canvas-camera");
        var ctx = canvas_camera.getContext("2d"),
          isDown = false,
          prevX,
          prevY;
        canvas_camera.onmousedown = function(e) {
          var pos = getXY(e);
          prevX = pos.x;
          prevY = pos.y;
          /// add new stroke
          points.push([]);
          /// record point in this stroke
          points[points.length - 1].push([pos.x, pos.y]);
          isDown = true;
        };
        canvas_camera.onmousemove = function(e) {
          if (!isDown) return;
          var pos = getXY(e);
          ctx.lineWidth = 4;
          ctx.strokeStyle = "#FF0000";
          ctx.fillStyle = "transparent";
          ctx.globalAlpha = 1;
          ctx.globalCompositeOperation = "source-over";
          ctx.lineCap = "round";
          ctx.lineJoin = "round";
          ctx.font = '15px "Arial"';
          ctx.beginPath();
          ctx.moveTo(prevX, prevY);
          ctx.lineTo(pos.x, pos.y);
          ctx.stroke();
          prevX = pos.x;
          prevY = pos.y;
          points[points.length - 1].push([pos.x, pos.y]);
        };
        canvas_camera.onmouseup = function() {
          isDown = false;
          // Send the finished stroke set to the peer
          enviarDadosSocket("canvas:draw", {
            request: Number(request),
            points: points,
            uuid: ctrl.uuid
          });
        };
        function getXY(e) {
          var r = canvas_camera.getBoundingClientRect();
          return { x: e.clientX - r.left, y: e.clientY - r.top };
        }
      }

      // Redraw all strokes received from the peer.
      function desenharPontos(data) {
        points = data.points;
        var canvas_other = document.getElementById("canvas-camera");
        var ctx_other = canvas_other.getContext("2d");
        ctx_other.clearRect(0, 0, canvas_other.width, canvas_other.height);
        ctx_other.lineWidth = 4;
        ctx_other.strokeStyle = "#FF0000";
        ctx_other.fillStyle = "transparent";
        ctx_other.globalAlpha = 1;
        ctx_other.globalCompositeOperation = "source-over";
        ctx_other.lineCap = "round";
        ctx_other.lineJoin = "round";
        ctx_other.font = '15px "Arial"';
        /// get a stroke
        for (var i = 0, t, p, pts; (pts = points[i]); i++) {
          /// render stroke
          ctx_other.beginPath();
          ctx_other.moveTo(pts[0][0], pts[0][1]);
          for (t = 1; (p = pts[t]); t++) {
            ctx_other.lineTo(p[0], p[1]);
          }
          ctx_other.stroke();
        }
      }

      function limparDadosCanvas() {
        var canvas_other = document.getElementById("canvas-camera");
        var ctx_other = canvas_other.getContext("2d");
        ctx_other.clearRect(0, 0, canvas_other.width, canvas_other.height);
        points = [];
      }

      function changeCallStatus(status) {
        $scope.chamada_iniciada = status;
        $scope.$apply();
      }

      // Connect the signalling socket and register event handlers.
      function criarSocket() {
        socket = io.connect("https://remote.irisk.com.br:4443");
        // Connection handshake
        socket.emit("init", { uuid: ctrl.uuid });
        // Socket event handlers
        socket.on("call:start", function(data) {
          socketCall(data);
        });
        socket.on("call:ICECandidate", function(data) {
          socketICE(data);
        });
        socket.on("call:end", function(data) {
          socketCallEnd(data);
        });
        socket.on("canvas:clean", function(data) {
          socketCanvasClean(data);
        });
        socket.on("canvas:draw", function(data) {
          socketCanvasDraw(data);
        });
        socket.on("canvas:receivePhoto", function(data) {
          socketReceivePhoto(data);
        });
      }

      function socketReceivePhoto(data) {
        ctrl.questoes[2].documentos.push({
          type: "Imagem",
          src: new Blob([new Uint8Array(data.blob)])
        });
        $scope.$apply();
      }

      function socketICE(data) {
        // Make sure a connection exists before handling the event
        verificaConexao();
        if (data.uuid == ctrl.uuid) return;
        receivedIceCandidate(data);
      }

      function socketCall(data) {
        // Make sure a connection exists before handling the event
        verificaConexao();
        // Ignore messages from ourself
        if (data.uuid == ctrl.uuid) return;
        // Record the caller's id
        if (!request) request = data.uuid;
        receivedSDP(data);
      }

      function socketCallEnd(data) {
        verificaConexao();
        // Ignore messages from ourself
        if (data.uuid == ctrl.uuid) return;
        encerrarChamada();
      }

      function socketCanvasClean(data) {
        verificaConexao();
        // Ignore messages from ourself
        if (data.uuid == ctrl.uuid) return;
        // Delegate to the local cleanup routine
        limparDadosCanvas();
      }

      function socketCanvasDraw(data) {
        verificaConexao();
        // Ignore messages from ourself
        if (data.uuid == ctrl.uuid) return;
        // Delegate to the local drawing routine
        desenharPontos(data);
      }

      function receivePhoto(data) {
        verificaConexao();
        // Ignore messages from ourself
        if (data.uuid == ctrl.uuid) return;
        desenharPontos(data);
      }
      receivePhoto; // NOTE(review): no-op expression statement — likely leftover

      function verificaConexao() {
        // Start answering if no connection is established yet
        if (!peerConnection || peerConnection.signalingState == "closed") start(false);
      }

      // Request local audio capture (audio only — no video constraint).
      function criaChamadaLocal() {
        var constraints = { audio: true };
        if (navigator.mediaDevices.getUserMedia) {
          navigator.mediaDevices
            .getUserMedia(constraints)
            .then(getUserMediaSuccess)
            .catch(errorHandler);
        } else {
          alert("Your browser does not support getUserMedia API");
        }
      }

      // Tear down the peer connection and reset UI/canvas state.
      function encerrarChamada() {
        peerConnection.close();
        localStream.stop();
        request = null;
        changeCallStatus(false);
        limparDadosCanvas();
        alert("Chamada encerrada");
      }

      ctrl.onInit();
      //return ctrl;
    }
  ]);
})(window.angular);

// Stopwatch directive: minutes:seconds display with start/stop/clear.
angular
  .module("stopwatch", [])
  .directive("khs", function($timeout) {
    return {
      restrict: "E",
      transclude: true,
      scope: {},
      controller: function($scope, $element) {
        var timeoutId;
        $scope.seconds = 0;
        $scope.minutes = 0;
        $scope.running = false;
        $scope.stop = function() {
          $timeout.cancel(timeoutId);
          $scope.running = false;
        };
        $scope.start = function() {
          timer();
          $scope.running = true;
        };
        $scope.clear = function() {
          $scope.seconds = 0;
          $scope.minutes = 0;
        };
        function timer() {
          timeoutId = $timeout(function() {
            updateTime(); // update Model
            timer();
          }, 1000);
        }
        function updateTime() {
          $scope.seconds++;
          if ($scope.seconds === 60) {
            $scope.seconds = 0;
            $scope.minutes++;
          }
        }
      },
      template:
        '<div class="blueborder">' +
        "<div>{{minutes|numberpad:2}}:{{seconds|numberpad:2}}</div><br/>" +
        '<input type="button" ng-model="startButton" ng-click="start()" ng-disabled="running" value="START" />' +
        '<input type="button" ng-model="stopButton" ng-click="stop()" ng-disabled="!running" value="STOP" />' +
        '<input type="button" ng-model="clearButton" ng-click="clear()" ng-disabled="running" value="CLEAR" />' +
        "</div>",
      replace: true
    };
  })
  // Left-pad a number with zeros to the requested width.
  .filter("numberpad", function() {
    return function(input, places) {
      var out = "";
      if (places) {
        var placesLength = parseInt(places, 10);
        var inputLength = input.toString().length;
        for (var i = 0; i < placesLength - inputLength; i++) {
          out = "0" + out;
        }
        out = out + input;
      }
      return out;
    };
  });
#!/bin/bash -eux
# Recreate the Hive database used by the PyHive test suite:
# drop any stale copy, create it fresh, grant the hadoop user full
# access, and add a single dummy table for queries to target.
hive -e 'DROP DATABASE IF EXISTS pyhive_test_database'
hive -e 'CREATE DATABASE pyhive_test_database'
hive -e 'GRANT ALL ON DATABASE pyhive_test_database TO USER hadoop'
hive -e 'CREATE TABLE pyhive_test_database.dummy_table (a INT)'
#!/bin/bash

# Creates RPM or DEB repository for binaries from
# $pre_repo_dir/$target/$box, signs it with keys
# from ${gpg_keys_path} and puts signed repo to
# ${unsorted_repo_dir}/$target/$box (presumably — the original
# header sentence was truncated; TODO confirm destination).
#
# Expects in the environment (set by the caller):
#   sshopt/scpopt/sshuser/IP - connection parameters for the build VM
#   repo_path, pre_repo_dir, unsorted_repo_dir, gpg_keys_path
#   target, box, platform, platform_version, major_ver

set -x
export work_dir="MaxScale"

echo "creating repository"
echo "cleaning VM"
ssh $sshopt "rm -rf dest; rm -rf src;"
echo " creating dirs on VM"
ssh $sshopt "mkdir -p dest ; mkdir -p src; mkdir gpg_keys"

echo "copying stuff to VM"
if [ $1 == "full_repo" ] ; then
  # Collect every published package for this platform/version
  find ${repo_path}/maxscale-${major_ver}.*-release/mariadb-maxscale/${platform}/${platform_version}/* -name "*.rpm" -exec scp $scpopt {} $sshuser@$IP:src/ \;
  find ${repo_path}/maxscale-${major_ver}.*-release/mariadb-maxscale/${platform}/dists/${platform_version}/* -name "*.deb" -exec scp $scpopt {} $sshuser@$IP:src/ \;
else
  scp $scpopt $pre_repo_dir/$target/$box/* $sshuser@$IP:src/
fi

# Import signing keys on the VM
scp $scpopt -r ${gpg_keys_path}/* $sshuser@$IP:./gpg_keys/
ssh $sshopt "key=\`ls ~/gpg_keys/*.public -1\` ; gpg --import \$key"
ssh $sshopt "key=\`ls ~/gpg_keys/*.private -1\` ; gpg --allow-secret-key-import --import \$key"

echo "executing create_repo.sh on VM"
ssh $sshopt "export platform=$platform; export platform_version=$platform_version; ./$work_dir/BUILD/mdbci/create_repo.sh dest/ src/"
if [ $? != 0 ] ; then
  echo "Repo creation failed!"
  exit 1
fi

echo "cleaning ${unsorted_repo_dir}/$target/$box/"
rm -rf ${unsorted_repo_dir}/$target/$box/*

echo "copying repo from $box"
mkdir -p ${unsorted_repo_dir}/$target/$box
scp $scpopt -r $sshuser@$IP:dest/* ${unsorted_repo_dir}/$target/$box/
class BankAccount:
    """A minimal bank account supporting deposits and withdrawals."""

    def __init__(self, ownerName, initialBalance):
        self.ownerName = ownerName
        self.balance = initialBalance

    def deposit(self, amount):
        """Add ``amount`` to the balance and report the new total."""
        self.balance = self.balance + amount
        print('Deposit Successful! Now your total balance is {}'.format(self.balance))

    def withdraw(self, amount):
        """Subtract ``amount`` from the balance if funds are sufficient."""
        if amount <= self.balance:
            self.balance = self.balance - amount
            print('Withdrawal Successful! Now your total balance is {}'.format(self.balance))
        else:
            print('Insufficient balance!')


account1 = BankAccount("John", 1000)
account1.deposit(500)
account1.withdraw(200)
<filename>src/__mocks__/customers.js<gh_stars>0
// Mock customer records for the customer list view.
// NOTE: address.state/city and phone hold department/role/major labels
// rather than real geographic/phone data — intentional fixture reuse.
import { v4 as uuid } from 'uuid';

export default [
  {
    id: uuid(),
    address: {
      country: 'USA',
      state: '財務部',
      city: '財務長',
      street: '2849 Fulton Street'
    },
    avatarUrl: '/static/images/avatars/avatar_3.png',
    createdAt: 1555016400000,
    email: '<EMAIL>',
    name: '<NAME>',
    phone: '資工系'
  },
  {
    id: uuid(),
    address: {
      country: 'USA',
      state: '公關部',
      city: '幹部',
      street: '1865 Pleasant Hill Road'
    },
    avatarUrl: '/static/images/avatars/avatar_4.png',
    createdAt: 1555016400000,
    email: '<EMAIL>',
    name: '<NAME>',
    phone: '政治系'
  },
  {
    id: uuid(),
    address: {
      country: 'USA',
      state: '活動部',
      city: '社員',
      street: '4894 Lakeland Park Drive'
    },
    avatarUrl: '/static/images/avatars/avatar_2.png',
    createdAt: 1555016400000,
    email: '<EMAIL>',
    name: '<NAME>',
    phone: '財金系'
  }
];
#!/usr/bin/env bash
# Description: Renders clusters YAML into different files for each spoke cluster
# NOTE(review): heredoc YAML indentation below was reconstructed from a
# whitespace-collapsed source — verify against the original repo.

set -o pipefail
set -o nounset
set -m

# Generate ${OUTPUTDIR}/kustomization.yaml listing one resource file per
# master plus the cluster-level file for the given spoke.
create_kustomization() {
    # Loop for spokes
    # Prepare loop for spokes
    local cluster=${1}
    local spokenumber=${2}

    # Pregenerate kustomization.yaml and spoke cluster config
    OUTPUT="${OUTPUTDIR}/kustomization.yaml"
    # Write header
    echo "resources:" >${OUTPUT}

    echo ">> Detecting number of masters"
    export NUM_M=$(yq e ".spokes[${spokenumber}].[]|keys" ${SPOKES_FILE} | grep master | wc -l | xargs)
    echo ">> Masters: ${NUM_M}"
    # Convert the count into the highest zero-based master index
    export NUM_M=$((NUM_M - 1))
    echo ">> Rendering Kustomize for: ${cluster}"

    for node in $(seq 0 ${NUM_M}); do
        echo " - ${cluster}-master-${node}.yaml" >>${OUTPUT}
    done
    echo " - ${cluster}-cluster.yaml" >>${OUTPUT}
}

# Render the per-spoke manifests: cluster-level objects
# (Namespace/Secret/AgentClusterInstall/ClusterDeployment/
# KlusterletAddonConfig/ManagedCluster/InfraEnv) plus one
# NMStateConfig/Secret/BareMetalHost trio per master node.
create_spoke_definitions() {
    # Reset loop for spoke general definition
    local cluster=${1}
    local spokenumber=${2}

    echo ">> Detecting number of masters"
    export NUM_M=$(yq e ".spokes[${spokenumber}].[]|keys" ${SPOKES_FILE} | grep master | wc -l | xargs)

    # Generic vars for all spokes
    export CHANGE_MACHINE_CIDR=192.168.7.0/24
    export CHANGE_SPOKE_PULL_SECRET_NAME=pull-secret-spoke-cluster
    export CHANGE_PULL_SECRET=$(cat "${PULL_SECRET}")
    export CHANGE_SPOKE_CLUSTERIMAGESET=${CLUSTERIMAGESET}
    export CHANGE_SPOKE_API=192.168.7.243
    export CHANGE_SPOKE_INGRESS=192.168.7.242
    export CHANGE_SPOKE_CLUSTER_NET_PREFIX=23
    export CHANGE_SPOKE_CLUSTER_NET_CIDR=10.128.0.0/14
    export CHANGE_SPOKE_SVC_NET_CIDR=172.30.0.0/16
    export CHANGE_RSA_HUB_PUB_KEY=$(oc get cm -n kube-system cluster-config-v1 -o yaml | grep -A 1 sshKey | tail -1)

    # RSA
    generate_rsa_spoke ${cluster}
    export CHANGE_RSA_PUB_KEY=$(cat ${RSA_PUB_FILE})
    export CHANGE_RSA_PRV_KEY=$(cat ${RSA_KEY_FILE})

    # Set vars
    export CHANGE_SPOKE_NAME=${cluster}
    grab_api_ingress ${cluster}
    export CHANGE_BASEDOMAIN=${HUB_BASEDOMAIN}
    # Base64-encoded /etc/hosts entry for the spoke API VIP
    export IGN_OVERRIDE_API_HOSTS=$(echo -n "${CHANGE_SPOKE_API} ${SPOKE_API_NAME}" | base64 -w0)
    export IGN_CSR_APPROVER_SCRIPT=$(base64 csr_autoapprover.sh -w0)
    export JSON_STRING_CFG_OVERRIDE_INFRAENV='{"ignition": {"version": "3.1.0"}, "storage": {"files": [{"path": "/etc/hosts", "append": [{"source": "data:text/plain;base64,'${IGN_OVERRIDE_API_HOSTS}'"}]}]}}'
    export JSON_STRING_CFG_OVERRIDE_BMH='{"ignition":{"version":"3.2.0"},"systemd":{"units":[{"name":"csr-approver.service","enabled":true,"contents":"[Unit]\nDescription=CSR Approver\nAfter=network.target\n\n[Service]\nUser=root\nType=oneshot\nExecStart=/bin/bash -c /opt/bin/csr-approver.sh\n\n[Install]\nWantedBy=multi-user.target"}]},"storage":{"files":[{"path":"/opt/bin/csr-approver.sh","mode":492,"append":[{"source":"data:text/plain;base64,'${IGN_CSR_APPROVER_SCRIPT}'"}]}]}}'

    # Generate the spoke definition yaml
    cat <<EOF >${OUTPUTDIR}/${cluster}-cluster.yaml
---
apiVersion: v1
kind: Namespace
metadata:
  name: $CHANGE_SPOKE_NAME
---
apiVersion: v1
kind: Secret
metadata:
  name: $CHANGE_SPOKE_PULL_SECRET_NAME
  namespace: $CHANGE_SPOKE_NAME
stringData:
  .dockerconfigjson: '$CHANGE_PULL_SECRET'
type: kubernetes.io/dockerconfigjson
---
apiVersion: extensions.hive.openshift.io/v1beta1
kind: AgentClusterInstall
metadata:
  name: $CHANGE_SPOKE_NAME
  namespace: $CHANGE_SPOKE_NAME
spec:
  clusterDeploymentRef:
    name: $CHANGE_SPOKE_NAME
  imageSetRef:
    name: $CHANGE_SPOKE_CLUSTERIMAGESET
  fips: true
EOF

    # Multi-node (3 masters) gets VIPs; otherwise render SNO networking.
    if [ "${NUM_M}" -eq "3" ]; then
        cat <<EOF >>${OUTPUTDIR}/${cluster}-cluster.yaml
  apiVIP: "$CHANGE_SPOKE_API"
  ingressVIP: "$CHANGE_SPOKE_INGRESS"
  networking:
    clusterNetwork:
      - cidr: "$CHANGE_SPOKE_CLUSTER_NET_CIDR"
        hostPrefix: $CHANGE_SPOKE_CLUSTER_NET_PREFIX
    serviceNetwork:
      - "$CHANGE_SPOKE_SVC_NET_CIDR"
  provisionRequirements:
    controlPlaneAgents: 3
EOF
    else # SNO
        cat <<EOF >>${OUTPUTDIR}/${cluster}-cluster.yaml
  networking:
    clusterNetwork:
      - cidr: "$CHANGE_SPOKE_CLUSTER_NET_CIDR"
        hostPrefix: $CHANGE_SPOKE_CLUSTER_NET_PREFIX
    serviceNetwork:
      - "$CHANGE_SPOKE_SVC_NET_CIDR"
    machineNetwork:
      - cidr: "$CHANGE_MACHINE_CIDR"
  provisionRequirements:
    controlPlaneAgents: 1
EOF
    fi

    cat <<EOF >>${OUTPUTDIR}/${cluster}-cluster.yaml
  sshPublicKey: '$CHANGE_RSA_PUB_KEY'
---
apiVersion: hive.openshift.io/v1
kind: ClusterDeployment
metadata:
  name: $CHANGE_SPOKE_NAME
  namespace: $CHANGE_SPOKE_NAME
spec:
  baseDomain: $CHANGE_BASEDOMAIN
  clusterName: $CHANGE_SPOKE_NAME
  controlPlaneConfig:
    servingCertificates: {}
  clusterInstallRef:
    group: extensions.hive.openshift.io
    kind: AgentClusterInstall
    name: $CHANGE_SPOKE_NAME
    version: v1beta1
  platform:
    agentBareMetal:
      agentSelector:
        matchLabels:
          cluster-name: "$CHANGE_SPOKE_NAME"
  pullSecretRef:
    name: $CHANGE_SPOKE_PULL_SECRET_NAME
---
apiVersion: agent.open-cluster-management.io/v1
kind: KlusterletAddonConfig
metadata:
  name: $CHANGE_SPOKE_NAME
  namespace: $CHANGE_SPOKE_NAME
spec:
  clusterName: $CHANGE_SPOKE_NAME
  clusterNamespace: $CHANGE_SPOKE_NAME
  clusterLabels:
    name: $CHANGE_SPOKE_NAME
    cloud: Baremetal
  applicationManager:
    argocdCluster: false
    enabled: true
  certPolicyController:
    enabled: true
  iamPolicyController:
    enabled: true
  policyController:
    enabled: true
  searchCollector:
    enabled: true
---
apiVersion: cluster.open-cluster-management.io/v1
kind: ManagedCluster
metadata:
  name: $CHANGE_SPOKE_NAME
  namespace: $CHANGE_SPOKE_NAME
  labels:
    name: $CHANGE_SPOKE_NAME
    ztpfw: "true"
spec:
  hubAcceptsClient: true
  leaseDurationSeconds: 60
---
apiVersion: agent-install.openshift.io/v1beta1
kind: InfraEnv
metadata:
  name: '$CHANGE_SPOKE_NAME'
  namespace: '$CHANGE_SPOKE_NAME'
spec:
  clusterRef:
    name: '$CHANGE_SPOKE_NAME'
    namespace: '$CHANGE_SPOKE_NAME'
  pullSecretRef:
    name: '$CHANGE_SPOKE_PULL_SECRET_NAME'
  nmStateConfigLabelSelector:
    matchLabels:
      nmstate_config_cluster_name: $CHANGE_SPOKE_NAME
  ignitionConfigOverride: '${JSON_STRING_CFG_OVERRIDE_INFRAENV}'
  sshAuthorizedKey: '$CHANGE_RSA_PUB_KEY'
EOF

    # Generic vars for all masters
    export CHANGE_SPOKE_MASTER_PUB_INT_MASK=24
    export CHANGE_SPOKE_MASTER_PUB_INT_GW=192.168.7.1
    export CHANGE_SPOKE_MASTER_PUB_INT_ROUTE_DEST=192.168.7.0/24

    # Now process blocks for each master
    for master in $(echo $(seq 0 $(($(yq eval ".spokes[${spokenumber}].[]|keys" ${SPOKES_FILE} | grep master | wc -l) - 1)))); do
        # Master loop: pull per-node NIC/BMC settings from the spokes file
        export CHANGE_SPOKE_MASTER_PUB_INT=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.nic_int_static" ${SPOKES_FILE})
        export CHANGE_SPOKE_MASTER_MGMT_INT=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.nic_ext_dhcp" ${SPOKES_FILE})
        export CHANGE_SPOKE_MASTER_PUB_INT_IP=192.168.7.1${master}
        export CHANGE_SPOKE_MASTER_PUB_INT_MAC=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.mac_int_static" ${SPOKES_FILE})
        export CHANGE_SPOKE_MASTER_BMC_USERNAME=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.bmc_user" ${SPOKES_FILE} | base64)
        export CHANGE_SPOKE_MASTER_BMC_PASSWORD=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.bmc_pass" ${SPOKES_FILE} | base64)
        export CHANGE_SPOKE_MASTER_BMC_URL=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.bmc_url" ${SPOKES_FILE})
        export CHANGE_SPOKE_MASTER_MGMT_INT_MAC=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.mac_ext_dhcp" ${SPOKES_FILE})
        export CHANGE_SPOKE_MASTER_ROOT_DISK=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.root_disk" ${SPOKES_FILE})

        # Now, write the template to disk
        OUTPUT="${OUTPUTDIR}/${cluster}-master-${master}.yaml"
        cat <<EOF >${OUTPUT}
---
apiVersion: agent-install.openshift.io/v1beta1
kind: NMStateConfig
metadata:
  name: ztpfw-${cluster}-master-${master}
  namespace: $CHANGE_SPOKE_NAME
  labels:
    nmstate_config_cluster_name: $CHANGE_SPOKE_NAME
spec:
  config:
    interfaces:
      - name: $CHANGE_SPOKE_MASTER_MGMT_INT
        type: ethernet
        state: up
        ethernet:
          auto-negotiation: true
          duplex: full
          speed: 10000
        ipv4:
          enabled: true
          dhcp: true
          auto-dns: true
          auto-gateway: true
          auto-routes: true
        mtu: 1500
      - name: $CHANGE_SPOKE_MASTER_PUB_INT
        type: ethernet
        state: up
        ethernet:
          auto-negotiation: true
          duplex: full
          speed: 1000
        ipv4:
          enabled: true
          address:
            - ip: $CHANGE_SPOKE_MASTER_PUB_INT_IP
              prefix-length: $CHANGE_SPOKE_MASTER_PUB_INT_MASK
        mtu: 1500
        mac-address: '$CHANGE_SPOKE_MASTER_PUB_INT_MAC'
EOF

        echo ">> Checking Ignored Interfaces"
        echo "Spoke: ${cluster}"
        echo "Master: ${master}"
        IGN_IFACES=$(yq eval ".spokes[${spokenumber}].${cluster}.master${master}.ignore_ifaces" ${SPOKES_FILE})
        if [[ ${IGN_IFACES} != "null" ]]; then
            yq eval -ojson ".spokes[${spokenumber}].${cluster}.master${master}.ignore_ifaces" ${SPOKES_FILE} | jq -c '.[]' | while read IFACE; do
                echo "Ignoring Interface: ${IFACE}"
                echo "      - name: ${IFACE}" >>${OUTPUT}
            done
        fi

        cat <<EOF >>${OUTPUT}
    routes:
      config:
        - destination: $CHANGE_SPOKE_MASTER_PUB_INT_ROUTE_DEST
          next-hop-address: $CHANGE_SPOKE_MASTER_PUB_INT_GW
          next-hop-interface: $CHANGE_SPOKE_MASTER_PUB_INT
EOF

        # SNO needs a default route via the public interface
        if [ "${NUM_M}" -eq "1" ]; then
            cat <<EOF >>${OUTPUT}
        - destination: 0.0.0.0/0
          next-hop-address: $CHANGE_SPOKE_MASTER_PUB_INT_GW
          next-hop-interface: $CHANGE_SPOKE_MASTER_PUB_INT
          metric: 99
          table-id: 254
EOF
        fi

        if [[ ${IGN_IFACES} != "null" ]]; then
            yq eval -ojson ".spokes[${spokenumber}].${cluster}.master${master}.ignore_ifaces" ${SPOKES_FILE} | jq -c '.[]' | while read IFACE; do
                echo "Ignoring route for: ${IFACE}"
                echo "        - next-hop-interface: ${IFACE}" >>${OUTPUT}
                echo "          state: absent" >>${OUTPUT}
            done
        fi

        cat <<EOF >>${OUTPUT}
  interfaces:
    - name: "$CHANGE_SPOKE_MASTER_MGMT_INT"
      macAddress: '$CHANGE_SPOKE_MASTER_MGMT_INT_MAC'
    - name: "$CHANGE_SPOKE_MASTER_PUB_INT"
      macAddress: '$CHANGE_SPOKE_MASTER_PUB_INT_MAC'
---
apiVersion: v1
kind: Secret
metadata:
  name: 'ztpfw-${cluster}-master-${master}-bmc-secret'
  namespace: '$CHANGE_SPOKE_NAME'
type: Opaque
data:
  username: '$CHANGE_SPOKE_MASTER_BMC_USERNAME'
  password: '$CHANGE_SPOKE_MASTER_BMC_PASSWORD'
---
apiVersion: metal3.io/v1alpha1
kind: BareMetalHost
metadata:
  name: 'ztpfw-${cluster}-master-${master}'
  namespace: '$CHANGE_SPOKE_NAME'
  labels:
    infraenvs.agent-install.openshift.io: '$CHANGE_SPOKE_NAME'
  annotations:
    inspect.metal3.io: disabled
    bmac.agent-install.openshift.io/hostname: 'ztpfw-${cluster}-master-${master}'
    bmac.agent-install.openshift.io/ignition-config-overrides: '${JSON_STRING_CFG_OVERRIDE_BMH}'
spec:
  online: false
  bootMACAddress: '$CHANGE_SPOKE_MASTER_MGMT_INT_MAC'
  rootDeviceHints:
    deviceName: '$CHANGE_SPOKE_MASTER_ROOT_DISK'
  bmc:
    disableCertificateVerification: true
    address: '$CHANGE_SPOKE_MASTER_BMC_URL'
    credentialsName: 'ztpfw-${cluster}-master-${master}-bmc-secret'
EOF
    done
}

## MAIN
# Load common vars
source ${WORKDIR}/shared-utils/common.sh

# Cleanup
echo ">>>> Cleaning up the previous BUILD folder"
find ${OUTPUTDIR} -type f | grep -vE 'spokes.yaml|pull-secret.json|kubeconfig-hub' | xargs rm -fv

# Check first item only
RESULT=$(yq eval ".spokes[0]" ${SPOKES_FILE})

if [ "${RESULT}" == "null" ]; then
    echo "Couldn't evaluate name of first spoke in YAML at $SPOKES_FILE, please check and retry"
    exit 1
fi

if [[ -z ${ALLSPOKES} ]]; then
    ALLSPOKES=$(yq e '(.spokes[] | keys)[]' ${SPOKES_FILE})
fi

# Render kustomization + manifests for every spoke in order
index=0
for spoke in ${ALLSPOKES}; do
    create_kustomization ${spoke} ${index}
    create_spoke_definitions ${spoke} ${index}
    index=$((index + 1))
done
package com.hebnu.cs.gd.dao; import com.hebnu.cs.gd.model.entity.SysPermission; import org.apache.ibatis.annotations.Param; import java.util.List; public interface SysPermissionMapper { //新增 public Long insert(SysPermission SysPermission); //更新 public void update(SysPermission SysPermission); //通过对象进行查询 public SysPermission select(SysPermission SysPermission); //通过id进行查询 public SysPermission selectById(@Param("id") Long id); //查询全部 public List<SysPermission> selectAll(); //查询数量 public int selectCounts(); boolean isExistName(@Param("groupId") long groupId, @Param("name") String name); boolean isExistCode(@Param("groupId") long groupId, @Param("code") String code); boolean isExistNameExcludeId(@Param("id") long id, @Param("groupId") long groupId, @Param("name") String name); boolean isExistCodeExcludeId(@Param("id") long id, @Param("groupId") long groupId, @Param("code") String code); }
/*
 * @(#)uploader.js
 */

/*
 * Author: <NAME>
 * Created: 2015/08/28
 * Description: The uploader module
 */

var Util = require('./utils.js');
var fs = require('fs');
var path = require('path');
var http = require('http');
var digestClient = require('http-digest-client');
var exec = require('child_process').exec;
var csv = require('csv');
var xml2js = require('xml2js');

var UPLOADER_DIRECTORY = '/uploader/';

// Builds a MarkLogic range-element-index configuration object: one string
// index (codepoint collation) per field name.
function makeIndexes(fields) {
    var indexes = [];
    fields.forEach(function(field) {
        var index = {
            'scalar-type': 'string',
            'namespace-uri': '',
            'localname': field,
            'collation': 'http://marklogic.com/collation/codepoint',
            'range-value-positions': false,
            'invalid-values': 'ignore'
        };
        indexes.push(index);
    });
    var obj = {
        'range-element-index': indexes
    };
    return obj;
}

// Persists the index configuration for the given fields to index.json.
function saveFields(fields) {
    var obj = makeIndexes(fields);
    fs.writeFile("index.json", JSON.stringify(obj, null, 2), function(err) {
        // BUG FIX: the original logged success unconditionally, even when the
        // write failed.
        if (err) {
            console.log("Failed to save the file: " + err);
        } else {
            console.log("The file was saved!");
        }
    });
}

// True for the file types the uploader knows how to import.
function isValidFileType(type) {
    return type === 'csv' || type === 'xml' || type === 'json';
}

// String prefix/suffix tests (String.prototype.startsWith/endsWith were not
// available on the Node version this module targeted).
function startsWith(str, prefix) {
    return str.slice(0, prefix.length) === prefix;
}

function endsWith(str, suffix) {
    // BUG FIX: str.slice(-0) returns the whole string, so an empty suffix
    // used to (incorrectly) report false; every string ends with ''.
    return suffix.length === 0 || str.slice(-suffix.length) === suffix;
}

// Deletes the uploaded temp file, then reports completion to the client.
// Responds success even if the unlink fails: the import itself already ran.
function cleanup(res, filepath, message) {
    fs.unlink(filepath, function(err) {
        res.json({
            success: true,
            message: message
        });
    });
}

// Authorization:
// Digest username="admin",
// realm="public",
// nonce="12c2f5945e5dfbfe0eb07cab5d31f96b",
// uri="/manage/v2/databases/analytics-dashboard-content/properties",
// response="4394f6634650a37635190b564a6d242d",
// opaque="51a1a2547c74dea8",
// qop=auth, nc=00000001, cnonce="723810329ed73e3b"

// Gets the range indexes in the database properties.
// Fetches the configured range indexes from the MarkLogic management API
// (digest auth, port 8002), attaches them to jobj.importer.indexes, and
// sends jobj as the JSON response.
function getIndexes(req, res, options, jobj) {
    var digest = digestClient(req.session.user.name, req.session.user.password);
    var apipath = '/manage/v2/databases/' + options.database + '/properties?format=json';
    digest.request({
        host: options.mlHost,
        path: apipath,
        port: 8002, // management API port
        method: 'GET',
        headers: {
            'Content-Type': 'application/json'
        }
    }, null, function(finalRes) {
        var body = '';
        finalRes.on('data', function(data) {
            body += data;
        });
        finalRes.on('end', function() {
            // Data reception is done; extract the configured range indexes.
            var indexes;
            try {
                var obj = JSON.parse(body);
                indexes = obj['range-element-index'];
            } catch (e) {
                // ROBUSTNESS FIX: a malformed management-API response used to
                // throw synchronously; fall back to "no indexes" instead.
                indexes = null;
            }
            if (!indexes) indexes = [];
            jobj.importer.indexes = {
                'range-element-index': indexes
            };
            res.json(jobj);
        });
        finalRes.on('error', function(err) {
            // On transport error, respond with an empty index list so the
            // client can still proceed.
            jobj.importer.indexes = {
                'range-element-index': []
            };
            res.json(jobj);
        });
    });
}

var uploader = {

    // Removes every document under the given directory in the content database.
    removeAll: function(req, res, marklogic, dbconfig) {
        var directory = req.body['directory'];
        var db = marklogic.createDatabaseClient(Util.getConnection(dbconfig, req));
        if (directory) {
            // Removes all documents
            db.documents.removeAll({directory: directory}).result(function(response) {
                res.json({
                    success: true,
                    message: 'All documents have been removed'
                });
            });
        } else {
            // ROBUSTNESS FIX: the original sent no response at all when the
            // directory was missing, leaving the client request hanging.
            res.json({
                success: false,
                message: 'directory is required'
            });
        }
    },

    // Inspects an uploaded .csv/.xml file, extracts its field names, and
    // responds with importer metadata plus the database's current indexes.
    upload: function(req, res, current_dir, options) {
        // req.body holds the text fields, if there were any;
        // req.file is the data file (from the multipart middleware).
        var filename = req.file.originalname;
        var filepath = path.join(current_dir, req.file.path);

        var isCsvFile = endsWith(filename, '.csv');
        if (isCsvFile || endsWith(filename, '.xml')) {
            var jobj = {
                success: true,
                importer: {
                    filename: filename,
                    filepath: filepath
                }
            };
            if (isCsvFile) {
                var parser = csv.parse({delimiter: ','}, function(err, data) {
                    // ROBUSTNESS FIX: a parse error used to crash on data[0].
                    if (err) {
                        res.json({ success: false, message: 'failed to parse csv file' });
                        return;
                    }
                    jobj.importer.filetype = 'csv';
                    jobj.importer.element = '';
                    // First CSV row is the header: it names the fields.
                    var fields = data[0];
                    jobj.importer.fields = fields;
                    jobj.importer.uri_id = fields[0];
                    getIndexes(req, res, options, jobj);
                });
                fs.createReadStream(filepath).pipe(parser);
            } else {
                // You can create one xml2js.Parser per file.
                var parser = new xml2js.Parser();
                fs.readFile(filepath, function(err, data) {
                    if (err) {
                        res.json({ success: false, message: 'failed to read xml file' });
                        return;
                    }
                    parser.parseString(data, function(err, result) {
                        if (err) {
                            res.json({ success: false, message: 'failed to parse xml file' });
                            return;
                        }
                        jobj.importer.filetype = 'xml';
                        // Root element names the directory; its first child
                        // element is the record element whose keys are fields.
                        var directory = Object.keys(result)[0];
                        var value = result[directory];
                        jobj.importer.element = Object.keys(value)[0];
                        var fields = Object.keys(value[jobj.importer.element][0]);
                        jobj.importer.fields = fields;
                        jobj.importer.uri_id = fields[0];
                        getIndexes(req, res, options, jobj);
                    });
                });
            }
        } else {
            res.json({
                success: false,
                message: 'file not supported'
            });
        }
    },

    // Runs mlcp to import the previously-uploaded file into MarkLogic, then
    // optionally PUTs the requested range indexes and deletes the temp file.
    load: function(req, res, current_dir, options) {
        var filename = req.body['filename'];
        var filepath = req.body['filepath'];
        var elementName = req.body['element'];
        var directory = '/' + elementName + '/';
        var mlcp = (process.platform === 'win32') ? 'mlcp.bat' : '/usr/local/mlcp/bin/mlcp.sh';
        // SECURITY NOTE(review): request-derived values (filepath, element,
        // uri_id) and the session password are interpolated into a shell
        // command; they should be validated/escaped before reaching here.
        var cmd = mlcp + ' import -mode local -host ' + options.mlHost +
            ' -port ' + options.mlPort +
            ' -username ' + req.session.user.name +
            ' -password ' + req.session.user.password +
            ' -input_file_path ' + filepath +
            ' -output_uri_prefix ' + directory +
            ' -output_uri_suffix .xml';

        var isCsvFile = endsWith(filename, '.csv');
        if (isCsvFile) {
            cmd += ' -input_file_type delimited_text ' +
                ' -delimited_root_name ' + elementName +
                ' -delimited_uri_id ' + req.body['uri_id'];
        } else if (endsWith(filename, '.xml')) {
            cmd += ' -input_file_type aggregates ' +
                ' -aggregate_record_element ' + elementName +
                ' -aggregate_uri_id ' + req.body['uri_id'];
        } else {
            res.json({
                success: false,
                message: 'file not supported'
            });
            // BUG FIX: the original fell through after responding and still
            // exec'd the (malformed) mlcp command.
            return;
        }
        console.log(cmd);
        var child = exec(cmd, function(error, stdout, stderr) {
            // Replace new line characters in stderr with <br/>
            var message = stderr.replace(/\r?\n|\r/g, '<br/>');
            // On success, error will be null. On error, error will be an
            // instance of Error and error.code will be the exit code of the
            // child process.
            if (error !== null) {
                message += 'Error Code: ' + error.code;
            }
            // Sets the range indexes in the database properties, if the
            // client sent any.
            var indexes = req.body['indexes'];
            if (indexes) {
                var digest = digestClient(req.session.user.name, req.session.user.password);
                var apipath = '/manage/v2/databases/' + options.database + '/properties?format=json';
                digest.request({
                    host: options.mlHost,
                    path: apipath,
                    port: 8002, // management API port
                    method: 'PUT',
                    headers: {
                        'Content-Type': 'application/json'
                    }
                }, JSON.stringify(indexes), function(finalRes) {
                    var body = '';
                    // The management API returns 204 (No Content) on success,
                    // so the body is normally empty.
                    finalRes.on('data', function(data) {
                        body += data;
                    });
                    finalRes.on('end', function() {
                        console.log('manage: ' + body);
                        cleanup(res, filepath, message);
                    });
                    finalRes.on('error', function(err) {
                        cleanup(res, filepath, message);
                    });
                });
            } else {
                cleanup(res, filepath, message);
            }
        });
    }
}

module.exports = uploader;
package com.stylefeng.guns.rest.modular.cinema; import com.alibaba.dubbo.config.annotation.Reference; import com.baomidou.mybatisplus.plugins.Page; import com.stylefeng.guns.api.cinema.CinemaServiceApi; import com.stylefeng.guns.api.cinema.vo.*; import com.stylefeng.guns.api.order.OrderServiceAPI; import com.stylefeng.guns.rest.modular.auth.VO.ResponseVO; import com.stylefeng.guns.rest.modular.cinema.vo.CinemaConditionResponseVO; import com.stylefeng.guns.rest.modular.cinema.vo.CinemaFieldResponseVO; import com.stylefeng.guns.rest.modular.cinema.vo.CinemaFieldsResponseVO; import lombok.extern.slf4j.Slf4j; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; import java.util.List; @Slf4j @RestController @RequestMapping("/cinema/") public class CinemaController { @Reference(interfaceClass = CinemaServiceApi.class, check = false,cache = "lru",connections = 10) private CinemaServiceApi cinemaServiceApi; @Reference(interfaceClass = OrderServiceAPI.class,check = false) private OrderServiceAPI orderServiceAPI; private static final String IMG_PRE = "http://img.meetingshop.cn/"; @RequestMapping(value = "getCinemas") public ResponseVO getCinemas(CinemaQueryVO cinemaQueryVO){ try{ // 按照五个条件进行筛选 Page<CinemaVO> cinemas = cinemaServiceApi.getCinemas(cinemaQueryVO); // 判断是否有满足条件的影院 if(cinemas.getRecords() == null || cinemas.getRecords().size()==0){ return ResponseVO.success("没有影院可查"); }else{ return ResponseVO.success(cinemas.getCurrent(),(int)cinemas.getPages(),"",cinemas.getRecords()); } }catch (Exception e){ // 如果出现异常,应该如何处理 log.error("获取影院列表异常",e); return ResponseVO.serviceFail("查询影院列表失败"); } } // 获取影院的查询条件 /* 1、热点数据 -> 放缓存 2、banner */ @RequestMapping(value = "getCondition") public ResponseVO getCondition(CinemaQueryVO cinemaQueryVO){ try{ // 获取三个集合,然后封装成一个对象返回即可 List<BrandVO> brands = cinemaServiceApi.getBrands(cinemaQueryVO.getBrandId()); 
List<AreaVO> areas = cinemaServiceApi.getAreas(cinemaQueryVO.getDistrictId()); List<HallTypeVO> hallTypes = cinemaServiceApi.getHallTypes(cinemaQueryVO.getHallType()); CinemaConditionResponseVO cinemaConditionResponseVO = new CinemaConditionResponseVO(); cinemaConditionResponseVO.setAreaList(areas); cinemaConditionResponseVO.setBrandList(brands); cinemaConditionResponseVO.setHalltypeList(hallTypes); return ResponseVO.success(cinemaConditionResponseVO); }catch (Exception e) { log.error("获取条件列表失败", e); return ResponseVO.serviceFail("获取影院查询条件失败"); } } @RequestMapping(value = "getFields") public ResponseVO getFields(Integer cinemaId){ try{ CinemaInfoVO cinemaInfoById = cinemaServiceApi.getCinemaInfoById(cinemaId); List<FilmInfoVO> filmInfoByCinemaId = cinemaServiceApi.getFilmInfoByCinemaId(cinemaId); CinemaFieldsResponseVO cinemaFieldResponseVO = new CinemaFieldsResponseVO(); cinemaFieldResponseVO.setCinemaInfo(cinemaInfoById); cinemaFieldResponseVO.setFilmList(filmInfoByCinemaId); return ResponseVO.success(IMG_PRE,cinemaFieldResponseVO); }catch (Exception e){ log.error("获取播放场次失败",e); return ResponseVO.serviceFail("获取播放场次失败"); } } @RequestMapping(value = "getFieldInfo",method = RequestMethod.POST) public ResponseVO getFieldInfo(Integer cinemaId,Integer fieldId){ try{ CinemaInfoVO cinemaInfoById = cinemaServiceApi.getCinemaInfoById(cinemaId); FilmInfoVO filmInfoByFieldId = cinemaServiceApi.getFilmInfoByFieldId(fieldId); HallInfoVO filmFieldInfo = cinemaServiceApi.getFilmFieldInfo(fieldId); // 造几个销售的假数据,后续会对接订单接口 filmFieldInfo.setSoldSeats(orderServiceAPI.getSoldSeatsByFieldId(fieldId)); CinemaFieldResponseVO cinemaFieldResponseVO = new CinemaFieldResponseVO(); cinemaFieldResponseVO.setCinemaInfo(cinemaInfoById); cinemaFieldResponseVO.setFilmInfo(filmInfoByFieldId); cinemaFieldResponseVO.setHallInfo(filmFieldInfo); return ResponseVO.success(IMG_PRE,cinemaFieldResponseVO); }catch (Exception e){ log.error("获取选座信息失败",e); return ResponseVO.serviceFail("获取选座信息失败"); } } }
package com.travelaudience.nexus.proxy;

import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.http.HttpClient;
import io.vertx.core.http.HttpClientRequest;
import io.vertx.core.http.HttpClientResponse;
import io.vertx.core.http.HttpHeaders;
import io.vertx.core.http.HttpMethod;
import io.vertx.core.http.HttpServerRequest;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.ext.web.RoutingContext;

/**
 * A basic class which proxies user requests to a Nexus instance, conveying authentication information.
 *
 * @see <a href="https://books.sonatype.com/nexus-book/reference3/security.html#remote-user-token">Authentication via Remote User Token</a>
 */
public final class NexusHttpProxy {
    // Optimized (pre-hashed) header-name constants for Vert.x header maps.
    private static final CharSequence X_FORWARDED_PROTO = HttpHeaders.createOptimized("X-Forwarded-Proto");
    private static final CharSequence X_FORWARDED_FOR = HttpHeaders.createOptimized("X-Forwarded-For");

    // Upstream Nexus host to proxy to.
    private final String host;
    // Shared Vert.x HTTP client used for all proxied requests.
    private final HttpClient httpClient;
    // Name of the Remote-User-Token header Nexus trusts for authentication.
    private final String nexusRutHeader;
    // Upstream Nexus port to proxy to.
    private final int port;
    // Whether the client's own Authorization header may be forwarded upstream.
    private final boolean passThruAuthHeader;

    private NexusHttpProxy(final Vertx vertx,
                           final String host,
                           final int port,
                           final boolean passThruAuthHeader) {
        this.host = host;
        this.httpClient = vertx.createHttpClient();
        // Fixed RUT header name; Nexus must be configured to trust it.
        this.nexusRutHeader = "X-Auth-Username";
        this.port = port;
        this.passThruAuthHeader = passThruAuthHeader;
    }

    /**
     * Creates a new instance of {@link NexusHttpProxy}.
     *
     * @param vertx the base {@link Vertx} instance.
     * @param host the host we will be proxying to.
     * @param port the port we will be proxying to.
     * @param passThruAuthHeader whether the original Authorization header may be forwarded upstream.
     * @return a new instance of {@link NexusHttpProxy}.
     */
    public static final NexusHttpProxy create(final Vertx vertx,
                                              final String host,
                                              final int port,
                                              final boolean passThruAuthHeader) {
        return new NexusHttpProxy(vertx, host, port, passThruAuthHeader);
    }

    /**
     * Proxies the specified HTTP request, enriching its headers with authentication information.
     *
     * @param userId      the ID of the user making the request.
     * @param accessToken the validated JWT token, or {@code null} if the request carried none.
     * @param origReq     the original request (i.e., {@link RoutingContext#request()}).
     * @param origRes     the original response (i.e., {@link RoutingContext#response()}).
     */
    public void proxyUserRequest(final String userId,
                                 final String accessToken,
                                 final HttpServerRequest origReq,
                                 final HttpServerResponse origRes) {
        // Stream the upstream response straight back to the client, chunked
        // (Content-Length is dropped because chunked encoding is used).
        final Handler<HttpClientResponse> proxiedResHandler = proxiedRes -> {
            origRes.setChunked(true);
            origRes.setStatusCode(proxiedRes.statusCode());
            origRes.headers().setAll(proxiedRes.headers());
            origRes.headers().remove(HttpHeaders.CONTENT_LENGTH);
            proxiedRes.handler(origRes::write);
            proxiedRes.endHandler(v -> origRes.end());
        };

        final HttpClientRequest proxiedReq;
        proxiedReq = httpClient.request(origReq.method(), port, host, origReq.uri(), proxiedResHandler);
        // Non-standard HTTP methods must be forwarded by their raw name.
        if(origReq.method() == HttpMethod.OTHER) {
            proxiedReq.setRawMethod(origReq.rawMethod());
        }
        proxiedReq.setChunked(true);
        // Add forwarding headers first, preserving any values the client
        // already sent; then copy the remaining original headers.
        proxiedReq.headers().add(X_FORWARDED_PROTO, getHeader(origReq, X_FORWARDED_PROTO, origReq.scheme()));
        proxiedReq.headers().add(X_FORWARDED_FOR, getHeader(origReq, X_FORWARDED_FOR, origReq.remoteAddress().host()));
        proxiedReq.headers().addAll(origReq.headers());
        // Don't pass auth header to upstream if there's a valid JWT,
        // or if pass-through is disabled altogether.
        if (!passThruAuthHeader || accessToken != null) {
            proxiedReq.headers().remove(HttpHeaders.AUTHORIZATION);
        }
        // Always include valid JWT in header
        if (accessToken != null) {
            proxiedReq.headers().add("X-Auth-Token", accessToken);
        }
        // Chunked upload: the original Content-Length no longer applies.
        proxiedReq.headers().remove(HttpHeaders.CONTENT_LENGTH);
        injectRutHeader(proxiedReq, userId);
        origReq.handler(proxiedReq::write);
        origReq.endHandler(v -> proxiedReq.end());
    }

    // Adds the Remote-User-Token header only when both the header name and
    // the user id are non-empty, so anonymous requests stay anonymous.
    private final void injectRutHeader(final HttpClientRequest req, final String userId) {
        if (nexusRutHeader != null && nexusRutHeader.length() > 0
                && userId != null && userId.length() > 0) {
            req.headers().add(nexusRutHeader, userId);
        }
    }

    // Returns the named header from the original request, or the supplied
    // default when the client did not send it.
    private static final String getHeader(final HttpServerRequest req,
                                          final CharSequence name,
                                          final String defaultValue) {
        final String originalHeader = req.headers().get(name);
        if (originalHeader == null) {
            return defaultValue;
        } else {
            return originalHeader;
        }
    }
}
<gh_stars>0 $(document).ready(function () { /* * SPARKLINE */ function sparklineBar(id, values, height, barWidth, barColor, barSpacing) { $('.'+id).sparkline(values, { type: 'bar', height: height, barWidth: barWidth, barColor: barColor, barSpacing: barSpacing }) } function sparklineLine(id, values, width, height, lineColor, fillColor, lineWidth, maxSpotColor, minSpotColor, spotColor, spotRadius, hSpotColor, hLineColor) { $('.'+id).sparkline(values, { type: 'line', width: width, height: height, lineColor: lineColor, fillColor: fillColor, lineWidth: lineWidth, maxSpotColor: maxSpotColor, minSpotColor: minSpotColor, spotColor: spotColor, spotRadius: spotRadius, highlightSpotColor: hSpotColor, highlightLineColor: hLineColor }); } function sparklinePie(id, values, width, height, sliceColors) { $('.'+id).sparkline(values, { type: 'pie', width: width, height: height, sliceColors: sliceColors, offset: 0, borderWidth: 0 }); } /* Mini Chart - Bar Chart 1 */ if ($('.stats-bar')[0]) { sparklineBar('stats-bar', [6,4,8,6,5,6,7,8,3,5,9,5,8,4,3,6,8], '45px', 3, '#fff', 2); } /* Mini Chart - Bar Chart 2 */ if ($('.stats-bar-2')[0]) { sparklineBar('stats-bar-2', [4,7,6,2,5,3,8,6,6,4,8,6,5,8,2,4,6], '45px', 3, '#fff', 2); } /* Mini Chart - Line Chart 1 */ if ($('.stats-line')[0]) { sparklineLine('stats-line', [9,4,6,5,6,4,5,7,9,3,6,5], 85, 45, '#fff', 'rgba(0,0,0,0)', 1.25, 'rgba(255,255,255,0.4)', 'rgba(255,255,255,0.4)', 'rgba(255,255,255,0.4)', 3, '#fff', 'rgba(255,255,255,0.4)'); } /* Mini Chart - Line Chart 2 */ if ($('.stats-line-2')[0]) { sparklineLine('stats-line-2', [5,6,3,9,7,5,4,6,5,6,4,9], 85, 45, '#fff', 'rgba(0,0,0,0)', 1.25, 'rgba(255,255,255,0.4)', 'rgba(255,255,255,0.4)', 'rgba(255,255,255,0.4)', 3, '#fff', 'rgba(255,255,255,0.4)'); } /* Mini Chart - Pie Chart 1 */ if ($('.stats-pie')[0]) { sparklinePie('stats-pie', [20, 35, 30, 5], 45, 45, ['#fff', 'rgba(255,255,255,0.7)', 'rgba(255,255,255,0.4)', 'rgba(255,255,255,0.2)']); } /* Dash Widget Line Chart */ if 
($('.dash-widget-visits')[0]) { sparklineLine('dash-widget-visits', [9,4,6,5,6,4,5,7,9,3,6,5], '100%', '95px', 'rgba(255,255,255,0.7)', 'rgba(0,0,0,0)', 2, 'rgba(255,255,255,0.4)', 'rgba(255,255,255,0.4)', 'rgba(255,255,255,0.4)', 5, 'rgba(255,255,255,0.4)', '#fff'); } /* * Easy Pie Charts - Used in widgets */ function easyPieChart(id, trackColor, scaleColor, barColor, lineWidth, lineCap, size) { $('.'+id).easyPieChart({ trackColor: trackColor, scaleColor: scaleColor, barColor: barColor, lineWidth: lineWidth, lineCap: lineCap, size: size }); } /* Main Pie Chart */ if ($('.main-pie')[0]) { // easyPieChart('main-pie', 'rgba(255,255,255,0.2)', 'rgba(255,255,255,0.5)', 'rgba(255,255,255,0.7)', 7, 'butt', 148); easyPieChart('main-pie', '#eee', '#ccc', '#FFC107', 7, 'butt', 148); } /* Others */ if ($('.sub-pie-1')[0]) { easyPieChart('sub-pie-1', '#eee', '#ccc', '#2196F3', 4, 'butt', 95); } if ($('.sub-pie-2')[0]) { easyPieChart('sub-pie-2', '#eee', '#ccc', '#FFC107', 4, 'butt', 95); } });
<reponame>mindhivenz/meteor-base<filename>store/SubscriptionDocStore.js
// Re-export the transpiled implementation so consumers can require this
// source-tree path while actually running the compiled code from dist/.
module.exports = require('./../dist/store/SubscriptionDocStore')
#!/usr/bin/env bash

# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Pulls the build image and runs paddle_build.sh inside it, forwarding all
# script arguments. Prefers nvidia-docker when available, else plain docker.
function start_build_docker() {
    docker pull $IMG

    # sed expression that switches apt to the Ubuntu mirror list.
    apt_mirror='s#http://archive.ubuntu.com/ubuntu#mirror://mirrors.ubuntu.com/mirrors.txt#g'
    DOCKER_ENV=$(cat <<EOL
        -e FLAGS_fraction_of_gpu_memory_to_use=0.15 \
        -e CTEST_OUTPUT_ON_FAILURE=1 \
        -e CTEST_PARALLEL_LEVEL=1 \
        -e APT_MIRROR=${apt_mirror} \
        -e WITH_GPU=ON \
        -e CUDA_ARCH_NAME=Auto \
        -e WITH_AVX=ON \
        -e WITH_GOLANG=OFF \
        -e WITH_TESTING=ON \
        -e WITH_COVERAGE=ON \
        -e COVERALLS_UPLOAD=ON \
        -e WITH_DEB=OFF \
        -e CMAKE_BUILD_TYPE=RelWithDebInfo \
        -e PADDLE_FRACTION_GPU_MEMORY_TO_USE=0.15 \
        -e CUDA_VISIBLE_DEVICES=0,1 \
        -e WITH_DISTRIBUTE=ON \
        -e WITH_FLUID_ONLY=ON \
        -e RUN_TEST=ON
EOL
)
    # Fall back to plain docker when nvidia-docker is not installed.
    DOCKER_CMD="nvidia-docker"
    if ! [ -x "$(command -v ${DOCKER_CMD})" ]; then
        DOCKER_CMD="docker"
    fi

    # BUG FIX: mkdir -p is idempotent, so the test-then-create race is gone.
    mkdir -p "${HOME}/.ccache"

    set -ex
    ${DOCKER_CMD} run -it \
        ${DOCKER_ENV} \
        -e SCRIPT_NAME=$0 \
        -e CONTENT_DEC_PASSWD=$CONTENT_DEC_PASSWD \
        -e TRAVIS_BRANCH=$TRAVIS_BRANCH \
        -e TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST \
        -v $PADDLE_ROOT:/paddle \
        -v ${HOME}/.ccache:/root/.ccache \
        -w /paddle \
        $IMG \
        paddle/scripts/paddle_build.sh "$@"
    set +x
}

function main() {
    DOCKER_REPO="paddlepaddle/paddle"
    VERSION="latest-dev"
    PADDLE_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}")/../../" && pwd )"
    IMG=${DOCKER_REPO}:${VERSION}
    # BUG FIX: quote "$@" so arguments containing whitespace survive
    # forwarding (unquoted $@ re-splits them).
    start_build_docker "$@"
}

main "$@"
<reponame>MrPepperoni/Reaping2-1
#ifndef INCLUDED_MAP_PROPERTY_EDITOR_BASE_SYSTEM_H
#define INCLUDED_MAP_PROPERTY_EDITOR_BASE_SYSTEM_H

#include "platform/i_platform.h"
#include "engine/system.h"
#include "../../input/keyboard.h"
#include "editor_hud_state.h"
#include "editor_back_event.h"
#include "../../engine/engine.h"
#include "ui/ui.h"
#include "room_editor_loaded_event.h"
#include "level_generator/i_room.h"

namespace map {

// Template base for editor systems that edit room properties of a given
// PROPERTY type. EDITOR is the concrete derived system type, used to
// enable/disable that system when the editor mode changes.
template<typename EDITOR, typename PROPERTY>
class PropertyEditorBaseSystem : public engine::System
{
public:
    // HUD: name of the UI layout to load for this editor.
    // mode: editor mode string this system responds to.
    PropertyEditorBaseSystem(std::string const& HUD, std::string const& mode);
protected:
    std::string mHUD;   // UI layout loaded when this editor becomes active
    std::string mMode;  // editor mode name that activates this system
    // Enables/disables the EDITOR system when the editor mode changes.
    void HandleModeChanged( map::EditorModeChangedEvent const& Evt );
    // Toggles the HUD when the space key is typed.
    void HandleSpace();
    // Re-shows the HUD on a "back" event unless returning to the base HUD.
    void HandleEditorBack( map::EditorBackEvent const& Evt );
    // Rebuilds mPropNames from the current room's properties.
    virtual void LoadPropNames();
    std::vector<std::string> mPropNames;  // "prop<UID>" names of matching properties
    Opt<RoomDesc> mRoomDesc;              // current room, set on room-editor load
    AutoReg mOnRoomEditorLoaded;          // subscription handle for the load event
    void OnRoomEditorLoaded( map::RoomEditorLoadedEvent const& Evt );
    virtual void Init();
};

// Collects names for every property of this editor's PROPERTY type in the
// current room; names are "prop" + property UID.
template<typename EDITOR, typename PROPERTY>
void map::PropertyEditorBaseSystem<EDITOR, PROPERTY>::LoadPropNames()
{
    mPropNames.clear();
    if (!mRoomDesc.IsValid())
    {
        return;
    }
    for (auto&& prop : mRoomDesc->GetRoom()->GetProperties())
    {
        if (prop.GetType() == PROPERTY::GetType_static())
        {
            mPropNames.push_back( "prop" + std::to_string( prop.GetUID() ) );
        }
    }
}

// Caches the loaded room and refreshes the property-name list.
template<typename EDITOR, typename PROPERTY>
void PropertyEditorBaseSystem<EDITOR, PROPERTY>::OnRoomEditorLoaded( map::RoomEditorLoadedEvent const& Evt )
{
    mRoomDesc = Evt.mRoomDesc;
    LoadPropNames();
}

template<typename EDITOR, typename PROPERTY>
void PropertyEditorBaseSystem<EDITOR, PROPERTY>::Init()
{
    mOnRoomEditorLoaded = EventServer<map::RoomEditorLoadedEvent>::Get().Subscribe( boost::bind( &PropertyEditorBaseSystem::OnRoomEditorLoaded, this, _1 ) );
}

// NOTE(review): mEnabled is not declared in this class — presumably inherited
// from engine::System; confirm against the base class.
template<typename EDITOR, typename PROPERTY>
void map::PropertyEditorBaseSystem<EDITOR, PROPERTY>::HandleEditorBack( map::EditorBackEvent const& Evt )
{
    if (mEnabled)
    {
        if (!Evt.mBackToBaseHud)
        {
            // Going back one level (not to the base HUD): restore this
            // editor's own HUD.
            Ui::Get().Load( mHUD );
            EditorHudState::Get().SetHudShown( true );
        }
    }
}

// Space toggles the HUD: hides it via a back event when shown, shows this
// editor's HUD otherwise.
template<typename EDITOR, typename PROPERTY>
void PropertyEditorBaseSystem<EDITOR, PROPERTY>::HandleSpace()
{
    static Opt<engine::KeyboardSystem> keyboard = ::engine::Engine::Get().GetSystem<engine::KeyboardSystem>();
    if (keyboard->GetKey( GLFW_KEY_SPACE ).State == KeyState::Typed)
    {
        if (EditorHudState::Get().IsHudShown())
        {
            EventServer<EditorBackEvent>::Get().SendEvent( EditorBackEvent( true ) );
        }
        else
        {
            Ui::Get().Load( mHUD );
            EditorHudState::Get().SetHudShown( true );
        }
    }
}

template<typename EDITOR, typename PROPERTY>
PropertyEditorBaseSystem<EDITOR, PROPERTY>::PropertyEditorBaseSystem( std::string const& HUD, std::string const& mode )
    : mHUD(HUD)
    , mMode(mode)
{
}

// Enables the concrete EDITOR system (and shows its HUD) when its mode is
// selected; disables it for any other mode.
template<typename EDITOR, typename PROPERTY>
void PropertyEditorBaseSystem<EDITOR, PROPERTY>::HandleModeChanged( map::EditorModeChangedEvent const& Evt )
{
    if (Evt.mMode == mMode)
    {
        ::engine::Engine::Get().SetEnabled<EDITOR>( true );
        Ui::Get().Load( mHUD );
        EditorHudState::Get().SetHudShown( true );
    }
    else
    {
        ::engine::Engine::Get().SetEnabled<EDITOR>( false );
    }
}

} // namespace map

#endif//INCLUDED_MAP_PROPERTY_EDITOR_BASE_SYSTEM_H

//command:  "classgenerator.exe" -g "system" -c "property_editor_base_system" -n "map"
<filename>test/analysis/constraints_type_inference.ts import * as infer from '../../src/analysis/type_inference'; import TypeMap from '../../src/type_map'; import { ConstraintTypeUsage, TypeInfo, TypeUsage } from '../../src/type_map'; import { EnumNode, EqualityNode, GenderNode, IdentifierNode, IneqNode, Node, NumberNode, } from '../../src/trees/constraint'; const makeEmptyPos = () => ({ firstColumn: 0, firstLine: 1, lastColumn: 0, lastLine: 1, }); function getTypeInfo(typeMap: TypeMap, variable: string): TypeInfo { expect(typeMap.hasInfoForType(variable)).toBe(true); return typeMap.getVariableTypeInfo(variable); } function makeNumberNode(variable: string, op: '=' | '!=' | '>' | '<' | '<=' | '>=' = '='): Node { const numberNode: NumberNode = { pos: makeEmptyPos(), type: 'number', value: 5, }; const varNode: IdentifierNode = { name: variable, pos: makeEmptyPos(), type: 'identifier', }; if (op === '=' || op === '!=') { const res: EqualityNode = { lhs: varNode, op: op as '=' | '!=', pos: makeEmptyPos(), rhs: numberNode, }; return res; } else { const res: IneqNode = { lhs: varNode, op: op as '>' | '<' | '<=' | '>=', pos: makeEmptyPos(), rhs: numberNode, }; return res; } } function makeEnumNode(variable: string, op: '=' | '!=' = '='): Node { const enumNode: EnumNode = { pos: makeEmptyPos(), type: 'enum', value: 'some-enum', }; const varNode: IdentifierNode = { name: variable, pos: makeEmptyPos(), type: 'identifier', }; const res: EqualityNode = { lhs: varNode, op: op, pos: makeEmptyPos(), rhs: enumNode, }; return res; } function makeGenderNode(variable: string, op: '=' | '!=' = '='): Node { const genderNode: GenderNode = { pos: makeEmptyPos(), type: 'gender', value: 'F', }; const varNode: IdentifierNode = { name: variable, pos: makeEmptyPos(), type: 'identifier', }; const res: EqualityNode = { lhs: varNode, op: op, pos: makeEmptyPos(), rhs: genderNode, }; return res; } function makeIgnoreNode(variable: string): Node { const varNode: IdentifierNode = { name: variable, 
pos: makeEmptyPos(), type: 'identifier', }; return { op: '!', operand: varNode, pos: makeEmptyPos(), }; } function assertConstraintUsage(usage: TypeUsage): ConstraintTypeUsage { expect('constraint').toEqual(usage.nodeType); if (usage.nodeType !== 'constraint') { throw new Error('Make flow happy'); } return usage; } describe('Type inference', () => { describe('Constraints', () => { describe('Ignore', () => { it('Should infer ignore constraints as unknown', () => { const constraints = [makeIgnoreNode('i')]; const typeMap = new TypeMap(); const textAST = { input: 'test', nodes: [], }; const constraintAST = { input: '!i', nodes: constraints, }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'i'); expect({ type: 'unknown', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'unknown', }, ], }).toEqual(typeInfo); const usage = assertConstraintUsage(typeInfo.usages[0]); expect(constraints[0]).toEqual(usage.node); expect(constraintAST).toEqual(usage.location.constraints); expect(textAST).toEqual(usage.location.text); }); }); describe('Numbers', () => { it('Should infer number equals as number', () => { const constraints = [makeNumberNode('n', '=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); it('Should infer number not-equals as number', () => { const constraints = [makeNumberNode('n', '!=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n!=5', 
nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); it('Should infer number less-than as number', () => { const constraints = [makeNumberNode('n', '<')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n<5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); it('Should infer number less-than-equals as number', () => { const constraints = [makeNumberNode('n', '<=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n<=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); it('Should infer number greater-than as number', () => { const constraints = [makeNumberNode('n', '>')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n>5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); 
expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); it('Should infer number greater-than-equals as number', () => { const constraints = [makeNumberNode('n', '>=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n>=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); }); describe('Genders', () => { it('Should infer gender equals as gender type', () => { const constraints = [makeGenderNode('g', '=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'g=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'g'); expect({ type: 'gender', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'gender', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); it('Should infer gender not-equals as gender type', () => { const constraints = [makeGenderNode('g', '!=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'g!=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'g'); expect({ type: 'gender', usages: [ 
{ location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'gender', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); }); describe('Enums', () => { it('Should infer enum equals as enum type', () => { const constraints = [makeEnumNode('e', '=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'e=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'e'); expect({ type: 'enum', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'enum', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); it('Should infer enum not-equals as enum type', () => { const constraints = [makeEnumNode('e', '!=')]; const typeMap = new TypeMap(); const constraintAST = { input: 'e!=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'e'); expect({ type: 'enum', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'enum', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); }); }); describe('Multiple distinct variable', () => { it('Should infer types for both variables', () => { const constraints = [makeEnumNode('e'), makeNumberNode('i')]; const typeMap = new TypeMap(); const constraintAST = { input: 'e="enum", i=5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(2).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'e'); expect({ type: 'enum', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: 
constraints[0], nodeType: 'constraint', type: 'enum', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); const numberTypeInfo = getTypeInfo(typeMap, 'i'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[1], nodeType: 'constraint', type: 'number', }, ], }).toEqual(numberTypeInfo); assertConstraintUsage(numberTypeInfo.usages[0]); }); }); describe('Same variable multiple constraints', () => { describe('Initial number', () => { it('Is number when used as number two times', () => { const constraints = [makeNumberNode('n'), makeNumberNode('n', '>')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n=5, n>5', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, { location: { constraints: constraintAST, text: textAST, }, node: constraints[1], nodeType: 'constraint', type: 'number', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); assertConstraintUsage(typeInfo.usages[1]); }); it('Is number when used as ignore', () => { const constraints = [makeNumberNode('n'), makeIgnoreNode('n')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n=5,!n', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'number', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, { location: { constraints: constraintAST, text: textAST, }, node: constraints[1], nodeType: 'constraint', type: 'unknown', }, ], 
}).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); assertConstraintUsage(typeInfo.usages[1]); }); }); describe('Fail cases', () => { it('Cannot reconcile with enum', () => { const constraints = [makeNumberNode('n'), makeEnumNode('n')]; const typeMap = new TypeMap(); const constraintAST = { input: 'n=5,n="enum"', nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'error', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, { location: { constraints: constraintAST, text: textAST, }, node: constraints[1], nodeType: 'constraint', type: 'enum', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); assertConstraintUsage(typeInfo.usages[1]); }); it('Cannot reconcile with gender', () => { const constraints = [makeNumberNode('n'), makeGenderNode('n')]; const typeMap = new TypeMap(); const constraintAST = { input: "n=5,n='F'", nodes: constraints, }; const textAST = { input: 'test', nodes: [], }; infer.inferConstraintTypes(typeMap, constraintAST, textAST); expect(1).toEqual(typeMap.size); const typeInfo = getTypeInfo(typeMap, 'n'); expect({ type: 'error', usages: [ { location: { constraints: constraintAST, text: textAST, }, node: constraints[0], nodeType: 'constraint', type: 'number', }, { location: { constraints: constraintAST, text: textAST, }, node: constraints[1], nodeType: 'constraint', type: 'gender', }, ], }).toEqual(typeInfo); assertConstraintUsage(typeInfo.usages[0]); assertConstraintUsage(typeInfo.usages[1]); }); }); }); }); });
#!/bin/bash
# Build script for the pandoc-based document pipeline.
# Usage: ./build.sh <target>   where <target> is one of:
#   download_csl | pdf | html | epub

CONTENTDIR="content"
BUILDDIR="build"
FILENAME="index"
ASSETSDIR="assets"

# Download the Harvard (Anglia Ruskin) citation style sheet into assets/.
download_csl() {
    # Option before operand: `mkdir DIR -p` is accepted only by GNU mkdir;
    # `mkdir -p DIR` works on all POSIX implementations.
    mkdir -p "${ASSETSDIR}"
    wget -O "${ASSETSDIR}/citation-style.csl" \
        "https://raw.githubusercontent.com/citation-style-language/styles/master/harvard-anglia-ruskin-university.csl"
}

# Render content/index.md to PDF via xelatex.
pdf() {
    mkdir -p "${BUILDDIR}"
    echo "Creating pdf output"
    pandoc "${CONTENTDIR}/${FILENAME}.md" \
        --resource-path="${CONTENTDIR}" \
        --citeproc \
        --csl="${ASSETSDIR}/citation-style.csl" \
        --from="markdown+tex_math_single_backslash+tex_math_dollars" \
        --to="latex" \
        --output="${BUILDDIR}/output.pdf" \
        --pdf-engine="xelatex"
}

# Render content/index.md to a single self-contained HTML5 file.
html() {
    mkdir -p "${BUILDDIR}"
    echo "Creating html output"
    pandoc "${CONTENTDIR}/${FILENAME}.md" \
        --resource-path="${CONTENTDIR}" \
        --citeproc \
        --csl="${ASSETSDIR}/citation-style.csl" \
        --from="markdown+tex_math_single_backslash+tex_math_dollars" \
        --to="html5" \
        --output="${BUILDDIR}/output.html" \
        --self-contained
}

# Render content/index.md to EPUB.
epub() {
    mkdir -p "${BUILDDIR}"
    echo "Creating epub output"
    pandoc "${CONTENTDIR}/${FILENAME}.md" \
        --resource-path="${CONTENTDIR}" \
        --citeproc \
        --csl="${ASSETSDIR}/citation-style.csl" \
        --from="markdown+tex_math_single_backslash+tex_math_dollars" \
        --to="epub" \
        --output="${BUILDDIR}/output.epub"
}

# Allows to call a function based on arguments passed to the script
# Example: `./build.sh pdf`
# "$@" (not $*) keeps each argument as its own word, so quoted arguments are
# not re-split; with no arguments `$*` silently did nothing — fail loudly.
if [[ $# -eq 0 ]]; then
    echo "Usage: $0 {download_csl|pdf|html|epub}" >&2
    exit 1
fi
"$@"
package de.lmu.cis.ocrd.ml;

import com.google.gson.Gson;
import org.pmw.tinylog.Logger;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * Protocol implementation for the lexicon-extension (LE) step: records, per
 * normalized master-OCR word, how many correction decisions were taken
 * ("yes") or rejected ("no"), together with the most recently seen
 * confidence. The whole state is (de)serialized as JSON via Gson.
 */
public class LEProtocol implements Protocol {
	// Aggregate for one word: decision count and the last recorded
	// confidence. NOTE(review): confidence is overwritten on each call
	// ("last wins"), not averaged — confirm this is intended.
	private static class ProtocolValue {
		private double confidence = 0;
		private int count = 0;
	}

	// JSON-serializable container mapping word -> aggregate, split by
	// decision outcome. NOTE(review): this inner class shadows the
	// implemented interface name `Protocol`; legal, but easy to misread.
	private static class Protocol {
		private final Map<String, ProtocolValue> yes = new HashMap<>();
		private final Map<String, ProtocolValue> no = new HashMap<>();
	}

	private Protocol protocol = new Protocol();

	/**
	 * Replaces the current state with JSON read from the given stream,
	 * decoded as UTF-8.
	 */
	@Override
	public void read(InputStream is) {
		protocol = new Gson().fromJson(new InputStreamReader(is, StandardCharsets.UTF_8), protocol.getClass());
	}

	/**
	 * Writes the current state as UTF-8 encoded JSON to the given stream.
	 * The stream is neither flushed nor closed here; that is the caller's job.
	 */
	@Override
	public void write(OutputStream out) throws Exception {
		out.write(new Gson().toJson(protocol).getBytes(StandardCharsets.UTF_8));
	}

	/**
	 * Records one correction decision for the given token under its
	 * normalized master-OCR word form.
	 *
	 * @param token      the OCR token the decision was made for
	 * @param correction the correction candidate (logged only)
	 * @param confidence confidence of the decision; stored, replacing any
	 *                   previously stored value for the word
	 * @param taken      true if the correction was accepted ("yes" map),
	 *                   false if rejected ("no" map)
	 */
	@Override
	public void protocol(OCRToken token, String correction, double confidence, boolean taken) {
		Logger.debug("putting token into le protocol: {} {} {} {}", token, correction, confidence, taken);
		// do *not* ignore case
		final String word = token.getMasterOCR().getWordNormalized();
		ProtocolValue val;
		if (taken) {
			val = protocol.yes.computeIfAbsent(word, k -> new ProtocolValue());
		} else {
			val = protocol.no.computeIfAbsent(word, k -> new ProtocolValue());
		}
		val.count++;
		// count accumulates; confidence is the last value seen
		val.confidence = confidence;
	}
}
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# In-container CI entrypoint: prepares the Airflow environment (installation
# source selected by USE_AIRFLOW_VERSION), optionally installs packages from
# /dist, sets up SSH, and then either starts airflow in tmux, drops into
# bash, or runs the pytest suite selected by TEST_TYPE.
if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
    set -x
fi
# shellcheck source=scripts/in_container/_in_container_script_init.sh
. /opt/airflow/scripts/in_container/_in_container_script_init.sh
# Add "other" and "group" write permission to the tmp folder
# Note that it will also change permissions in the /tmp folder on the host
# but this is necessary to enable some of our CLI tools to work without errors
chmod 1777 /tmp
AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd)
PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.6}
export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
: "${AIRFLOW_SOURCES:?"ERROR: AIRFLOW_SOURCES not set !!!!"}"
echo
echo "Airflow home: ${AIRFLOW_HOME}"
echo "Airflow sources: ${AIRFLOW_SOURCES}"
echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
echo
RUN_TESTS=${RUN_TESTS:="false"}
CI=${CI:="false"}
USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}"
# Flag 1.10-series runs so downstream scripts can adapt
if [[ ${AIRFLOW_VERSION} == *1.10* || ${USE_AIRFLOW_VERSION} == *1.10* ]]; then
    export RUN_AIRFLOW_1_10="true"
else
    export RUN_AIRFLOW_1_10="false"
fi
# Select where airflow comes from: "" (sources), "none", "wheel", "sdist",
# or a released version string for PyPI.
if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then
    export PYTHONPATH=${AIRFLOW_SOURCES}
    echo
    echo "Using already installed airflow version"
    echo
    if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then
        pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null
        ./ask_for_recompile_assets_if_needed.sh
        popd >/dev/null
    fi
    # Cleanup the logs, tmp when entering the environment
    sudo rm -rf "${AIRFLOW_SOURCES}"/logs/*
    sudo rm -rf "${AIRFLOW_SOURCES}"/tmp/*
    mkdir -p "${AIRFLOW_SOURCES}"/logs/
    mkdir -p "${AIRFLOW_SOURCES}"/tmp/
elif [[ ${USE_AIRFLOW_VERSION} == "none" ]]; then
    echo
    echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally"
    echo
    uninstall_airflow_and_providers
elif [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then
    echo
    echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
    echo
    uninstall_airflow_and_providers
    install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]"
    uninstall_providers
elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then
    echo
    echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
    echo
    uninstall_airflow_and_providers
    install_airflow_from_sdist "[${AIRFLOW_EXTRAS}]"
    uninstall_providers
else
    echo
    echo "Install airflow from PyPI without extras"
    echo
    install_released_airflow_version "${USE_AIRFLOW_VERSION}"
fi
# Optionally install provider/airflow packages found in /dist
if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then
    echo
    echo "Install all packages from dist folder"
    if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then
        echo "(except apache-airflow)"
    fi
    if [[ ${PACKAGE_FORMAT} == "both" ]]; then
        echo
        echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}"
        echo
        exit 1
    fi
    echo
    installable_files=()
    for file in /dist/*.{whl,tar.gz}
    do
        # NOTE(review): the quoted part of this pattern is matched literally
        # inside [[ ]], so `?` and `[0-9]` lose their glob meaning; also
        # ${file} starts with "/dist/". It looks like this skip of the
        # already-installed apache-airflow wheel can never fire — confirm.
        if [[ ${USE_AIRFLOW_VERSION} == "wheel" && ${file} == "apache?airflow-[0-9]"* ]]; then
            # Skip Apache Airflow package - it's just been installed above with extras
            echo "Skipping ${file}"
            continue
        fi
        if [[ ${PACKAGE_FORMAT} == "wheel" && ${file} == *".whl" ]]; then
            echo "Adding ${file} to install"
            installable_files+=( "${file}" )
        fi
        if [[ ${PACKAGE_FORMAT} == "sdist" && ${file} == *".tar.gz" ]]; then
            echo "Adding ${file} to install"
            installable_files+=( "${file}" )
        fi
    done
    if (( ${#installable_files[@]} )); then
        pip install "${installable_files[@]}" --no-deps
    fi
fi
export RUN_AIRFLOW_1_10=${RUN_AIRFLOW_1_10:="false"}
# Added to have run-tests on path
export PATH=${PATH}:${AIRFLOW_SOURCES}
# This is now set in conftest.py - only for pytest tests
unset AIRFLOW__CORE__UNIT_TEST_MODE
mkdir -pv "${AIRFLOW_HOME}/logs/"
cp -f "${IN_CONTAINER_DIR}/airflow_ci.cfg" "${AIRFLOW_HOME}/unittests.cfg"
# Change the default worker_concurrency for tests
export AIRFLOW__CELERY__WORKER_CONCURRENCY=8
# Run environment sanity check without aborting on failure, then report
set +e
"${IN_CONTAINER_DIR}/check_environment.sh"
ENVIRONMENT_EXIT_CODE=$?
set -e
if [[ ${ENVIRONMENT_EXIT_CODE} != 0 ]]; then
    echo
    echo "Error: check_environment returned ${ENVIRONMENT_EXIT_CODE}. Exiting."
    echo
    exit ${ENVIRONMENT_EXIT_CODE}
fi
# Create symbolic link to fix possible issues with kubectl config cmd-path
mkdir -p /usr/lib/google-cloud-sdk/bin
touch /usr/lib/google-cloud-sdk/bin/gcloud
ln -s -f /usr/bin/gcloud /usr/lib/google-cloud-sdk/bin/gcloud
# Set up ssh keys
echo 'yes' | ssh-keygen -t rsa -C your_email@youremail.com -m PEM -P '' -f ~/.ssh/id_rsa \
    >"${AIRFLOW_HOME}/logs/ssh-keygen.log" 2>&1
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
ln -s -f ~/.ssh/authorized_keys ~/.ssh/authorized_keys2
chmod 600 ~/.ssh/*
# SSH Service
sudo service ssh restart >/dev/null 2>&1
# Sometimes the server is not quick enough to load the keys!
while [[ $(ssh-keyscan -H localhost 2>/dev/null | wc -l) != "3" ]] ; do
    echo "Not all keys yet loaded by the server"
    sleep 0.05
done
ssh-keyscan -H localhost >> ~/.ssh/known_hosts 2>/dev/null
# shellcheck source=scripts/in_container/configure_environment.sh
. "${IN_CONTAINER_DIR}/configure_environment.sh"
# shellcheck source=scripts/in_container/run_init_script.sh
. "${IN_CONTAINER_DIR}/run_init_script.sh"
cd "${AIRFLOW_SOURCES}"
# Interactive mode: start airflow under tmux instead of running tests
if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
    export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
    export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
    # shellcheck source=scripts/in_container/bin/run_tmux
    exec run_tmux
fi
set +u
# If we do not want to run tests, we simply drop into bash
if [[ "${RUN_TESTS}" != "true" ]]; then
    exec /bin/bash "${@}"
fi
set -u
export RESULT_LOG_FILE="/files/test_result-${TEST_TYPE}-${BACKEND}.xml"
# Base pytest options shared by every test type
EXTRA_PYTEST_ARGS=(
    "--verbosity=0"
    "--strict-markers"
    "--durations=100"
    "--maxfail=50"
    "--color=yes"
    "--junitxml=${RESULT_LOG_FILE}"
    # timeouts in seconds for individual tests
    "--timeouts-order"
    "moi"
    "--setup-timeout=60"
    "--execution-timeout=60"
    "--teardown-timeout=60"
    # Only display summary for non-expected case
    # f - failed
    # E - error
    # X - xpassed (passed even if expected to fail)
    # The following cases are not displayed:
    # s - skipped
    # x - xfailed (expected to fail and failed)
    # p - passed
    # P - passed with output
    "-rfEX"
)
if [[ "${TEST_TYPE}" == "Helm" ]]; then
    # Enable parallelism
    EXTRA_PYTEST_ARGS+=(
        "-n" "auto"
    )
else
    EXTRA_PYTEST_ARGS+=(
        "--with-db-init"
    )
fi
if [[ ${ENABLE_TEST_COVERAGE:="false"} == "true" ]]; then
    EXTRA_PYTEST_ARGS+=(
        "--cov=airflow/"
        "--cov-config=.coveragerc"
        "--cov-report=xml:/files/coverage-${TEST_TYPE}-${BACKEND}.xml"
    )
fi
declare -a SELECTED_TESTS CLI_TESTS API_TESTS PROVIDERS_TESTS CORE_TESTS WWW_TESTS \
    ALL_TESTS ALL_PRESELECTED_TESTS ALL_OTHER_TESTS
# Finds all directories that are not on the list of tests
# - so that we do not skip any in the future if new directories are added
function find_all_other_tests() {
    local all_tests_dirs
    all_tests_dirs=$(find "tests" -type d)
    all_tests_dirs=$(echo "${all_tests_dirs}" | sed "/tests$/d" )
    all_tests_dirs=$(echo "${all_tests_dirs}" | sed "/tests\/dags/d" )
    local path
    for path in "${ALL_PRESELECTED_TESTS[@]}"
    do
        escaped_path="${path//\//\\\/}"
        all_tests_dirs=$(echo "${all_tests_dirs}" | sed "/${escaped_path}/d" )
    done
    for path in ${all_tests_dirs}
    do
        ALL_OTHER_TESTS+=("${path}")
    done
}
# Explicit test paths on the command line override TEST_TYPE selection
if [[ ${#@} -gt 0 && -n "$1" ]]; then
    SELECTED_TESTS=("${@}")
else
    CLI_TESTS=("tests/cli")
    API_TESTS=("tests/api" "tests/api_connexion")
    PROVIDERS_TESTS=("tests/providers")
    ALWAYS_TESTS=("tests/always")
    CORE_TESTS=(
        "tests/core"
        "tests/executors"
        "tests/jobs"
        "tests/models"
        "tests/serialization"
        "tests/ti_deps"
        "tests/utils"
    )
    WWW_TESTS=("tests/www")
    HELM_CHART_TESTS=("chart/tests")
    ALL_TESTS=("tests")
    ALL_PRESELECTED_TESTS=(
        "${CLI_TESTS[@]}"
        "${API_TESTS[@]}"
        "${PROVIDERS_TESTS[@]}"
        "${CORE_TESTS[@]}"
        "${ALWAYS_TESTS[@]}"
        "${WWW_TESTS[@]}"
    )
    if [[ ${TEST_TYPE:=""} == "CLI" ]]; then
        SELECTED_TESTS=("${CLI_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "API" ]]; then
        SELECTED_TESTS=("${API_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "Providers" ]]; then
        SELECTED_TESTS=("${PROVIDERS_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "Core" ]]; then
        SELECTED_TESTS=("${CORE_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "Always" ]]; then
        SELECTED_TESTS=("${ALWAYS_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "WWW" ]]; then
        SELECTED_TESTS=("${WWW_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "Helm" ]]; then
        SELECTED_TESTS=("${HELM_CHART_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "Other" ]]; then
        find_all_other_tests
        SELECTED_TESTS=("${ALL_OTHER_TESTS[@]}")
    elif [[ ${TEST_TYPE:=""} == "All" || ${TEST_TYPE} == "Quarantined" || \
        ${TEST_TYPE} == "Always" || \
        ${TEST_TYPE} == "Postgres" || ${TEST_TYPE} == "MySQL" || \
        ${TEST_TYPE} == "Long" || \
        ${TEST_TYPE} == "Integration" ]]; then
        SELECTED_TESTS=("${ALL_TESTS[@]}")
    else
        echo
        echo "${COLOR_RED}ERROR: Wrong test type ${TEST_TYPE}  ${COLOR_RESET}"
        echo
        exit 1
    fi
fi
readonly SELECTED_TESTS CLI_TESTS API_TESTS PROVIDERS_TESTS CORE_TESTS WWW_TESTS \
    ALL_TESTS ALL_PRESELECTED_TESTS
# Extra pytest arguments depending on integration list / test type
if [[ -n ${LIST_OF_INTEGRATION_TESTS_TO_RUN=} ]]; then
    # Integration tests
    for INT in ${LIST_OF_INTEGRATION_TESTS_TO_RUN}
    do
        EXTRA_PYTEST_ARGS+=("--integration" "${INT}")
    done
elif [[ ${TEST_TYPE:=""} == "Long" ]]; then
    EXTRA_PYTEST_ARGS+=(
        "-m" "long_running"
        "--include-long-running"
    )
elif [[ ${TEST_TYPE:=""} == "Postgres" ]]; then
    EXTRA_PYTEST_ARGS+=(
        "--backend" "postgres"
    )
elif [[ ${TEST_TYPE:=""} == "MySQL" ]]; then
    EXTRA_PYTEST_ARGS+=(
        "--backend" "mysql"
    )
elif [[ ${TEST_TYPE:=""} == "Quarantined" ]]; then
    EXTRA_PYTEST_ARGS+=(
        "-m" "quarantined"
        "--include-quarantined"
    )
fi
echo
echo "Running tests ${SELECTED_TESTS[*]}"
echo
ARGS=("${EXTRA_PYTEST_ARGS[@]}" "${SELECTED_TESTS[@]}")
if [[ ${RUN_SYSTEM_TESTS:="false"} == "true" ]]; then
    "${IN_CONTAINER_DIR}/run_system_tests.sh" "${ARGS[@]}"
else
    "${IN_CONTAINER_DIR}/run_ci_tests.sh" "${ARGS[@]}"
fi
package com.groupon.nakala;

import com.groupon.nakala.core.WordSentiment;

import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Unit test for {@link WordSentiment} polarity lookups.
 *
 * @author <EMAIL>
 */
public class WordSentimentTest {
    @Test
    public final void testPolarity() throws Exception {
        final WordSentiment sentiment = WordSentiment.getInstance();
        // Positive, neutral and negative words should map to +1, 0 and -1.
        assertEquals(1, sentiment.polarity("greatest"));
        assertEquals(0, sentiment.polarity("went"));
        assertEquals(-1, sentiment.polarity("worst"));
    }
}
<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 4 18:26:36 2019
@author: <NAME>
This code solves the scheduling problem using a genetic algorithm.
Implementation taken from pyeasyga
As input this code receives:
1. T = number of jobs [integer]
2. ni = number of operations of the job i [list of T elements]
3. m = number of machines [integer]
3. Mj´ = feasible machines for the operaion j of the job i [matrix of sum(ni) row, each row with n' feasible machines]
4. pj'k = processing time of the operation j' in the machine k [matrix of sum(ni) row, each row with n' feasible machines]
"""
from time import time
# Inputs
# (previous example instances kept for reference; the active instance is the
# last, uncommented one. A processing time of 1000 marks an infeasible
# machine for that operation.)
#T = 2 # number of jobs
#ni =[2,2] # number of operations of the job i
#ma = 2 # number of machines
#Mij = [[1,2],[1],[2],[1,2]]
#pjk = [[3,4],[5,1000],[1000,6],[2,2]]
#T = 3 # number of jobs
#ni =[2,2,2] # number of operations of the job i
#ma = 2 # number of machines
#Mij = [[1,2],[1,2],[1],[1,2],[2],[1,2]]
#pjk = [[3,4],[5,4],[2,1000],[2,4],[1000,3],[1,2]]
#T = 4 # number of jobs
#ni =[1,3,2,2] # number of operations of the job i
#ma = 3 # number of machines
#Mij = [[1,2,3],[1,3],[3],[1,2],[1,3],[1,2],[1,2,3],[1,3]]
#pjk = [[3,4,3],[5,1000,5],[1000,1000,6],[2,4,3],[1,1000,3],[1,2,1000],[2,2,2],[1,1000,1000]]
#T = 3 # number of jobs
#ni =[2,3,4] # number of operations of the job i
#ma = 5 # number of machines
#Mij = [[1,2,3,4,5],[1,3,4],[3,2],[1,2,5],[1,3,4],[1,2],[1,2,3],[1,3,5],[1,5]]
#pjk = [[3,4,3,4,4],[5,1000,5,4,1000],[1000,4,6,1000,1000],[2,4,1000,1000,4],
#       [1,1000,3,4,1000],[1,2,1000,1000,1000],[2,2,2,1000,1000],[1,1000,1,1000,2],
#       [4,1000,1000,1000,3]]
T = 4 # number of jobs
ni =[2,3,4,2] # number of operations of the job i
ma = 6 # number of machines
Mij = [[1,2,3,4,5],[1,3,4,6],[1,3,2],[1,2,5],[1,2,3,4],[1,2,5],[1,2,3,6],[1,3,5],[1,5,6],
       [1,6],[2,3,4]]
pjk = [[3,4,3,4,4,1000],[5,1000,5,4,1000,4],[3,4,6,1000,1000,1000],[2,4,1000,1000,4,1000],
       [1,3,3,2,1000,1000],[1,3,1000,1000,2,1000],[2,2,2,1000,1000,2],[1,1000,1,1000,2,1000],
       [4,1000,1000,1000,3,3],[3,1000,1000,1000,1000,4],[1000,5,3,4,1000,1000]]
"""
The individual is a list with T*ni*2 digits.
For each operation in each job it has the variable S and the variable X
The S for start time to process and the X for the machine where this operation will be done.
E.g: individual = [S11,X11,S12,X12..........Sini,Xini]
But first of all a dataset to be used during the algorithm must be made
"""
from pyeasyga import pyeasyga # import the library to be used
import random
# Pack the instance into a single list so every GA callback receives it:
# data[0]=T, data[1]=ni, data[2]=ma, data[3]=Mij, data[4]=pjk
data=[]
data.append(T)
data.append(ni)
data.append(ma)
data.append(Mij)
data.append(pjk)
def is_data_ok(data):
    """Sanity-check that ni, Mij and pjk have mutually consistent lengths.

    NOTE(review): the bare name `exit` below is a no-op expression (it is
    never *called*), so validation prints a message but does NOT abort the
    script — presumably `exit()` or raising an exception was intended.
    """
    sum_ni=0
    for i in range(0,len(data[1])):
        sum_ni+=data[1][i]
    if len(data[1])!=data[0]:
        print("Data invalid. Please check the length of ni list")
        exit
    elif len(data[3])!=sum_ni:
        print("Data invalid. Please check the length of Mij list")
        exit
    elif len(data[4])!=sum_ni:
        print("Data invalid. Please check the length of pjk list")
        exit
is_data_ok(data)
"""
To create a random individual a function called create_individual is created.
In this case, random values to S from 0 to the max of pjk*T are generated
and for X values between the feasible machines are generated
"""
def max_processing_time(data):
    """Return the largest finite processing time in pjk (1000 = infeasible)."""
    pjk=data[4]
    max_time=0
    for i in range(0,len(pjk)):
        for j in range(0,len(pjk[i])):
            if pjk[i][j]>max_time and pjk[i][j]!=1000:
                max_time=pjk[i][j]
    return max_time
def create_individual(data):
    """Build one random individual [S,X,S,X,...].

    Three construction strategies are picked with fixed odds via `list_to`
    (0: fully random, 1: greedy forward over operations, 2: greedy backward).
    The greedy variants assign each operation to a random feasible machine
    and start it as soon as that machine is free.
    """
    individual=[]
    # earliest free time per machine (index = machine number - 1)
    start_times=[0]*data[2]
    jobs=data[0]
    # weighted strategy choice: 0 appears 3x, 1 appears 4x, 2 appears 3x
    list_to=[2,1,2,0,1,2,0,1,1,0]
    random_number=random.randint(0,len(list_to)-1)
    reference=list_to[random_number]
    if reference == 1:
        # forward pass: operations in natural order
        a=0
        for i in range(0,jobs):
            for j in range(0,data[1][i]):
                position_X=random.randint(0,len(data[3][a])-1)
                X=data[3][a][position_X]
                S=start_times[X-1]
                individual.append(S)
                individual.append(X)
                start_times[X-1]=start_times[X-1]+data[4][a][X-1]
                a+=1
    elif reference == 2:
        # backward pass: operation data read from the end of Mij/pjk
        a=len(data[3])-1
        for i in range(0,jobs):
            for j in range(0,data[1][i]):
                position_X=random.randint(0,len(data[3][a])-1)
                X=data[3][a][position_X]
                S=start_times[X-1]
                individual.append(S)
                individual.append(X)
                start_times[X-1]=start_times[X-1]+data[4][a][X-1]
                a-=1
    else:
        # fully random: any machine (possibly infeasible) and any start time
        for i in range(0,jobs):
            for j in range(0,data[1][i]):
                X=random.randint(1,data[2])
                max_time=max_processing_time(data)
                S=random.randint(0,max_time)
                individual.append(S)
                individual.append(X)
    return individual
def mutate(individual):
    """Mutate in place: swap two genes of the same kind (S-S or X-X), or,
    for a mixed S/X pick, swap the S gene with a random S gene and
    re-randomize the X gene. Even indices hold S, odd indices hold X.
    """
    mutate_index1=random.randrange(len(individual))
    mutate_index2=random.randrange(len(individual))
    #max_time=max_processing_time(data)
    if ((mutate_index1%2)==0 and (mutate_index2%2)==0) or ((mutate_index1%2)!=0 and \
        (mutate_index2%2!=0)):
        # same gene kind: plain swap keeps S/X positions consistent
        individual[mutate_index1], individual[mutate_index2] = individual[mutate_index2], individual[mutate_index1]
    elif (mutate_index1%2)==0 and (mutate_index2%2)!=0:
        #if individual[mutate_index1]>(max_time/2):
        #    individual[mutate_index1]=individual[mutate_index1]+random.randint(-(max_time/2),(max_time/2))
        new_index=random.randrange(0,len(individual),2)
        individual[mutate_index1], individual[new_index] = individual[new_index], individual[mutate_index1]
        individual[mutate_index2]=random.randint(1,data[2])
    else:
        #if individual[mutate_index2]>(max_time/2):
        #    individual[mutate_index2]=individual[mutate_index2]+random.randint(-(max_time/2),(max_time/2))
        new_index=random.randrange(0,len(individual),2)
        individual[mutate_index2], individual[new_index] = individual[new_index], individual[mutate_index2]
        individual[mutate_index1]=random.randint(1,data[2])
"""
The fitness function is divided in two parts: 1. the Cmax is calculated from the individual,
2. the restrictions of the problema are validated to count how many fouls has the individual.
At the end the fitness value = cmax + fouls*constant
"""
def is_feasible_machine(operation,machine,data):
    """Return True if `machine` appears in the feasible list for `operation`."""
    Mij=data[3]
    count=0
    for i in range(0,len(Mij[operation])):
        if machine==Mij[operation][i]:
            count+=1
    if count == 0:
        return False
    else:
        return True
def operations_in_machine(machine,individual):
    """Return the operation indices assigned to `machine` in `individual`."""
    result=[]
    i=0
    while i<len(individual):
        if individual[i+1]==machine:
            result.append(int(i/2))
        i+=2
    return result
def fitness(individual,data):
    """Penalized makespan: Cmax plus 1000 per constraint violation point.

    NOTE(review): the local variable `fitness` shadows the function name —
    harmless here but easy to misread.
    """
    fitness=0
    pjk=data[4]
    i=0
    # Cmax = latest completion time over all operations
    for op in range(0,len(pjk)):
        if (individual[i]+pjk[op][individual[i+1]-1])>fitness:
            fitness=individual[i]+pjk[op][individual[i+1]-1]
        i+=2
    # ------restrictions---------------
    fouls=0
    j=0
    k=0
    # for each job, C of current operation must be less than the next
    for job in range(0,len(ni)):
        for op2 in range(0,ni[job]-1):
            if (individual[j]+pjk[k][individual[j+1]-1])>individual[j+2] or\
                individual[j]>=individual[j+2]:
                fouls+=4
            j+=2
            k+=1
        j+=2
        k+=1
    # an operation must be made in a feasible machine
    l=0
    while l<len(individual):
        if not is_feasible_machine(int(l/2),individual[l+1],data):
            fouls+=2
        l+=2
    # for each machine an operation must start at zero
    # for each mahcine, the operations cannot be mixed. Only one operation at a time
    count_zeros=0
    for machine2 in range(1,data[2]+1):
        #count_zeros=0
        operations2=operations_in_machine(machine2,individual)
        for op4 in range(0,len(operations2)):
            if individual[operations2[op4]*2]==0:
                count_zeros+=1
            start_reference=individual[operations2[op4]*2]
            end_reference=individual[operations2[op4]*2]+pjk[operations2[op4]][machine2-1]
            # pairwise overlap check of this operation against all others
            # scheduled on the same machine (covers the four overlap cases)
            for op5 in range(0,len(operations2)):
                if op5 != op4:
                    s=individual[operations2[op5]*2]
                    c=individual[operations2[op5]*2]+pjk[operations2[op5]][machine2-1]
                    if s<=start_reference and c>=end_reference:
                        fouls+=2
                    elif s>=start_reference and s<=end_reference and c<=end_reference:
                        fouls+=2
                    elif s<=start_reference and c>start_reference and c<=end_reference:
                        fouls+=2
                    elif s>=start_reference and s<end_reference and c>=end_reference:
                        fouls+=2
        #if count_zeros != 1:
            #fouls+=1
    # at least one operation (over all machines) must start at time zero
    if count_zeros == 0:
        fouls+=1
    fitness=fitness+(fouls*1000)
    return fitness
"""
At the end the create_individual and the fitness functions are added to the ga.
Then run and print the best individual
"""
# best individual of each attempt, accumulated across recursive retries
steps=[]
count_increment=0
def genetic_algorithm_scheduling(data,counter,pop_size=100,num_generations=500):
    """Run the GA; while the best fitness stays above 1000 (i.e. at least one
    constraint foul), retry with 100 more generations, up to 10 retries.

    Returns the module-global `steps` list of best individuals per attempt.
    NOTE(review): the recursive call's return value is discarded — results
    propagate only because `steps` is global. Also, a best fitness of
    exactly 1000 matches none of the branches below.
    """
    start_time=time()
    ga=pyeasyga.GeneticAlgorithm(data,maximise_fitness=False,population_size=pop_size,generations=num_generations,mutation_probability=0.3) # initialization of the algorithm
    ga.create_individual=create_individual
    ga.mutate_function=mutate
    ga.fitness_function=fitness
    ga.run()
    best_individual=ga.best_individual()
    steps.append(best_individual)
    best_fitness=best_individual[0]
    if best_fitness>1000 and counter<10:
        counter+=1
        new_generations=num_generations+100
        print("Incrementing generations to ",new_generations,"......")
        genetic_algorithm_scheduling(data,counter,pop_size,new_generations)
    elif best_fitness>1000 and counter==10:
        print("Feasible individual wasn't found!")
        print("Best infeasible individual: ",ga.best_individual())
        end_time=time()
        print("The execution time was: ",(end_time-start_time)," seconds")
    elif best_fitness<1000:
        end_time=time()
        print("Best feasible individual found! ",ga.best_individual())
        print("The execution time was: ",(end_time-start_time)," seconds")
        print("These were the different best individuals:")
        for i in range(0,len(steps)):
            print(steps[i])
    return steps
genetic_algorithm_scheduling(data,count_increment,pop_size=200)
#!/usr/bin/env python3
"""Train and evaluate a single-layer perceptron on the 'artificial' and
'income' datasets provided by helper.train_test_split.

Data layout: each row is a sequence whose last element is the 0/1 class
label and whose remaining elements are the feature values.
"""
import pandas as pd
import numpy as np
from helper import train_test_split


def predict(row, weights):
    """Return the predicted class (0 or 1) for one data row.

    weights[0] is the bias term; weights[1:] pair with the feature part of
    the row (everything except the trailing label).
    """
    weighted_sum = weights[0] + np.dot(weights[1:], row[:-1])
    return 1 if weighted_sum >= 0 else 0


def train_weights(train, learn_rate, epochs):
    """Learn perceptron weights with the error-driven update rule.

    Args:
        train: iterable of rows, each ending with the 0/1 label.
        learn_rate: step size applied to each misclassification error.
        epochs: number of full passes over the training data.

    Returns:
        numpy array [bias, w_1, ..., w_n] of the same length as a row.
    """
    # Explicit float zeros instead of np.zeros_like(train[0]):
    # zeros_like inherits the data's dtype, so an integer-valued dataset
    # would produce an integer weight vector and the fractional in-place
    # updates below would truncate or raise a casting error.
    weights = np.zeros(len(train[0]), dtype=float)
    for _ in range(epochs):
        for row in train:
            error = row[-1] - predict(row, weights)
            weights[0] += learn_rate * error
            # vectorized form of the per-feature update loop
            weights[1:] += learn_rate * error * np.asarray(row[:-1], dtype=float)
    return weights


def accuracy(data, weights):
    """Return the classification accuracy of `weights` on `data`, in percent."""
    predicted = [predict(row, weights) for row in data]
    actual = [row[-1] for row in data]
    return sum(1 for p, a in zip(predicted, actual) if p == a) / len(data) * 100.0


train, test = train_test_split()
weights = train_weights(train['artificial'], 0.1, 5)
acc = accuracy(test['artificial'], weights)
print(acc)
weights = train_weights(train['income'], 0.1, 5)
acc = accuracy(test['income'], weights)
print(acc)
#!/usr/bin/env bash
# End-to-end smoke test for the Singularity R/Python containers:
# verifies the image's R version label, then exercises container setup,
# project creation, in-container exec working directory, and the RStudio
# start/list/stop lifecycle. `set -ue` aborts on any failed check.
set -ue
export IMG="r-py.sif"
echo "Using Singularity image: ${IMG}"
# ################# Verify R version (label)
#
# Extract the R_Version label recorded in the image metadata.
version () {
    singularity inspect "${IMG}" | \
        grep "R_Version" | \
        awk '{print $2}'
}
# The R inside the container must match the labeled version.
singularity exec "$IMG" R -q -e "stopifnot(getRversion() == '$(version)')"
WD="$PWD"
set -x
# ################ Test container setup
#
cd "$WD"
mkdir test
mkdir test/container-r
cd test/container-r
cp -L ../../r-py.sif .
singularity run r-py.sif setup
# ################ Test project creation
#
cd ..
mkdir project-r
cd project-r
../container-r/createproject --yes
# ################ Test cexec working dir
#
# cexec must map the project root to /proj and subdirs accordingly.
wd="$(./cexec pwd)"
test "$wd" = "/proj"
mkdir subdir
cd subdir
wd="$(../cexec pwd)"
test "$wd" = "/proj/subdir"
# ################ RStudio: setup
#
cd "$WD"
mkdir test/container-rstudio
cd test/container-rstudio
cp -L ../../rstudio.sif .
singularity run rstudio.sif setup
# ################ RStudio: project creation
#
cd ..
mkdir project-rstudio
cd project-rstudio
../container-rstudio/createproject --yes
# ################ RStudio: cexec working dir
#
wd="$(./rstudio exec pwd)"
test "$wd" = "/proj"
mkdir subdir
cd subdir
wd="$(../rstudio exec pwd)"
test "$wd" = "/proj/subdir"
cd ..
# ################ RStudio: start, list, stop, restart
#
# Set RStudio password "test" (pre-hashed SHA-512 crypt entry)
cat >.rstudio-passwd <<'EOF'
$6$LqGleFffXVZs1uew$SQRgaAXw18WLbfKagMGGkbz5ZZ/zEsIDWDT4Q4L5P5x9ZcfYpAH18eZUppOvOY.AGfP5B0Sn20pPoFbBPKBv71
EOF
./rstudio start test
# wait until RStudio is started
sleep 5s
# `list` must also work from a subdirectory of the project
mkdir listtest
cd listtest
n="$(../rstudio list | wc -l)"
test "$n" = 2 # header and 1 running instance
cd ..
./rstudio stop test
sleep 5s
./rstudio start test
sleep 5s
./rstudio stop test
echo "All tests passed!"
#!/bin/sh
# Evaluate the int8 keypoint model on the COCO minival set (CPU).
#
# usage: eval.sh [MODEL_SUFFIX] [DATA_SUFFIX]
#   MODEL_SUFFIX selects the model directory (model/int8$MODEL_SUFFIX)
#   DATA_SUFFIX  selects the annotation/dataset variant
MODEL_SUFFIX="$1"
DATA_SUFFIX="$2"
DATASET_IM_DIR="/tmp/val2014"
DATASET_ANN="val2014/person_keypoints_minival2014$DATA_SUFFIX.json"
# Note: the original ended with a dangling "\" after the last argument,
# which would have swallowed any command that followed; removed.
python code/eval_kpt_cpu.py \
    --net "model/int8$MODEL_SUFFIX/model-nnapi.pb" \
    --init_net "model/int8$MODEL_SUFFIX/model_init.pb" \
    --dataset "coco_2014_minival$DATA_SUFFIX" \
    --dataset_dir "$DATASET_IM_DIR" \
    --dataset_ann "$DATASET_ANN" \
    --output_dir "output$MODEL_SUFFIX" \
    --min_size 320 \
    --max_size 640
/** * */ package org.ednovo.gooru.core.api.model; /** * @author parthi * */ public enum PartyType { USER("user"), ORGANIZATION("organization"), USERGROUP("userGroup"), NETWORK("network"); private String type; /** * */ PartyType(String type) { setType(type); } public String getType() { return type; } private void setType(String type) { this.type = type; } }
/*
 *    Copyright 2009-2012 The MyBatis Team
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */
package com.ibatis.sqlmap.engine.builder;

import com.ibatis.sqlmap.client.SqlMapException;
import com.ibatis.sqlmap.client.extensions.TypeHandlerCallback;
import com.ibatis.sqlmap.engine.datasource.DataSourceFactory;
import com.ibatis.sqlmap.engine.transaction.TransactionConfig;
import com.ibatis.sqlmap.engine.transaction.TransactionManager;
import com.ibatis.common.util.NodeEvent;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.reflection.MetaClass;
import org.apache.ibatis.reflection.factory.ObjectFactory;
import org.apache.ibatis.type.JdbcType;
import org.apache.ibatis.type.TypeHandler;
import org.apache.ibatis.parsing.XNode;
import com.ibatis.common.util.NodeEventParser;

import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Event-driven parser for an iBATIS 2 sqlMapConfig XML document.
 * Each {@code @NodeEvent}-annotated method handles one XPath-style node
 * of the config file and records its settings into an
 * {@link Ibatis2Configuration}.  {@code .../end()} handlers fire when the
 * corresponding element closes, after its child properties were collected.
 */
public class XmlSqlMapConfigParser {

  // Source of the XML document being parsed.
  private Reader reader;
  private NodeEventParser parser = new NodeEventParser();
  // Accumulates everything parsed from the config file.
  private Ibatis2Configuration config = new Ibatis2Configuration();
  // Properties collected for the dataSource / transactionManager elements
  // until their end() events fire.
  private Properties dataSourceProps = new Properties();
  private Properties transactionManagerProps = new Properties();
  private boolean useStatementNamespaces;
  // Reusable <sql> fragments keyed by id, shared with XmlSqlMapParser.
  private Map<String, XNode> sqlFragments = new HashMap<String, XNode>();

  /**
   * Creates a parser over the given config document.
   *
   * @param reader source of the sqlMapConfig XML
   */
  public XmlSqlMapConfigParser(Reader reader) {
    this.reader = reader;
    // This object handles its own @NodeEvent callbacks.
    this.parser.addNodeletHandler(this);
    this.useStatementNamespaces = false;
    this.parser.setEntityResolver(new SqlMapEntityResolver());
  }

  /**
   * Creates a parser with externally supplied variables available for
   * ${...} substitution during parsing.
   */
  public XmlSqlMapConfigParser(Reader reader, Properties props) {
    this(reader);
    this.config.setVariables(props);
    this.parser.setVariables(props);
    this.parser.setEntityResolver(new SqlMapEntityResolver());
  }

  /** Parses the whole document, firing the node-event handlers below. */
  public void parse() {
    parser.parse(reader);
  }

  /** @return true if a shared &lt;sql&gt; fragment with this id was registered */
  public boolean hasSqlFragment(String id) {
    return sqlFragments.containsKey(id);
  }

  /** @return the registered &lt;sql&gt; fragment node, or null */
  public XNode getSqlFragment(String id) {
    return sqlFragments.get(id);
  }

  /** Registers a shared &lt;sql&gt; fragment for later inclusion. */
  public void addSqlFragment(String id, XNode context) {
    sqlFragments.put(id, context);
  }

  /** @return the configuration built up by this parser */
  public Ibatis2Configuration getConfiguration() {
    return config;
  }

  /** @return whether statement ids are namespaced with their sqlMap name */
  public boolean isUseStatementNamespaces() {
    return useStatementNamespaces;
  }

  /**
   * Handles &lt;properties&gt;: loads variables from a classpath resource
   * or URL, then lets programmatically passed variables override them.
   */
  @NodeEvent("/sqlMapConfig/properties")
  public void sqlMapConfigproperties(XNode context) throws Exception {
    String resource = context.getStringAttribute("resource");
    String url = context.getStringAttribute("url");
    Properties fileVariables;
    if (resource != null) {
      fileVariables = Resources.getResourceAsProperties(resource);
    } else if (url != null) {
      fileVariables = Resources.getUrlAsProperties(url);
    } else {
      throw new RuntimeException("The properties element requires either a resource or a url attribute.");
    }
    // Override file variables with those passed in programmatically
    Properties passedVariables = config.getVariables();
    if (passedVariables != null) {
      fileVariables.putAll(passedVariables);
    }
    config.setVariables(fileVariables);
    parser.setVariables(fileVariables);
  }

  /**
   * Handles &lt;settings&gt;: maps iBATIS 2 settings onto the MyBatis 3
   * configuration.  Note the executor type: statement caching selects
   * REUSE, but batchUpdatesEnabled (checked afterwards) overrides it
   * with BATCH when both are on.
   */
  @NodeEvent("/sqlMapConfig/settings")
  public void sqlMapConfigsettings(XNode context) throws Exception {
    boolean classInfoCacheEnabled = context.getBooleanAttribute("classInfoCacheEnabled", true);
    MetaClass.setClassCacheEnabled(classInfoCacheEnabled);

    boolean lazyLoadingEnabled = context.getBooleanAttribute("lazyLoadingEnabled", true);
    config.setLazyLoadingEnabled(lazyLoadingEnabled);

    boolean statementCachingEnabled = context.getBooleanAttribute("statementCachingEnabled", true);
    if (statementCachingEnabled) {
      config.setDefaultExecutorType(ExecutorType.REUSE);
    }

    boolean batchUpdatesEnabled = context.getBooleanAttribute("batchUpdatesEnabled", true);
    if (batchUpdatesEnabled) {
      config.setDefaultExecutorType(ExecutorType.BATCH);
    }

    boolean cacheModelsEnabled = context.getBooleanAttribute("cacheModelsEnabled", true);
    config.setCacheEnabled(cacheModelsEnabled);

    boolean useColumnLabel = context.getBooleanAttribute("useColumnLabel", false);
    config.setUseColumnLabel(useColumnLabel);

    boolean forceMultipleResultSetSupport = context.getBooleanAttribute("forceMultipleResultSetSupport", true);
    config.setMultipleResultSetsEnabled(forceMultipleResultSetSupport);

    useStatementNamespaces = context.getBooleanAttribute("useStatementNamespaces", false);

    Integer defaultTimeout = context.getIntAttribute("defaultStatementTimeout");
    config.setDefaultStatementTimeout(defaultTimeout);
  }

  /** Handles &lt;typeAlias&gt;: registers a short alias for a class name. */
  @NodeEvent("/sqlMapConfig/typeAlias")
  public void sqlMapConfigtypeAlias(XNode context) throws Exception {
    String alias = context.getStringAttribute("alias");
    String type = context.getStringAttribute("type");
    config.getTypeAliasRegistry().registerAlias(alias, type);
  }

  /**
   * Handles &lt;typeHandler&gt;: adapts an iBATIS 2 TypeHandlerCallback
   * into a MyBatis 3 TypeHandler and registers it for the given
   * java/jdbc type pair.  Silently ignored if the callback class does
   * not implement TypeHandlerCallback.
   */
  @NodeEvent("/sqlMapConfig/typeHandler")
  public void sqlMapConfigtypeHandler(XNode context) throws Exception {
    String jdbcType = context.getStringAttribute("jdbcType");
    String javaType = context.getStringAttribute("javaType");
    String callback = context.getStringAttribute("callback");
    if (javaType != null && callback != null) {
      JdbcType jdbcTypeEnum = JdbcType.valueOf(jdbcType);
      Class javaTypeClass = config.getTypeAliasRegistry().resolveAlias(javaType);
      Class callbackClass = config.getTypeAliasRegistry().resolveAlias(callback);
      Object o = callbackClass.newInstance();
      if (o instanceof TypeHandlerCallback) {
        TypeHandler typeHandler = new TypeHandlerCallbackAdapter((TypeHandlerCallback) o);
        config.getTypeHandlerRegistry().register(javaTypeClass, jdbcTypeEnum, typeHandler);
      }
    }
  }

  /**
   * Fires when &lt;transactionManager&gt; closes: instantiates the
   * configured TransactionConfig with the properties and dataSource
   * gathered from its children, and installs the TransactionManager.
   */
  @NodeEvent("/sqlMapConfig/transactionManager/end()")
  public void sqlMapConfigtransactionManagerend(XNode context) throws Exception {
    String type = context.getStringAttribute("type");
    Class txClass = config.getTypeAliasRegistry().resolveAlias(type);

    boolean commitRequired = context.getBooleanAttribute("commitRequired", false);

    TransactionConfig txConfig = (TransactionConfig) txClass.newInstance();
    txConfig.setDataSource(config.getDataSource());
    txConfig.setProperties(transactionManagerProps);
    txConfig.setForceCommit(commitRequired);

    config.setTransactionManager(new TransactionManager(config, txConfig));
  }

  /** Collects one &lt;property&gt; child of &lt;transactionManager&gt;. */
  @NodeEvent("/sqlMapConfig/transactionManager/property")
  public void sqlMapConfigtransactionManagerproperty(XNode context) throws Exception {
    String name = context.getStringAttribute("name");
    String value = context.getStringAttribute("value");
    transactionManagerProps.setProperty(name, value);
  }

  /** Collects one &lt;property&gt; child of &lt;dataSource&gt;. */
  @NodeEvent("/sqlMapConfig/transactionManager/dataSource/property")
  public void sqlMapConfigtransactionManagerdataSourceproperty(XNode context) throws Exception {
    String name = context.getStringAttribute("name");
    String value = context.getStringAttribute("value");
    dataSourceProps.setProperty(name, value);
  }

  /**
   * Fires when &lt;dataSource&gt; closes: builds the DataSource via the
   * configured factory and the collected properties.
   */
  @NodeEvent("/sqlMapConfig/transactionManager/dataSource/end()")
  public void sqlMapConfigtransactionManagerdataSourceend(XNode context) throws Exception {
    String type = context.getStringAttribute("type");
    Class dataSourceClass = config.getTypeAliasRegistry().resolveAlias(type);
    DataSourceFactory dsFactory = (DataSourceFactory) dataSourceClass.newInstance();
    dsFactory.initialize(dataSourceProps);
    config.setDataSource(dsFactory.getDataSource());
  }

  /**
   * Handles &lt;resultObjectFactory&gt;: installs a custom ObjectFactory,
   * resolved by fully qualified class name (not via the alias registry).
   */
  @NodeEvent("/sqlMapConfig/resultObjectFactory")
  public void sqlMapConfigresultObjectFactory(XNode context) throws Exception {
    String type = context.getStringAttribute("type");
    Class factoryClass = Class.forName(type);
    ObjectFactory factory = (ObjectFactory) factoryClass.newInstance();
    Properties props = context.getChildrenAsProperties();
    factory.setProperties(props);
    config.setObjectFactory(factory);
  }

  /**
   * Handles &lt;sqlMap&gt;: recursively parses the referenced sql-map
   * document from a classpath resource or URL.
   */
  @NodeEvent("/sqlMapConfig/sqlMap")
  public void sqlMapConfigsqlMap(XNode context) throws Exception {
    String resource = context.getStringAttribute("resource");
    String url = context.getStringAttribute("url");
    Reader reader = null;
    if (resource != null) {
      reader = Resources.getResourceAsReader(resource);
    } else if (url != null) {
      reader = Resources.getUrlAsReader(url);
    } else {
      throw new SqlMapException("The sqlMap element requires either a resource or a url attribute.");
    }
    new XmlSqlMapParser(this, reader).parse();
  }

}
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Download the four MNIST archives into data/mnist and decompress them.
mkdir -p data/mnist
pushd data/mnist
for part in train-images-idx3 train-labels-idx1 t10k-images-idx3 t10k-labels-idx1; do
    wget "http://yann.lecun.com/exdb/mnist/${part}-ubyte.gz"
done
gzip -d *.gz
popd
import makeActionCreator from "./creator"
import {
  PHOTOS_REQUEST,
  PHOTOS_SUCCESS,
  PHOTOS_FAILURE,
  PHOTOIDS_SELECTED_BY_KEY_SET,
  PHOTOIDS_SELECTED_BY_KEY_REMOVE
} from "./types"

// Action creators for the photos slice.
// The extra string arguments name the payload properties each action carries.
export const photosRequest = makeActionCreator(PHOTOS_REQUEST)
export const photosSuccess = makeActionCreator(PHOTOS_SUCCESS)
export const photosFailure = makeActionCreator(PHOTOS_FAILURE)
export const photoIDsSelectedByKeySet = makeActionCreator(PHOTOIDS_SELECTED_BY_KEY_SET, "data")
export const photoIDsSelectedByKeyRemove = makeActionCreator(PHOTOIDS_SELECTED_BY_KEY_REMOVE, "key")
/*************************************************************************** * Copyright (C) 2021 by <NAME>, <NAME> * * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * As a special exception, if other files instantiate templates or use * * macros or inline functions from this file, or you compile this file * * and link it with other works to produce a work based on this file, * * this file does not by itself cause the resulting work to be covered * * by the GNU General Public License. However the source code for this * * file must still be made available in accordance with the GNU General * * Public License. This exception does not invalidate any other reasons * * why a work based on this file might be covered by the GNU General * * Public License. 
 *                                                                         *
 *   You should have received a copy of the GNU General Public License     *
 *   along with this program; if not, see <http://www.gnu.org/licenses/>   *
 ***************************************************************************/

#pragma once

#include <config/mxgui_settings.h>
#include "display.h"
#include "point.h"
#include "color.h"
#include "iterator_direction.h"
#include "miosix.h"
#include <cstdio>
#include <cstring>
#include <algorithm>

namespace mxgui {

#ifndef MXGUI_COLOR_DEPTH_16_BIT
#error The ST7735 driver requires a color depth 16 per pixel
#endif

//Used to transiently pull low either the csx or dcx pin
//RAII guard: the pin is driven low on construction and high on destruction.
class Transaction
{
public:
    Transaction(miosix::GpioPin pin) : pin(pin) { pin.low(); }
    ~Transaction() { pin.high(); }
private:
    miosix::GpioPin pin;
};

/**
 * Generic driver for a ST7735 display. The SPI interface and mapping of the
 * csx, dcx and resx pins is retargetable.
 */
class DisplayGenericST7735 : public Display
{
public:
    /**
     * Turn the display On after it has been turned Off.
     * Display initial state On.
     */
    void doTurnOn() override;

    /**
     * Turn the display Off. It can be later turned back On.
     */
    void doTurnOff() override;

    /**
     * Set display brightness. Depending on the underlying driver,
     * may do nothing.
     * \param brt from 0 to 100
     */
    void doSetBrightness(int brt) override;

    /**
     * \return a pair with the display height and width
     */
    std::pair<short int, short int> doGetSize() const override;

    /**
     * Write text to the display. If text is too long it will be truncated
     * \param p point where the upper left corner of the text will be printed
     * \param text, text to print.
     */
    void write(Point p, const char *text) override;

    /**
     * Write part of text to the display
     * \param p point of the upper left corner where the text will be drawn.
     * Negative coordinates are allowed, as long as the clipped view has
     * positive or zero coordinates
     * \param a Upper left corner of clipping rectangle
     * \param b Lower right corner of clipping rectangle
     * \param text text to write
     */
    void clippedWrite(Point p, Point a, Point b, const char *text) override;

    /**
     * Clear the Display. The screen will be filled with the desired color
     * \param color fill color
     */
    void clear(Color color) override;

    /**
     * Clear an area of the screen
     * \param p1 upper left corner of area to clear
     * \param p2 lower right corner of area to clear
     * \param color fill color
     */
    void clear(Point p1, Point p2, Color color) override;

    /**
     * This member function is used on some target displays to reset the
     * drawing window to its default value. You have to call beginPixel() once
     * before calling setPixel(). You can then make any number of calls to
     * setPixel() without calling beginPixel() again, as long as you don't
     * call any other member function in this class. If you call another
     * member function, for example line(), you have to call beginPixel() again
     * before calling setPixel().
     */
    void beginPixel() override;

    /**
     * Draw a pixel with desired color.
     * \param p point where to draw pixel
     * \param color pixel color
     */
    void setPixel(Point p, Color color) override;

    /**
     * Draw a line between point a and point b, with color c
     * \param a first point
     * \param b second point
     * \param c line color
     */
    void line(Point a, Point b, Color color) override;

    /**
     * Draw an horizontal line on screen.
     * Instead of line(), this member function takes an array of colors to be
     * able to individually set pixel colors of a line.
     * \param p starting point of the line
     * \param colors an array of pixel colors whoase size must be b.x()-a.x()+1
     * \param length length of colors array.
     * p.x()+length must be <= display.width()
     */
    void scanLine(Point p, const Color *colors, unsigned short length) override;

    /**
     * \return a buffer of length equal to this->getWidth() that can be used to
     * render a scanline.
     */
    Color *getScanLineBuffer() override;

    /**
     * Draw the content of the last getScanLineBuffer() on an horizontal line
     * on the screen.
     * \param p starting point of the line
     * \param length length of colors array.
     * p.x()+length must be <= display.width()
     */
    void scanLineBuffer(Point p, unsigned short length) override;

    /**
     * Draw an image on the screen
     * \param p point of the upper left corner where the image will be drawn
     * \param i image to draw
     */
    void drawImage(Point p, const ImageBase& img) override;

    /**
     * Draw part of an image on the screen
     * \param p point of the upper left corner where the image will be drawn.
     * Negative coordinates are allowed, as long as the clipped view has
     * positive or zero coordinates
     * \param a Upper left corner of clipping rectangle
     * \param b Lower right corner of clipping rectangle
     * \param img Image to draw
     */
    void clippedDrawImage(Point p, Point a, Point b, const ImageBase& img) override;

    /**
     * Draw a rectangle (not filled) with the desired color
     * \param a upper left corner of the rectangle
     * \param b lower right corner of the rectangle
     * \param c color of the line
     */
    void drawRectangle(Point a, Point b, Color c) override;

    /**
     * Make all changes done to the display since the last call to update()
     * visible.
     */
    void update() override;

    /**
     * Pixel iterator. A pixel iterator is an output iterator that allows to
     * define a window on the display and write to its pixels.
     */
    class pixel_iterator
    {
    public:
        /**
         * Default constructor, results in an invalid iterator.
         */
        //NOTE(review): 'display' is left uninitialized by both constructors;
        //presumably begin() assigns it through friendship before any
        //dereference in operator= -- confirm in the .cpp implementation.
        pixel_iterator(): pixelLeft(0) {}

        /**
         * Set a pixel and move the pointer to the next one
         * \param color color to set the current pixel
         * \return a reference to this
         */
        pixel_iterator& operator= (Color color)
        {
            pixelLeft--;
            //Color is 16 bit, sent as two 8-bit SPI writes, MSB first.
            unsigned char lsb = color & 0xFF;
            unsigned char msb = (color >> 8) & 0xFF;

            Transaction t(display->csx);
            display->writeRam(msb);
            display->writeRam(lsb);
            return *this;
        }

        /**
         * Compare two pixel_iterators for equality.
         * They are equal if they point to the same location.
         */
        bool operator== (const pixel_iterator& itr)
        {
            return this->pixelLeft==itr.pixelLeft;
        }

        /**
         * Compare two pixel_iterators for inequality.
         * They different if they point to different locations.
         */
        bool operator!= (const pixel_iterator& itr)
        {
            return this->pixelLeft!=itr.pixelLeft;
        }

        /**
         * \return a reference to this.
         */
        pixel_iterator& operator* () { return *this; }

        /**
         * \return a reference to this. Does not increment pixel pointer.
         */
        pixel_iterator& operator++ () { return *this; }

        /**
         * \return a reference to this. Does not increment pixel pointer.
         */
        pixel_iterator& operator++ (int) { return *this; }

        /**
         * Must be called if not all pixels of the required window are going
         * to be written.
         */
        void invalidate() {}

    private:
        /**
         * Constructor
         * \param pixelLeft number of remaining pixels
         */
        pixel_iterator(unsigned int pixelLeft): pixelLeft(pixelLeft) {}

        unsigned int pixelLeft; ///< How many pixels are left to draw
        DisplayGenericST7735 *display;

        friend class DisplayGenericST7735; //Needs access to ctor
    };

    /**
     * Specify a window on screen and return an object that allows to write
     * its pixels.
     * Note: a call to begin() will invalidate any previous iterator.
     * \param p1 upper left corner of window
     * \param p2 lower right corner (included)
     * \param d increment direction
     * \return a pixel iterator
     */
    pixel_iterator begin(Point p1, Point p2, IteratorDirection d);

    /**
     * \return an iterator which is one past the last pixel in the pixel
     * specified by begin. Behaviour is undefined if called before calling
     * begin()
     */
    pixel_iterator end() const
    {
        // Default ctor: pixelLeft is zero
        return pixel_iterator();
    }

    /**
     * Destructor
     */
    ~DisplayGenericST7735() override;

protected:
    /**
     * Constructor.
     * \param csx chip select pin
     * \param dcx data/command pin
     * \param resx reset pin
     */
    DisplayGenericST7735(miosix::GpioPin csx, miosix::GpioPin dcx,
                         miosix::GpioPin resx);

    void initialize();

    miosix::GpioPin csx;   ///< Chip select
    miosix::GpioPin dcx;   ///< Data/Command
    miosix::GpioPin resx;  ///< Reset

private:
#if defined MXGUI_ORIENTATION_VERTICAL
    static const short int width = 128;
    static const short int height = 160;
#elif defined MXGUI_ORIENTATION_HORIZONTAL
    static const short int width = 160;
    static const short int height = 128;
#else
#error Orientation not defined
#endif

    /**
     * Set cursor to desired location
     * \param point where to set cursor (0<=x<=127, 0<=y<=159)
     */
    inline void setCursor(Point p)
    {
        window(p, p, false);
    }

    /**
     * Register 0x36: MADCTL
     *       bit 7------0
     *   4:      |||||+-- MH horizontal referesh (0 L-to-R, 1 R-to-L)
     *   8:      ||||+--- RGB BRG order (0 for RGB)
     *  16:      |||+---- ML vertical refesh (0 T-to-B, 1 B-to-T)
     *  32:      ||+----- MV row column exchange (1 for X-Y exchange)
     *  64:      |+------ MX column address order (1 for mirror X axis)
     * 128:      +------- MY row address order (1 for mirror Y axis)
     */

    /**
     * Set a hardware window on the screen, optimized for writing text.
     * The GRAM increment will be set to up-to-down first, then left-to-right
     * which is the correct increment to draw fonts
     * \param p1 upper left corner of the window
     * \param p2 lower right corner of the window
     */
    inline void textWindow(Point p1, Point p2)
    {
#ifdef MXGUI_ORIENTATION_VERTICAL
        writeReg (0x36, 0xE0);  // MADCTL: MX + MY + MV
        window(p1, p2, true);
#else //MXGUI_ORIENTATION_HORIZONTAL
        writeReg (0x36, 0x80);  // MADCTL: MY
        window(p1, p2, true);
#endif
    }

    /**
     * Set a hardware window on the screen, optimized for drawing images.
     * The GRAM increment will be set to left-to-right first, then up-to-down
     * which is the correct increment to draw images
     * \param p1 upper left corner of the window
     * \param p2 lower right corner of the window
     */
    inline void imageWindow(Point p1, Point p2)
    {
#ifdef MXGUI_ORIENTATION_VERTICAL
        writeReg (0x36, 0xC0);  // MADCTL: MX + MY
        window(p1, p2, false);
#else //MXGUI_ORIENTATION_HORIZONTAL
        writeReg (0x36, 0xA0);  // MADCTL: MY + MV
        window(p1, p2, false);
#endif
    }

    /**
     * Common part of all window commands
     */
    void window(Point p1, Point p2, bool swap);

    /**
     * Sends command 0x2C to signal the start of data sending
     */
    void writeRamBegin()
    {
        Transaction c(dcx);
        writeRam(0x2C); //ST7735_RAMWR, to write the GRAM
    }

    /**
     * Used to send pixel data to the display's RAM, and also to send commands.
     * The SPI chip select must be low before calling this member function
     * \param data data to write
     */
    virtual unsigned char writeRam(unsigned char data) = 0;

    /**
     * Write data to a display register
     * \param reg which register?
     * \param data data to write
     */
    virtual void writeReg(unsigned char reg, unsigned char data) = 0;

    /**
     * Write data to a display register
     * \param reg which register?
     * \param data data to write, if null only reg will be written (zero arg cmd)
     * \param len length of data, number of argument bytes
     */
    virtual void writeReg(unsigned char reg, const unsigned char *data=0,
                          int len=1) = 0;

    /**
     * Send multiple commands to the display MCU (we use to send init sequence)
     * \param cmds static array containing the commands
     */
    void sendCmds(const unsigned char *cmds);

    Color *buffer; ///< For scanLineBuffer
};

} //namespace mxgui
# Clean-up: remove every generated run directory, database, log and
# intermediate file so the calculation tree can be rebuilt from scratch.
# ("bands" appears twice in the list; rm handles the duplicate harmlessly.)
rm -rf relax scf nscf bands database gw gw_conv bse bse_conv ip gw_bse relax.log scf.log nscf.log yambo_bse.log yambo_gw.log yambo.log p2y.log rt nscf-dg bands phonons proj.in rt-dg
import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
// NOTE(review): Location, STORYLINE, COSTUMES, PRANKS and CANDY are
// imported but unused in this component — candidates for removal.
import { Location } from '@angular/common';

import { Player } from '../player.model';
import { Storyline } from '../storyline.model';
import { STORYLINE } from '../mock-storyline';
import { PlayerService } from '../player.service';
import { StorylineService } from '../storyline.service';
import { COSTUMES, PRANKS, CANDY } from '../mock-characteristics';

/**
 * Landing page of the candy-hunt game: collects the player's name,
 * costume, prank and candy choices, then routes to the first house.
 */
@Component({
  selector: 'app-welcome',
  templateUrl: './welcome.component.html',
  styleUrls: ['./welcome.component.css'],
  providers: [PlayerService, StorylineService]
})
export class WelcomeComponent implements OnInit {
  // The player being assembled from the form selections.
  player: Player;
  storyline: Storyline[];
  storylineId: number;
  costumes: string[];

  constructor(
    private playerService: PlayerService,
    private storylineService: StorylineService,
    private router: Router
  ) { }

  ngOnInit() {
    // this.storyline = this.storylineService.getStorylineById();
  }

  // Builds the Player from the values chosen in the template's selects.
  getValueFromSelect(newName: string, selectedCostume: string, selectedPrank: string, selectedCandy: string) {
    this.player = new Player(
      newName,
      selectedCostume,
      selectedPrank,
      selectedCandy
    )
  }

  // Advances the player's location: +1 for direction 1, otherwise +6.
  // NOTE(review): console.log looks like leftover debugging output.
  makeChoice(direction) {
    if (direction === 1){
      this.player.location += 1;
    } else {
      this.player.location += 6;
    }
    console.log(this.player.location);
    return this.player.location
  }

  // Navigates to the house matching the player's current location.
  // NOTE(review): the clickedButton parameter is never used — confirm
  // whether it should drive the navigation instead of player.location.
  goToNextHouse(clickedButton: Storyline) {
    this.router.navigate(['house', this.player.location]);
  }
}
#!/bin/bash
# Run the Go test suite with the race detector against vendored deps,
# filtering the noisy "no test files" lines from the output.
set -e
# Without pipefail, a failing `go test` is masked by the grep stage and
# the script exits 0 even when tests fail.
set -o pipefail

echo "> Test"
# `|| true` keeps the filter itself from aborting the script when grep
# removes every line (grep exits 1 on no matches); pipefail still
# propagates a go-test failure. "$@" preserves argument word boundaries.
GO111MODULE=on go test -race -mod=vendor "$@" | { grep -v 'no test files' || true; }
package com.zhiyi.onepay.util;

import android.os.Handler;
import android.os.Message;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;

/**
 * Helper for issuing HTTP GET requests on a background thread and
 * delivering the response body to a Handler as a SHOW_RESPONSE message.
 *
 * NOTE(review): uses the long-deprecated Apache HttpClient stack, and the
 * client is never closed/released — consider HttpURLConnection or OkHttp.
 */
public class RequestUtils {
    // Message.what value identifying a delivered response body.
    private static final int SHOW_RESPONSE=1;

    /**
     * Fires a GET request to {@code url} on a new thread; on HTTP 200 the
     * UTF-8 decoded body is posted to {@code handler} (what = SHOW_RESPONSE,
     * obj = response String). Non-200 responses and exceptions are dropped
     * silently (exceptions only logged via printStackTrace).
     */
    public static void getRequest(final String url,final Handler handler) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                // Sending a request with HttpClient takes five steps.
                // Step 1: create the HttpClient object.
                HttpClient httpCient = new DefaultHttpClient();
                // Step 2: create the request object; the argument is the server address to access.
                HttpGet httpGet = new HttpGet(url);
                try {
                    // Step 3: execute the request and obtain the response object sent back by the server.
                    HttpResponse httpResponse = httpCient.execute(httpGet);
                    // Step 4: check whether the response status is normal; status code 200 means OK.
                    if (httpResponse.getStatusLine().getStatusCode() == 200) {
                        // Step 5: take the data out of the response object, into an entity.
                        HttpEntity entity = httpResponse.getEntity();
                        String response = EntityUtils.toString(entity,"utf-8"); // convert the entity's data into a String
                        // Send the Message object from this worker thread.
                        Message message = new Message();
                        message.what = SHOW_RESPONSE;
                        // NOTE(review): response is already a String; toString() is redundant.
                        message.obj = response.toString();
                        handler.sendMessage(message);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }).start(); // don't forget this start() call
    }
}
// Mocha/Chai test suite for the project's LinkedList implementation.
// Each describe block rebuilds the same five-element fixture in `before`;
// unshift pushes to the head, so after setup head.value === 1.
var should = require('chai').should(),
    LinkedList = require('../'),
    list;

describe('LinkedList', function() {
  describe('#unshift', function () {
    before(function () {
      var values = [[4, 5], { test: 3 }, null, '2', 1];
      list = new LinkedList();
      values.forEach(function (v) {
        list.unshift(v);
      });
    });

    it('should be equal to head value', function () {
      list.get(list.head).should.be.equal(1);
    });

    it('should be equal to next value after head', function () {
      list.get(list.head.next).should.be.equal('2');
    });

    it('should not exist', function () {
      should.not.exist(list.get(list.head.prev));
    });
  });

  describe('#shift', function () {
    before(function () {
      var values = [[4, 5], { test: 3 }, null, '2', 1];
      list = new LinkedList();
      values.forEach(function (v) {
        list.unshift(v);
      });
    });

    it('should be equal to removed value', function () {
      list.shift().should.be.equal(1);
    });

    it('should be equal to head value', function () {
      list.get(list.head).should.be.equal('2');
    });

    it('should be equal to removed value', function () {
      list.shift().should.be.equal('2');
    });

    it('should not exist', function () {
      // Drain the remaining elements; an emptied list must expose no
      // head value or neighbours.
      list.shift();
      list.shift();
      list.shift();
      list.shift();
      should.not.exist(list.get(list.head));
      should.not.exist(list.head.prev);
      should.not.exist(list.head.next);
    });
  });

  describe('#remove', function () {
    before(function () {
      var values = [[4, 5], { test: 3 }, null, '2', 1];
      list = new LinkedList();
      values.forEach(function (v) {
        list.unshift(v);
      });
    });

    it('should be equal to removed value', function () {
      list.remove(list.head).should.be.equal(1);
    });

    it('should not exist', function () {
      list.remove(list.head);
      list.remove(list.head);
      list.remove(list.head);
      list.remove(list.head);
      list.remove(list.head);
      should.not.exist(list.get(list.head));
      should.not.exist(list.head.prev);
      should.not.exist(list.head.next);
    });
  });

  // NOTE(review): the suites below are empty placeholders — the fixture
  // in #prepend is built but never asserted against.
  describe('#prepend', function () {
    before(function () {
      var values = [[4, 5], { test: 3 }, null, '2', 1];
      list = new LinkedList();
      values.forEach(function (v) {
        list.unshift(v);
      });
    });
  });

  describe('#forEach', function () {
  });

  describe('#reverse', function () {
  });

  describe('#insertBefore', function () {
  });

  describe('#insertAfter', function () {
  });

  describe('#join', function () {
  });

  describe('#toString', function () {
  });

  describe('#valueOf', function () {
  });

  describe('#isEmpty', function () {
  });

  describe('#Symbol.iterator', function () {
  });

  describe('#isLinkedList', function () {
  });
});
// Jest tests for <include> tag processing: src resolution, `optional`,
// `inline`, `trim` and `omitFrontmatter` attributes, segment (#id)
// references, error rendering and cyclic-reference detection.
// The real fs module is mocked (memfs-style): fixtures are loaded with
// fs.vol.fromJSON and reset after each test.
const path = require('path');
const fs = require('fs');
const { getNewDefaultNodeProcessor } = require('../utils/utils');

jest.mock('fs');

afterEach(() => fs.vol.reset());

test('includeFile replaces <include> with <div>', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="include.md" />',
    '',
  ].join('\n');

  const include = ['# Include'].join('\n');

  // NOTE(review): unlike the later tests, 'index.md' is not added to the
  // mock volume here — the index content is passed to process() directly,
  // so this works, but it is inconsistent with the other fixtures.
  const json = {
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div><h1 id="include"><span id="include" class="anchor"></span>Include</h1></div>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="exist.md" optional> with <div>', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="exist.md" optional/>',
    '',
  ].join('\n');

  const exist = ['# Exist'].join('\n');

  const json = {
    'index.md': index,
    'exist.md': exist,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div><h1 id="exist"><span id="exist" class="anchor"></span>Exist</h1></div>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="doesNotExist.md" optional> with empty <div>', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="doesNotExist.md" optional/>',
    '',
  ].join('\n');

  const json = {
    'index.md': index,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  // A missing optional whole-file include is left as the (resolved) tag.
  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<include src="/doesNotExist.md" optional></include>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="include.md#exists"> with <div>', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="include.md#exists"/>',
    '',
  ].join('\n');

  const include = [
    '# Include',
    '<seg id="exists">existing segment</seg>',
  ].join('\n');

  const json = {
    'index.md': index,
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div>existing segment</div>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="include.md#exists" inline> with inline content', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="include.md#exists" inline/>',
    '',
  ].join('\n');

  const include = [
    '# Include',
    '<seg id="exists">existing segment</seg>',
  ].join('\n');

  const json = {
    'index.md': index,
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  // `inline` renders as <span> instead of the block-level <div>.
  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<span>existing segment</span>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="include.md#exists" trim> with trimmed content', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="include.md#exists" trim/>',
    '',
  ].join('\n');

  const include = [
    '# Include',
    '<seg id="exists">\t\texisting segment\t\t</seg>',
  ].join('\n');

  const json = {
    'index.md': index,
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div>existing segment</div>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="include.md#doesNotExist"> with error <div>', async () => {
  const indexPath = path.resolve('index.md');
  const includePath = path.resolve('include.md');

  const index = [
    '# Index',
    '<include src="include.md#doesNotExist"/>',
    '',
  ].join('\n');

  const include = ['# Include'].join('\n');

  const expectedErrorMessage = `No such segment '#doesNotExist' in file: ${includePath}`
    + `\nMissing reference in ${indexPath}`;

  const json = {
    'index.md': index,
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    `<div style="color: red"><div style="color: red">${expectedErrorMessage}</div></div>`,
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="include.md#exists" optional> with <div>', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="include.md#exists" optional/>',
    '',
  ].join('\n');

  const include = [
    '# Include',
    '<seg id="exists">existing segment</seg>',
  ].join('\n');

  const json = {
    'index.md': index,
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div>existing segment</div>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile replaces <include src="include.md#doesNotExist" optional> with empty <div>', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="include.md#doesNotExist" optional/>',
    '',
  ].join('\n');

  const include = ['# Include'].join('\n');

  const json = {
    'index.md': index,
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  // Missing optional segment renders an empty <div>, not an error.
  const expected = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div></div>',
  ].join('\n');

  expect(result).toEqual(expected);
});

test('includeFile detects cyclic references for static cyclic includes', async () => {
  const indexPath = path.resolve('index.md');
  const includePath = path.resolve('include.md');

  const index = [
    '# Index',
    '<include src="include.md" />',
    '',
  ].join('\n');

  const include = [
    '# Include',
    '<include src="index.md" />',
    '',
  ].join('\n');

  const json = {
    'index.md': index,
    'include.md': include,
  };
  fs.vol.fromJSON(json, '');

  const expectedErrorMessage = [
    'Cyclic reference detected.',
    'Last 5 files processed:',
    `\t${indexPath}`,
    `\t${includePath}`,
    `\t${indexPath}`,
    `\t${includePath}`,
    `\t${indexPath}`,
  ].join('\n');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expected = `<div style="color: red">${expectedErrorMessage}</div>`;

  expect(result).toContain(expected);
});

test('process include should preserve included frontmatter data', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="exist.md" />',
    '',
  ].join('\n');

  const exist = [
    '<frontmatter>',
    '  title: This should be present',
    '</frontmatter>',
    '',
    '# Exist',
  ].join('\n');

  const json = {
    'index.md': index,
    'exist.md': exist,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expectedHtml = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div>',
    '<h1 id="exist"><span id="exist" class="anchor"></span>Exist</h1></div>',
  ].join('\n');

  const expectedFrontmatter = {
    title: 'This should be present',
  };

  expect(result).toEqual(expectedHtml);
  expect(nodeProcessor.frontMatter).toEqual(expectedFrontmatter);
});

test('process include with omitFrontmatter should discard included frontmatter data', async () => {
  const indexPath = path.resolve('index.md');

  const index = [
    '# Index',
    '<include src="exist.md" omitFrontmatter/>',
    '',
  ].join('\n');

  const exist = [
    '<frontmatter>',
    '  title: This should not be present',
    '</frontmatter>',
    '',
    '# Exist',
  ].join('\n');

  const json = {
    'index.md': index,
    'exist.md': exist,
  };
  fs.vol.fromJSON(json, '');

  const nodeProcessor = getNewDefaultNodeProcessor();
  const result = await nodeProcessor.process(indexPath, index);

  const expectedHtml = [
    '<h1 id="index"><span id="index" class="anchor"></span>Index</h1>',
    '<div>',
    '<h1 id="exist"><span id="exist" class="anchor"></span>Exist</h1></div>',
  ].join('\n');

  const expectedFrontMatter = {};

  expect(result).toEqual(expectedHtml);
  expect(nodeProcessor.frontMatter).toEqual(expectedFrontMatter);
});
class TreeNode:
    """Binary tree node holding a value and optional left/right children."""

    def __init__(self, value=0, left=None, right=None):
        self.value = value
        self.left = left
        self.right = right


class Solution:
    """Computes the diameter of a binary tree."""

    def findDiameter(self, root: TreeNode) -> int:
        """Return the number of edges on the longest path between any two
        nodes of the tree rooted at *root* (0 for an empty or single-node
        tree).
        """
        self.diameter = 0

        def depth(node: TreeNode) -> int:
            # Post-order walk: the longest path *through* a node is the sum
            # of its two subtree depths; track the best seen so far.
            if node is None:
                return 0
            left_depth = depth(node.left)
            right_depth = depth(node.right)
            if left_depth + right_depth > self.diameter:
                self.diameter = left_depth + right_depth
            return 1 + max(left_depth, right_depth)

        depth(root)
        return self.diameter
import pandas as pd
from sklearn.ensemble import RandomForestClassifier

# Load the wine dataset (expects 'wine.csv' in the working directory).
wine_data = pd.read_csv('wine.csv')

# Feature columns used for prediction.
# FIX: the original feature list also contained 'type', which is the
# prediction target below -- including the label among the features is
# data leakage, so it has been removed.
features = ['alcohol', 'sulphates', 'pH', 'quality']
x_train = wine_data[features]
y_train = wine_data['type']

# Train a random-forest classifier to predict the wine type.
model = RandomForestClassifier(n_estimators=100)
model.fit(x_train, y_train)

# Persisting is left disabled; note RandomForestClassifier has no .save()
# method -- use joblib.dump(model, 'wine_random_forest.joblib') instead.
# model.save('wine_random_forest.h5')
#!/bin/sh sudo yum install -y openshift-ansible-playbooks
/**
 * @file
 * Handle preview functionality on smart segment form.
 */
(function ($, Drupal) {
  Drupal.behaviors.previewFieldBehavior = {
    attach: function (context, settings) {
      // Segment preview checkboxes on the edit form.
      var segments = $('.smart-content-segment-set-edit-form .smart-segments-preview');

      // Make the checkboxes mutually exclusive (radio-like behaviour):
      // ticking one clears all the others.
      segments.each(function () {
        // .once() guards against double-binding when Drupal re-attaches
        // behaviors (e.g. after an AJAX update).
        $(this).once('previewFieldBehavior').on('click', function () {
          if (!this.checked) {
            return;
          }
          segments.not(this).prop('checked', false);
        });
      });
    }
  };
})(jQuery, Drupal);
package db; public class Compiler { private static Compiler compiler; private static String compilationMSG = "Compiling..."; private Compiler() {} public static Compiler compile() { if (compiler == null) { compiler = new Compiler(); } return compiler; } public void addCompilerMSG(String msg) { compilationMSG += msg + "\n"; } public void showMSG() { System.out.println(compilationMSG); } }
# Open the argument (URL or file) in Safari and wait (-W) until it closes.
# Earlier direct-binary approach kept for reference:
#SAFARI="/Applications/Safari.app/Contents/MacOS/Safari"
#exec "$SAFARI" $1
# FIX: quote "$1" so paths/URLs containing spaces are passed as one argument.
open -W -a Safari "$1"
/* * #%L * ImageJ software for multidimensional image processing and analysis. * %% * Copyright (C) 2009 - 2020 ImageJ developers. * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package net.imglib2.ops.function.real; import net.imglib2.ops.function.Function; import net.imglib2.ops.pointset.PointSet; import net.imglib2.type.numeric.RealType; /** * Abstract base class used by statistical function classes * * @author <NAME> * @param <T> * @deprecated Use net.imagej.ops instead. 
 */
// Base class for statistical functions: subclasses implement value() to
// extract a concrete statistic from a shared, reusable StatCalculator.
@Deprecated
public abstract class AbstractRealStatFunction<T extends RealType<T>>
	implements Function<PointSet, T>
{
	// -- instance variables --

	// The point function whose values the statistic is computed over.
	protected final Function<long[], T> otherFunc;

	// Allocated lazily on first compute() and then reset (not reallocated)
	// on subsequent calls to avoid churn.
	private StatCalculator<T> calculator;

	// -- constructor --

	/**
	 * @param otherFunc the point function whose values the statistic is
	 *          computed over
	 */
	public AbstractRealStatFunction(Function<long[],T> otherFunc) {
		this.otherFunc = otherFunc;
		this.calculator = null;
	}

	// -- public api --

	/**
	 * Computes the statistic over the given point set and stores it (as a
	 * real value) in {@code output}.
	 */
	public void compute(PointSet input, T output) {
		// Lazy allocation; later calls only re-point the calculator at the
		// new input.
		if (calculator == null) {
			calculator = new StatCalculator<T>(otherFunc, input);
		}
		else calculator.reset(otherFunc, input);
		double value = value(calculator);
		output.setReal(value);
	}

	public T createOutput() {
		return otherFunc.createOutput();
	}

	// -- protected api --

	/** Subclass hook: pull the desired statistic out of the calculator. */
	abstract protected double value(StatCalculator<T> calc);
}
<gh_stars>10-100
# frozen_string_literal: true

require 'tempfile'

# captures arbitrary io
# Temporarily reopens +io+ onto a Tempfile, runs the block, and returns
# everything written to +io+ during the block. The original stream is
# restored in the ensure clause even if the block raises.
def capture(io)
  captured_io = Tempfile.new
  orig_stdout = io.dup      # keep a duplicate FD so we can restore later
  io.reopen captured_io
  yield
  io.rewind
  captured_io.read
ensure
  captured_io.unlink
  io.reopen orig_stdout
end

# captures $stdout
# @example
#   out = capture_out do
#     puts '1' # captured
#     warn '2' # printed
#     puts '3' # captured
#     warn '4' # printed
#   end
#   out # => "1\n3\n"
def capture_out
  capture($stdout) do
    yield
  end
end

# captures $stderr
# @example
#   err = capture_err do
#     puts '1' # printed
#     warn '2' # captured
#     puts '3' # printed
#     warn '4' # captured
#   end
#   err #=> "2\n4\n"
def capture_err
  capture($stderr) do
    yield
  end
end

# captures $stdout and $stderr
# Nests the two single-stream captures so both are redirected for the
# duration of the block.
# @example
#   out, err = capture_io do
#     puts '1' # captured
#     warn '2' # captured
#     puts '3' # captured
#     warn '4' # captured
#   end
#   out # => "1\n3\n"
#   err #=> "2\n4\n"
def capture_io
  err = nil
  out = capture_out do
    err = capture_err do
      yield
    end
  end
  [out, err]
end
#!/bin/bash
#
# Copyright (c) 2018-2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Installs the CI dependencies for Kata Containers on a Fedora-like host.
# Usage: <script> <setup_type>  where setup_type is "minimal" or "default".

set -e

cidir=$(dirname "$0")
source /etc/os-release || source /usr/lib/os-release
source "${cidir}/lib.sh"

TEST_CGROUPSV2="${TEST_CGROUPSV2:-false}"

echo "Install chronic"
sudo -E dnf -y install moreutils

if [ "${TEST_CGROUPSV2}" == "true" ]; then
	echo "Install podman"
	version=$(get_test_version "externals.podman.version")
	sudo -E dnf -y install podman-"${version}"
fi

# Packages always installed, regardless of setup type.
declare -A minimal_packages=( \
	[spell-check]="hunspell hunspell-en-GB hunspell-en-US pandoc" \
	[xml_validator]="libxml2" \
	[yaml_validator]="yamllint" \
)

# Extra packages installed only for the "default" setup type.
declare -A packages=( \
	[general_dependencies]="xfsprogs dnf-plugins-core python pkgconfig util-linux libgpg-error-devel" \
	[kata_containers_dependencies]="libtool automake autoconf bc pixman numactl-libs" \
	[qemu_dependencies]="libcap-devel libattr-devel libcap-ng-devel zlib-devel pixman-devel librbd-devel libpmem-devel" \
	[kernel_dependencies]="elfutils-libelf-devel flex" \
	[crio_dependencies]="btrfs-progs-devel device-mapper-devel glib2-devel glibc-devel glibc-static gpgme-devel libassuan-devel libseccomp-devel libselinux-devel" \
	[bison_binary]="bison" \
	[os_tree]="ostree-devel" \
	[metrics_dependencies]="jq" \
	[cri-containerd_dependencies]="libseccomp-devel btrfs-progs-devel libseccomp-static" \
	[crudini]="crudini" \
	[procenv]="procenv" \
	[haveged]="haveged" \
	[gnu_parallel]="parallel" \
	[libsystemd]="systemd-devel" \
	[redis]="redis" \
	[versionlock]="python3-dnf-plugin-versionlock" \
)

main() {
	local setup_type="$1"
	[ -z "$setup_type" ] && die "need setup type"

	local pkgs_to_install
	local pkgs

	# Collect the minimal package set first.
	for pkgs in "${minimal_packages[@]}"; do
		info "The following package will be installed: $pkgs"
		pkgs_to_install+=" $pkgs"
	done

	# "default" additionally pulls in the full build/test dependency set.
	if [ "$setup_type" = "default" ]; then
		for pkgs in "${packages[@]}"; do
			info "The following package will be installed: $pkgs"
			pkgs_to_install+=" $pkgs"
		done
	fi

	# Single dnf transaction for everything collected above.
	chronic sudo -E dnf -y install $pkgs_to_install

	[ "$setup_type" = "minimal" ] && exit 0

	echo "Install kata containers dependencies"
	chronic sudo -E dnf -y groupinstall "Development tools"

	if [ "$KATA_KSM_THROTTLER" == "yes" ]; then
		echo "Install ${KATA_KSM_THROTTLER_JOB}"
		chronic sudo -E dnf -y install ${KATA_KSM_THROTTLER_JOB}
	fi
}

main "$@"
<reponame>anedyalkov/JS-Applications function generatesMatrix(numberOfRows, numberOfCols) { let matrix = []; for (let row = 0; row < numberOfRows; row++) { matrix[row] = []; for (let col = 0; col < numberOfCols; col++) { matrix[row][col] = 0; } } let counter = 1; let direction = 'right'; let currentRow = 0; let currentCol = 0; for (let i = 0; i < numberOfRows * numberOfCols; i++) { matrix[currentRow][currentCol] = counter; if (direction === 'right') { if (currentCol + 1 >= numberOfCols || matrix[currentRow][currentCol + 1] !== 0) { direction = 'down'; currentRow++; } else { currentCol++; } } else if (direction === 'down') { if (currentRow + 1 >= numberOfRows || matrix[currentRow + 1][currentCol] !== 0) { direction = 'left'; currentCol--; } else { currentRow++; } } else if (direction === 'left') { if (currentCol - 1 < 0 || matrix[currentRow][currentCol - 1] !== 0) { direction = 'up'; currentRow--; } else { currentCol--; } } else if (direction === 'up') { if (currentRow - 1 < 0 || matrix[currentRow - 1][currentCol] !== 0) { direction = 'right'; currentCol++; } else { currentRow--; } } counter++; } for (let row of matrix) { console.log(row.join(' ')); } } generatesMatrix(3, 3);
<reponame>matthew-gerstman/code-surfer
import { useDeck } from "mdx-deck";
import React from "react";

// Registers this slide's notes with the mdx-deck context once, on mount.
export function useNotes(notesElements) {
  const context = useDeck();
  React.useEffect(() => {
    // Bail out when the deck context or its register API is unavailable
    // (e.g. older mdx-deck versions, or rendering outside a deck).
    if (!context || !context.register) return;
    if (typeof context.index === "undefined") return;
    const notes = getNotesFromElements(notesElements);
    context.register(context.index, { notes });
  }, []);
}

// Converts an array of <Notes> React elements into the notes values the
// deck stores: null for empty steps, an {inline, text} object for inline
// notes, and plain children otherwise.
function getNotesFromElements(notesElements) {
  const notes = notesElements.map(element => {
    if (!element) {
      // this is a step with empty notes
      return null;
    }

    const { props } = element;

    if (props.inline) {
      // this is <Notes inline={true} />
      return { inline: true, text: props.children };
    }

    // this is <Notes>something</Notes>
    // we shouldn't return an object here,
    // to be compatible with the default Presenter
    return props && props.children;
  });

  if (notes.length) {
    const lastNotes = notes[notes.length - 1];
    // we add an extra EOL to the last step
    notes[notes.length - 1] = (lastNotes || "") + "\n";
  }

  return notes;
}

// Flattens one stored notes value (see above) back into plain text.
export function getTextFromNotes(notes) {
  if (notes === null) {
    // this is a step with empty notes
    // we don't add extra lines here
    // to allow a line of text with multiple notes
    return "";
  }

  if (typeof notes === "object") {
    // this comes from a step with inline=true
    // but we check again just in case
    return notes.text + (notes.inline ? "" : "\n");
  } else {
    // this could be an empty note from any slide
    // or a note from a step without the inline prop
    return notes + "\n";
  }
}
#!/usr/bin/env bash
# shellcheck disable=SC2091
###############################################################################
# shunit-style test: verifies that `avalanche-cli.sh admin memory-profile`
# emits the expected curl invocation against the Avalanche admin API.

# Command under test.
function cmd { printf "./avalanche-cli.sh admin memory-profile" ; }

# Asserts that $1 (the generated command line) matches the expected curl
# call: URL, content-type header and JSON-RPC payload are checked field by
# field (curl args are at fixed word positions 3/5/7), then the whole line.
function check {
    local result="$1" ;
    local result_u ; result_u=$(printf '%s' "$result" | cut -d' ' -f3) ;
    local result_h ; result_h=$(printf '%s' "$result" | cut -d' ' -f5) ;
    local result_d ; result_d=$(printf '%s' "$result" | cut -d' ' -f7) ;
    local expect_u ; expect_u="'https://api.avax.network/ext/admin'" ;
    assertEquals "$expect_u" "$result_u" ;
    local expect_h ; expect_h="'content-type:application/json'" ;
    assertEquals "$expect_h" "$result_h" ;
    # Expected JSON-RPC body, assembled piecewise for readability.
    local expect_d ; expect_d="'{" ;
    expect_d+='"jsonrpc":"2.0",' ;
    expect_d+='"id":1,' ;
    expect_d+='"method":"admin.memoryProfile",' ;
    expect_d+='"params":{}' ;
    expect_d+="}'" ;
    assertEquals "$expect_d" "$result_d" ;
    local expect="curl --url $expect_u --header $expect_h --data $expect_d" ;
    assertEquals "$expect" "$result" ;
}

function test_admin__memory_profile_1 {
    check "$(AVAX_ID_RPC=1 $(cmd))" ;
}
###############################################################################
###############################################################################
#!/bin/bash -e
# Name: PyAnime4K setup script for ubuntu
# Author: TianZerL - forked 2/26/22 with much thanks to the author
#
# Builds the Anime4KCPP C wrapper, packages pyanime4k as a wheel, and
# drops the wheel into $INSTALLATION_PATH.

# Destination for the built wheel: first CLI argument, or a default under
# $HOME. (Quoted so paths with spaces survive.)
if [ -n "$1" ]; then
    export INSTALLATION_PATH="$1"
else
    export INSTALLATION_PATH="$HOME/pyanime4k_wheel/"
fi

TEMP="/tmp/pyanime4k"

# FIX: the clone URLs contained a stray double slash after the username.
git clone https://github.com/TianZerL/pyanime4k.git "$TEMP/pyanime4k"

# Build dependencies. FIX: use apt-get consistently (apt is intended for
# interactive use and warns in scripts).
apt-get update
apt-get install -y --no-install-recommends libopencv-dev ocl-icd-opencl-dev cmake python3-pip

# Build the Anime4KCPP C wrapper library (libac.so).
git clone -b v2.5.0 https://github.com/TianZerL/Anime4KCPP.git "$TEMP/anime4kcpp"
mkdir -v "$TEMP/anime4kcpp/build"
cd "$TEMP/anime4kcpp/build"
cmake -DBuild_CLI=OFF -DBuild_C_wrapper=ON -DBuild_C_wrapper_with_core=ON ..
make -j"$(nproc)"
mv -v "$TEMP/anime4kcpp/build/bin/libac.so" "$TEMP/pyanime4k/pyanime4k/wrapper"

# Build the pyanime4k wheel.
cd "$TEMP/pyanime4k"
pip3 install -r requirements.txt
pip3 install setuptools
pip3 install wheel
python3 "$TEMP/pyanime4k/setup.py" bdist_wheel

# Publish the wheel and clean up the scratch directory.
mv -v "$TEMP/pyanime4k/dist" "$INSTALLATION_PATH"
rm -rf "$TEMP"

echo "All finished."
echo "Your wheel file of pyanime4k was stored in $INSTALLATION_PATH"
echo "Use pip install $INSTALLATION_PATH$(ls $INSTALLATION_PATH) to install it"
/*
 *    Copyright 2009-2012 The MyBatis Team
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */
package org.apache.ibatis.cache.decorators;

import java.util.concurrent.locks.ReadWriteLock;

import org.apache.ibatis.cache.Cache;

/**
 * Cache decorator that serialises access to the delegate through the
 * delegate's ReadWriteLock: read operations share the read lock, mutating
 * operations take the exclusive write lock.
 */
public class SynchronizedCache implements Cache {

  private Cache delegate;

  public SynchronizedCache(Cache delegate) {
    this.delegate = delegate;
  }

  // -- lock helpers (the lock object itself always comes from the delegate) --

  private void lockShared() {
    getReadWriteLock().readLock().lock();
  }

  private void unlockShared() {
    getReadWriteLock().readLock().unlock();
  }

  private void lockExclusive() {
    getReadWriteLock().writeLock().lock();
  }

  private void unlockExclusive() {
    getReadWriteLock().writeLock().unlock();
  }

  // -- Cache implementation --

  public String getId() {
    return delegate.getId();
  }

  public int getSize() {
    lockShared();
    try {
      return delegate.getSize();
    } finally {
      unlockShared();
    }
  }

  public void putObject(Object key, Object object) {
    lockExclusive();
    try {
      delegate.putObject(key, object);
    } finally {
      unlockExclusive();
    }
  }

  public Object getObject(Object key) {
    lockShared();
    try {
      return delegate.getObject(key);
    } finally {
      unlockShared();
    }
  }

  public Object removeObject(Object key) {
    lockExclusive();
    try {
      return delegate.removeObject(key);
    } finally {
      unlockExclusive();
    }
  }

  public void clear() {
    lockExclusive();
    try {
      delegate.clear();
    } finally {
      unlockExclusive();
    }
  }

  public ReadWriteLock getReadWriteLock() {
    return delegate.getReadWriteLock();
  }

  // Identity semantics are forwarded to the delegate.
  public int hashCode() {
    return delegate.hashCode();
  }

  public boolean equals(Object obj) {
    return delegate.equals(obj);
  }
}
import { BaseApi } from '../BaseAPI'; import { Application, Request, Response } from 'express'; export class CommonApi extends BaseApi { constructor() { super(); // Initialize any common API configurations here } register(express: Application): void { // Register routes using the provided Express application express.get('/common', (req, res) => { res.send('Common API is working'); }); // Add more route registrations as needed } uploadImage(req: Request, res: Response): Promise<void> { return new Promise<void>((resolve, reject) => { // Handle image upload logic here // For example, save the uploaded image and resolve the promise resolve(); // If an error occurs during image upload, reject the promise // reject(new Error('Image upload failed')); }); } private init() { // Any private initialization logic for the CommonApi class } }
import * as React from "react";
import asc from "@app/AppStateContainer";
import { logout } from "@async/logout";
import { History } from "history";
import { Moment } from "moment";
import { Option } from "fp-ts/lib/Option";
import { Link } from "react-router-dom";
import {apBasePath} from "@paths/ap/_base"
import {jpBasePath} from "@paths/jp/_base"
import { apSettingsPagePath } from "@paths/ap/settings";
import { jpSettingsPagePath } from "@paths/jp/settings";

// Top navigation fragment: system time, program switch link, settings link
// and a logout action. Which program ("ap" adult / "jp" junior) we are in
// is derived from the first path segment of the current URL.
export default (props: {history: History<any>, sysdate: Option<Moment>, showProgramLink: boolean}) => {
	const pathComponents = props.history.location.pathname.split("/");
	// pathname starts with "/", so index 1 is the program segment.
	const program = pathComponents[1];

	// Cross-program link: shown only when enabled, and always points at the
	// *other* program's base path.
	const switchLink = (function() {
		if (props.showProgramLink && program == "jp") {
			return <Link key="ap" to={apBasePath.getPathFromArgs({})}>&nbsp;&nbsp;&nbsp;Adult Program</Link>;
		} else if (props.showProgramLink && program == "ap") {
			return <Link key="jp" to={jpBasePath.getPathFromArgs({})}>&nbsp;&nbsp;&nbsp;Junior Program</Link>;
		} else return null;
	}());

	// After logout, return to the base path of the current program.
	const logoutLink = (
		program == 'ap'
		? apBasePath.getPathFromArgs({})
		: jpBasePath.getPathFromArgs({})
	);

	// Settings page link for the current program.
	const settingsPage = (
		program == 'ap'
		? <Link key="settings" to={apSettingsPagePath.getPathFromArgs({})}>&nbsp;&nbsp;&nbsp;Change Email/Password</Link>
		: <Link key="settings" to={jpSettingsPagePath.getPathFromArgs({})}>&nbsp;&nbsp;&nbsp;Change Email/Password</Link>
	);

	// Assemble the nav items; filter(Boolean) drops the null entries
	// (absent sysdate, hidden program link).
	const navComponents = [
		props.sysdate.map(d =>
			<React.Fragment key="sysdate">System Time:  <span id="systime">{d.format("hh:mm:ss A")}</span> (refresh your browser to update!)</React.Fragment>)
			.getOrElse(null),
		switchLink,
		settingsPage,
		<a key="logout" href="#" onClick={() => {
			// Fire the logout request, then clear client state; navigation
			// happens immediately (not awaited).
			logout.send({type: "json", jsonData: {}}).then(() => {
				asc.updateState.login.logout()
			})
			props.history.push(logoutLink);
		}}>&nbsp;&nbsp;&nbsp;Logout</a>
	].filter(Boolean);

	return (<React.Fragment>
		{navComponents}
	</React.Fragment>);
}
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Parse the command line parameters
# that runCust will give out
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments:
# FIX: use arithmetic -gt; inside [[ ]], ">" performs a lexicographic
# *string* comparison of $# against "0", which is not a count check.
while [[ $# -gt 0 ]]
do
key="$1"

case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
    echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    CONFIG_DIR="$2"
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift
    ;;
    *)
    # FIX: corrected "Unkown" typo in the diagnostic message.
    echo Unknown option $key
    ;;
esac
shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job
python $CONFIG_DIR/examples/AnytimeNetwork/resnet-ann.py \
--data_dir=$DATA_DIR \
--log_dir=$LOG_DIR \
--model_dir=$MODEL_DIR \
--load=${MODEL_DIR}/checkpoint \
--ds_name=cifar100 \
-f=4 \
--opt_at=-1 \
-n=9 \
-c=64 \
-s=1 \
--samloss=6 \
--batch_size=128 \
--exp_gamma=0.3 --sum_rand_ratio=2 --last_reward_rate=0.8 \
#!/bin/bash
set -e

# Run nginx in the background ("daemon off" keeps it attached to this shell
# rather than self-daemonising).
nginx -g "daemon off;" &

# Wait for kartotherian to be ready
# NOTE(review): this is a fixed sleep, not an actual readiness probe of
# kartotherian -- confirm 5s is sufficient in the deployment environment.
sleep 5

# Run expire tiles service in the foreground; this script (and typically
# the container) lives for as long as expire.sh runs.
./expire.sh
from typing import Any, Dict, Generic, List, Tuple, TypeVar, Union

from pydantic import BaseModel as PydanticBaseModel

import uvicore
from uvicore.contracts import Model as ModelInterface
from uvicore.support.classes import hybridmethod
from uvicore.support.dumper import dd, dump
from uvicore.orm.fields import HasMany
from uvicore.orm.mapper import Mapper
from uvicore.orm.query import QueryBuilder

# Generic entity type parameter for the model.
E = TypeVar("E")

from uvicore.orm.model import Model as X

#class _Model(Generic[E], ModelInterface, PydanticBaseModel):
class Model(X[E]):
#class Model(Generic[E], PydanticBaseModel, metaclass=ModelMetaclass):
#class _Model(PydanticBaseModel):
    """Experimental subclass of the ORM Model that overrides the query
    entrypoints. The commented-out class lines above are alternative base
    class experiments left in place by the author.
    """

    @classmethod
    def query(entity) -> QueryBuilder[E]:
        # NOTE(review): this dump() looks like leftover debug output used to
        # confirm the override is hit -- verify and remove once confirmed.
        dump('query override----------------------------------')
        # Build a query builder parameterised on (and bound to) this entity.
        return QueryBuilder[entity](entity)

    @classmethod
    def query2(entity):
        # Placeholder/experimental second entrypoint; returns a marker string.
        return 'query2 here!!!!!!!!!!'
#!/bin/sh
# CocoaPods "embed frameworks" build phase: copies each pod framework into
# the app bundle, strips architectures the target does not build for, and
# re-signs when the build settings require it.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies framework $1 into the target's frameworks directory, stripping
# invalid architectures and re-signing as needed.
install_framework()
{
  # Resolve the framework source: built products dir (full path or just the
  # basename), or an absolute path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies the dSYM of a vendored framework
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
  fi
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"

    # When parallel signing is enabled, background each codesign; the
    # trailing "wait" at the bottom of the script joins them.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}

# Frameworks embedded for the Debug configuration only (test dependencies).
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/PASTA/PASTA.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Metron/Metron.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Quick/Quick.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Nimble/Nimble.framework"
fi

if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
package dev.arkav.openoryx.net.packets.c2s;

import dev.arkav.openoryx.net.data.Packet;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Client-to-server packet requesting key information for a single item
 * type. Wire format: one 4-byte int.
 */
@SuppressWarnings("ALL")
public class KeyInfoRequestPacket implements Packet {

    /** Identifier of the item whose key info is requested. */
    @SuppressWarnings("WeakerAccess")
    public int itemType;

    public KeyInfoRequestPacket() {
        // Field is populated either directly or via read().
    }

    /** Serialises this packet: writes the item type as a 4-byte int. */
    public void write(DataOutput out) throws IOException {
        out.writeInt(itemType);
    }

    /** Deserialises this packet: reads the 4-byte item type. */
    public void read(DataInput in) throws IOException {
        itemType = in.readInt();
    }
}
-- Copy distinct user rows from the tweet staging table into users.
-- NOTE(review): the target column is spelled "descrption" while the source
-- column is "descrpition" -- both look like typos of "description".
-- Confirm against the actual table schemas before renaming either side;
-- the spellings here must match the real DDL.
INSERT INTO users (user_id, name, nickname, descrption, location, followers_count, tweets_count, creation_date, is_verified)
SELECT DISTINCT user_id, name, nickname, descrpition, user_location, followers_count, tweets_count, user_date, verified
FROM staging_tweets;
#!/bin/sh
# Run the setup scripts
# TODO: set a flag that will stop this script from running if already run

# Resolve the directory this script lives in and cd there so the relative
# paths below work regardless of the caller's cwd.
# FIX: quote "$SCRIPTPATH" so paths containing spaces don't break the cd.
SCRIPT=$(readlink -f "$0")
SCRIPTPATH=$(dirname "$SCRIPT")
cd "$SCRIPTPATH"

# Add Apache Ports and Domain Mappings
(cd apache ; sh add-ports.sh)

# Create tables (piped YES auto-confirms the destructive prompt)
(cd /vagrant/www/src/shrub/tools; echo YES | php table-create)

# Setup Sphinx: keep the stock config aside, link in the project's config,
# and expose convenience symlinks in the vagrant home directory.
mv /etc/sphinxsearch/sphinx.conf /etc/sphinxsearch/_sphinx.conf
ln -s /vagrant/www/private-search/sphinx.conf /etc/sphinxsearch/sphinx.conf
ln -s /var/lib/sphinxsearch/data /home/vagrant/sphinx-data
ln -s /vagrant/www/private-search/sphinx.conf /home/vagrant/sphinx.conf
service sphinxsearch restart
import { Component } from '@angular/core';
import { NavController } from 'ionic-angular';
// NOTE(review): FormBuilder/Validators from '@angular/common' is the
// pre-release Angular 2 location; later versions export them from
// '@angular/forms' -- confirm against the project's Angular version
// before changing this import.
import { FormBuilder, Validators } from '@angular/common';

@Component({
  templateUrl: 'build/pages/contactus/contactus.html',
})
export class ContactusPage {
  // Reactive form group with required name and email fields.
  contactForm: any;

  constructor(private nav: NavController, fb: FormBuilder) {
    this.nav = nav;
    this.contactForm = fb.group({
      name: ['', Validators.required],
      email: ['', Validators.required]
    });
  }

  // Logs the form controls; actual submission handling is not implemented.
  submitForm() {
    // debugger;
    console.log(this.contactForm.controls);
  }
}
#!/bin/bash
# Prepare Kaldi: build the tools (OpenFst, OpenBLAS) and then Kaldi itself,
# statically linked, CPU-only.

# FIX: default the parallel job count to all CPUs; previously an unset or
# empty $cores made "make -j" run with an unlimited job count, which can
# exhaust memory. An exported $cores still takes precedence.
: "${cores:=$(nproc)}"

cd kaldi/tools
#make clean
make -j "${cores}"
make -j "${cores}" openfst
./extras/install_openblas.sh
cd ../src
# make clean (sometimes helpful after upgrading upstream?)
./configure --static --static-math=yes --static-fst=yes --use-cuda=no --openblas-root=../tools/OpenBLAS/install --fst-root=../tools/openfst-1.6.7
make -j "${cores}" depend
cd ../../
<reponame>e-money/bep3
package bep3

import (
	"fmt"

	sdk "github.com/cosmos/cosmos-sdk/types"

	"github.com/e-money/bep3/module/types"
)

// InitGenesis initializes the store state from a genesis state.
// Panics (aborting chain start) on any invalid genesis data: missing
// module account, invalid swaps, or supply totals that don't reconcile
// with the swaps being imported.
func InitGenesis(ctx sdk.Context, keeper Keeper, accountKeeper types.AccountKeeper, gs GenesisState) {
	// Check if the module account exists
	moduleAcc := accountKeeper.GetModuleAccount(ctx, ModuleName)
	if moduleAcc == nil {
		panic(fmt.Sprintf("%s module account has not been set", ModuleName))
	}

	if err := gs.Validate(); err != nil {
		panic(fmt.Sprintf("failed to validate %s genesis state: %s", ModuleName, err))
	}

	keeper.SetPreviousBlockTime(ctx, gs.PreviousBlockTime)

	keeper.SetParams(ctx, gs.Params)
	for _, supply := range gs.Supplies.AssetSupplies {
		keeper.SetAssetSupply(ctx, supply, supply.GetDenom())
	}

	// Running totals rebuilt from the swaps below; reconciled against the
	// stored asset supplies at the end.
	var incomingSupplies sdk.Coins
	var outgoingSupplies sdk.Coins
	for _, swap := range gs.AtomicSwaps {
		if swap.Validate() != nil {
			panic(fmt.Sprintf("invalid swap %s", swap.GetSwapID()))
		}

		// Atomic swap assets must be both supported and active
		err := keeper.ValidateLiveAsset(ctx, swap.Amount[0])
		if err != nil {
			panic(err)
		}

		keeper.SetAtomicSwap(ctx, swap)

		// Add swap to block index or longterm storage based on swap.Status
		// Increment incoming or outgoing supply based on swap.Direction
		switch swap.Direction {
		case Incoming:
			switch swap.Status {
			case Open:
				// This index expires unclaimed swaps
				keeper.InsertIntoByTimestamp(ctx, swap)
				incomingSupplies = incomingSupplies.Add(swap.Amount...)
			case Expired:
				incomingSupplies = incomingSupplies.Add(swap.Amount...)
			case Completed:
				// This index stores swaps until deletion
				keeper.InsertIntoLongtermStorage(ctx, swap)
			default:
				panic(fmt.Sprintf("swap %s has invalid status %s", swap.GetSwapID(), swap.Status.String()))
			}
		case Outgoing:
			switch swap.Status {
			case Open:
				keeper.InsertIntoByTimestamp(ctx, swap)
				outgoingSupplies = outgoingSupplies.Add(swap.Amount...)
			case Expired:
				outgoingSupplies = outgoingSupplies.Add(swap.Amount...)
			case Completed:
				keeper.InsertIntoLongtermStorage(ctx, swap)
			default:
				panic(fmt.Sprintf("swap %s has invalid status %s", swap.GetSwapID(), swap.Status.String()))
			}
		default:
			panic(fmt.Sprintf("swap %s has invalid direction %s", swap.GetSwapID(), swap.Direction.String()))
		}
	}

	// Asset's given incoming/outgoing supply much match the amount of coins in incoming/outgoing atomic swaps
	supplies := keeper.GetAllAssetSupplies(ctx)
	for _, supply := range supplies.AssetSupplies {
		incomingSupply := incomingSupplies.AmountOf(supply.GetDenom())
		if !supply.IncomingSupply.Amount.Equal(incomingSupply) {
			panic(fmt.Sprintf("asset's incoming supply %s does not match amount %s in incoming atomic swaps",
				supply.IncomingSupply, incomingSupply))
		}
		outgoingSupply := outgoingSupplies.AmountOf(supply.GetDenom())
		if !supply.OutgoingSupply.Amount.Equal(outgoingSupply) {
			panic(fmt.Sprintf("asset's outgoing supply %s does not match amount %s in outgoing atomic swaps",
				supply.OutgoingSupply, outgoingSupply))
		}
		// Every supply component -- and the incoming+current combination --
		// must respect the per-asset supply limit.
		limit, err := keeper.GetSupplyLimit(ctx, supply.GetDenom())
		if err != nil {
			panic(err)
		}
		if supply.CurrentSupply.Amount.GT(limit.Limit) {
			panic(fmt.Sprintf("asset's current supply %s is over the supply limit %s",
				supply.CurrentSupply, limit.Limit))
		}
		if supply.IncomingSupply.Amount.GT(limit.Limit) {
			panic(fmt.Sprintf("asset's incoming supply %s is over the supply limit %s",
				supply.IncomingSupply, limit.Limit))
		}
		if supply.IncomingSupply.Amount.Add(supply.CurrentSupply.Amount).GT(limit.Limit) {
			panic(fmt.Sprintf("asset's incoming supply + current supply %s is over the supply limit %s",
				supply.IncomingSupply.Add(supply.CurrentSupply), limit.Limit))
		}
		if supply.OutgoingSupply.Amount.GT(limit.Limit) {
			panic(fmt.Sprintf("asset's outgoing supply %s is over the supply limit %s",
				supply.OutgoingSupply, limit.Limit))
		}
	}
}

// ExportGenesis writes the current store values to a genesis file, which can be imported again with InitGenesis
func ExportGenesis(ctx sdk.Context, k Keeper) (data *GenesisState) {
	params := k.GetParams(ctx)
	swaps := k.GetAllAtomicSwaps(ctx)
	supplies := k.GetAllAssetSupplies(ctx)
	// Fall back to the module default when no previous block time was stored.
	previousBlockTime, found := k.GetPreviousBlockTime(ctx)
	if !found {
		previousBlockTime = DefaultPreviousBlockTime
	}
	return NewGenesisState(params, swaps, supplies, previousBlockTime)
}
/**
 * Plain data holder for a single contact's details: name, phone number
 * and e-mail address, with conventional accessors and mutators.
 */
public class Contact {

    // Contact details (package-private, as in the original layout).
    String firstName;
    String lastName;
    String phoneNumber;
    String email;

    /** Creates an empty contact with every field set to the empty string. */
    public Contact() {
        this("", "", "", "");
    }

    /** Creates a contact populated with the supplied details. */
    public Contact(String firstName_, String lastName_, String phoneNumber_, String email_) {
        this.firstName = firstName_;
        this.lastName = lastName_;
        this.phoneNumber = phoneNumber_;
        this.email = email_;
    }

    // ---- Accessors ----

    /** @return the contact's first name */
    public String getFirstName() {
        return firstName;
    }

    /** @return the contact's last name */
    public String getLastName() {
        return lastName;
    }

    /** @return the contact's phone number */
    public String getPhoneNumber() {
        return phoneNumber;
    }

    /** @return the contact's e-mail address */
    public String getEmail() {
        return email;
    }

    // ---- Mutators ----

    /** Replaces the contact's first name. */
    public void setFirstName(String firstName_) {
        this.firstName = firstName_;
    }

    /** Replaces the contact's last name. */
    public void setLastName(String lastName_) {
        this.lastName = lastName_;
    }

    /** Replaces the contact's phone number. */
    public void setPhoneNumber(String phoneNumber_) {
        this.phoneNumber = phoneNumber_;
    }

    /** Replaces the contact's e-mail address. */
    public void setEmail(String email_) {
        this.email = email_;
    }
}
// Unit tests for Taro.request: the global fetch API is replaced with a stub
// that always succeeds, and the wrapper is checked against each responseType.
import Taro from '../index.js'

Taro.initNativeApi(Taro)

describe('request', () => {
  beforeEach(() => {
    // Stub fetch so every request resolves with a fixed 200 response whose
    // JSON body is { data: 'calorie' } and whose text body is a fixed string.
    const fetch = jest.fn(() => {
      return new Promise((resolve, reject) => {
        resolve({
          ok: true,
          status: 200,
          headers: {},
          json: () => {
            return Promise.resolve({
              data: 'calorie'
            })
          },
          text: () => {
            return Promise.resolve('卡路里卡路里卡路')
          }
        })
      })
    })
    global.fetch = fetch
  })

  describe('request', () => {
    // Passing a bare URL string (no options object) must still work.
    test('直接传入url时能正常返回', async () => {
      const expectData = { data: 'calorie' }
      const url = 'https://test.taro.com/v1'
      const res = await Taro.request(url)
      expect(res.data).toEqual(expectData)
    })

    // responseType 'json' should yield the parsed JSON object.
    test('接口数据返回json对象', async () => {
      const expectData = { data: 'calorie' }
      const url = 'https://test.taro.com/v1'
      const options = {
        url,
        responseType: 'json'
      }
      const res = await Taro.request(options)
      expect(res.data).toEqual(expectData)
    })

    // responseType 'text' should yield the raw body string.
    test('接口数据返回text', async () => {
      const expectData = '卡路里卡路里卡路'
      const url = 'https://test.taro.com/v1'
      const options = {
        url,
        responseType: 'text'
      }
      const res = await Taro.request(options)
      expect(res.data).toMatch(expectData)
    })
  })
})
#!/usr/bin/env bash
# Integration tests for `oomcli export`: push stream features at known
# timestamps (t0..t4), import a batch sample, then assert the CSV exported
# at each timestamp matches the expected snapshot.
SDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) && cd "$SDIR" || exit 1
source ./util.sh

init_store
register_features

# Capture a millisecond timestamp before/after each mutation so exports can
# be pinned to a point in time. ($1 overrides the timestamp for all steps.)
t0=${1:-$(perl -MTime::HiRes=time -E 'say int(time * 1000)')}
oomcli push --entity-key 1 --group user-click --feature last_5_click_posts=1,2,3,4,5 --feature number_of_user_starred_posts=10
t1=${1:-$(perl -MTime::HiRes=time -E 'say int(time * 1000)')}
oomcli push --entity-key 1 --group user-click --feature last_5_click_posts=2,3,4,5,6 --feature number_of_user_starred_posts=11
t2=${1:-$(perl -MTime::HiRes=time -E 'say int(time * 1000)')}
import_student_sample
t3=${1:-$(perl -MTime::HiRes=time -E 'say int(time * 1000)')}
oomcli push --entity-key 2 --group user-click --feature last_5_click_posts=1,2,3,4,5 --feature number_of_user_starred_posts=10
t4=${1:-$(perl -MTime::HiRes=time -E 'say int(time * 1000)')}

# Exporting only unregistered features must fail with a clear error.
oomcli_export_no_register_feature() {
    case="export all no register feature"
    actual=$(oomcli export --feature a.b,a.c --unix-milli $t0 2>&1 || true)
    expected='Error: failed exporting features: invalid feature names [a.b a.c]'
    assert_eq "$case" "$expected" "$actual"
}

# A mix of registered and unregistered features must also fail.
oomcli_export_has_no_register_feature() {
    case="export has no register feature"
    actual=$(oomcli export --feature user-click.last_5_click_posts,user-click.a --unix-milli $t0 2>&1 ||true)
    expected='Error: failed exporting features: invalid feature names [user-click.a]'
    assert_eq "$case" "$expected" "$actual"
}

# Export at t1 sees the first push for entity 1.
oomcli_export_push_feature() {
    case="push feature"
    expected='user,user-click.last_5_click_posts,user-click.number_of_user_starred_posts
1,"1,2,3,4,5",10
'
    actual=$(oomcli export --feature user-click.last_5_click_posts,user-click.number_of_user_starred_posts --unix-milli $t1 -o csv)
    assert_eq "$case" "$(sort <<< "$expected")" "$(sort <<< "$actual")"
}

# Export at t2 sees the updated values from the second push.
oomcli_export_update_feature() {
    case="update feature"
    expected='user,user-click.last_5_click_posts,user-click.number_of_user_starred_posts
1,"2,3,4,5,6",11
'
    actual=$(oomcli export --feature user-click.last_5_click_posts,user-click.number_of_user_starred_posts --unix-milli $t2 -o csv)
    assert_eq "$case" "$(sort <<< "$expected")" "$(sort <<< "$actual")"
}

# Export at t3 sees the imported batch (student) sample.
oomcli_export_batch() {
    case='export batch feature'
    expected='user,student.name,student.gender,student.age
1,lian,m,18
2,gao,m,20
3,zhang,f,17
4,dong,m,25
5,tang,f,18
6,chen,m,25
7,he,f,19
'
    actual=$(oomcli export --feature student.name,student.gender,student.age --unix-milli $t3 -o csv)
    assert_eq "$case" "$(sort <<< "$expected")" "$(sort <<< "$actual")"
}

# Before the batch import (t2), batch columns are empty but stream
# features for entity 1 are present.
oomcli_export_batch_and_stream_before_import() {
    case="export batch and stream feature before import"
    expected='user,student.name,student.gender,student.age,user-click.last_5_click_posts,user-click.number_of_user_starred_posts
1,,,,"2,3,4,5,6",11
'
    actual=$(oomcli export --feature student.name,student.gender,student.age,user-click.last_5_click_posts,user-click.number_of_user_starred_posts --unix-milli $t2 -o csv)
    assert_eq "$case" "$(sort <<< "$expected")" "$(sort <<< "$actual")"
}

# After the batch import (t3), batch and stream features join per entity;
# entities with no stream data get empty stream columns.
oomcli_export_batch_and_stream() {
    case="export batch and stream feature"
    expected='user,student.name,student.gender,student.age,user-click.last_5_click_posts,user-click.number_of_user_starred_posts
1,lian,m,18,"2,3,4,5,6",11
2,gao,m,20,,
3,zhang,f,17,,
4,dong,m,25,,
5,tang,f,18,,
6,chen,m,25,,
7,he,f,19,,
'
    actual=$(oomcli export --feature student.name,student.gender,student.age,user-click.last_5_click_posts,user-click.number_of_user_starred_posts --unix-milli $t3 -o csv)
    assert_eq "$case" "$(sort <<< "$expected")" "$(sort <<< "$actual")"
}

main() {
    oomcli_export_no_register_feature
    oomcli_export_has_no_register_feature
    oomcli_export_push_feature
    oomcli_export_update_feature
    oomcli_export_batch
    oomcli_export_batch_and_stream_before_import
    oomcli_export_batch_and_stream
}

main
# Buhos
# https://github.com/clbustos/buhos
# Copyright (c) 2016-2021, <NAME>
# All rights reserved.
# Licensed BSD 3-Clause License
# See LICENSE file for more information
#
# @!group Screening and analysis of documents

# Retrieve the interface to make decision on a document.
# Renders the :decision partial for one canonical document within a
# systematic review at a given screening stage, preloading all decisions
# the user has already made at that stage.
get '/decision/review/:review_id/user/:user_id/canonical_document/:cd_id/stage/:stage' do |review_id, user_id, cd_id, stage|
  halt_unless_auth('review_view')
  review=SystematicReview[review_id]
  cd=CanonicalDocument[cd_id]
  ars=AnalysisSystematicReview.new(review)
  usuario=User[user_id]
  # This user's existing decisions at this stage, keyed by canonical
  # document id, so the partial can display the current state.
  decisions=Decision.where(:user_id => user_id, :systematic_review_id => review_id, :stage => stage).as_hash(:canonical_document_id)
  if !review or !cd or !usuario
    return [500, "No existe alguno de los componentes"]
  end
  return partial(:decision, :locals => {review: review, cd: cd, decisions: decisions, ars: ars, user_id: user_id, stage: stage})
end

# Put a commentary for a specific document on analysis.
# NOTE(review): `pk`/`value` look like an x-editable style payload, with `pk`
# carrying the canonical document id (the :cd_id URL segment is unused here)
# — confirm against the client code.
put '/decision/review/:review_id/user/:user_id/canonical_document/:cd_id/stage/:stage/commentary' do |review_id, user_id, cd_id, stage|
  halt_unless_auth('review_analyze')
  pk = params['pk']
  value = params['value']
  # Upsert inside a transaction: update the existing decision row's
  # commentary, or create a decision-less row carrying only the commentary.
  $db.transaction(:rollback => :reraise) do
    des=Decision.where(:systematic_review_id => review_id, :user_id => user_id, :canonical_document_id => pk, :stage => stage).first
    if des
      des.update(:commentary => value)
    else
      Decision.insert(:systematic_review_id => review_id, :decision => nil, :user_id => user_id, :canonical_document_id => pk, :stage => stage, :commentary => value.strip)
    end
  end
  return 200
end

# Make a decision on a given document.
# Upserts the user's decision for the document at this stage, then re-renders
# the :decision partial (ajax mode) so the client can refresh in place.
post '/decision/review/:review_id/user/:user_id/canonical_document/:cd_id/stage/:stage/decision' do |review_id, user_id, cd_id, stage|
  halt_unless_auth('review_analyze')
  #cd_id=params['pk_id']
  decision=params['decision']
  #user_id=params['user_id']
  # When "1", the partial renders only the decision buttons, not the full UI.
  only_buttons = params['only_buttons'] == "1"
  $db.transaction do
    des=Decision.where(:systematic_review_id => review_id, :user_id => user_id, :canonical_document_id => cd_id, :stage => stage).first
    if des
      des.update(:decision => decision)
    else
      Decision.insert(:systematic_review_id => review_id, :decision => decision, :user_id => user_id, :canonical_document_id => cd_id, :stage => stage)
    end
  end
  review=SystematicReview[review_id]
  cd=CanonicalDocument[cd_id]
  ars=AnalysisSystematicReview.new(review)
  decisions=Decision.where(:user_id => user_id, :systematic_review_id => review_id, :stage => stage).as_hash(:canonical_document_id)
  return partial(:decision, :locals => {review: review, cd: cd, decisions: decisions, ars: ars, user_id: user_id, stage: stage, ajax: true, only_buttons:only_buttons})
end

# @!endgroup
// <reponame>eddie4941/servicetalk
/*
 * Copyright © 2021 Apple Inc. and the ServiceTalk project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.servicetalk.concurrent.api;

import io.servicetalk.concurrent.internal.DeliberateException;
import io.servicetalk.concurrent.test.internal.TestPublisherSubscriber;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static io.servicetalk.concurrent.api.Publisher.empty;
import static io.servicetalk.concurrent.api.Publisher.failed;
import static io.servicetalk.concurrent.api.SourceAdapters.toSource;
import static io.servicetalk.concurrent.internal.DeliberateException.DELIBERATE_EXCEPTION;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;

/**
 * Tests for {@link Publisher}'s onError* operators (onErrorComplete,
 * onErrorReturn, onErrorMap, onErrorResume): each operator is exercised in
 * its bare, Class-filtered and Predicate-filtered forms, covering both the
 * matching case (error handled) and the non-matching case (error propagated
 * unchanged to the subscriber).
 */
class OnErrorPublisherTest {
    private TestPublisherSubscriber<Integer> subscriber;
    private TestPublisher<Integer> first;

    @BeforeEach
    void setUp() {
        subscriber = new TestPublisherSubscriber<>();
        first = new TestPublisher<>();
    }

    // ---- onErrorComplete: swallow the error and complete ----

    @Test
    void onErrorComplete() {
        toSource(first.onErrorComplete()).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        subscriber.awaitOnComplete();
    }

    @Test
    void onErrorCompleteClassMatch() {
        toSource(first.onErrorComplete(DeliberateException.class)).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        subscriber.awaitOnComplete();
    }

    @Test
    void onErrorCompleteClassNoMatch() {
        toSource(first.onErrorComplete(IllegalArgumentException.class)).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorCompletePredicateMatch() {
        toSource(first.onErrorComplete(t -> t == DELIBERATE_EXCEPTION)).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        subscriber.awaitOnComplete();
    }

    @Test
    void onErrorCompletePredicateNoMatch() {
        toSource(first.onErrorComplete(t -> false)).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    // ---- onErrorReturn: replace the error with a terminal item ----

    @Test
    void onErrorReturnMatch() {
        toSource(first.onErrorReturn(t -> 1)).subscribe(subscriber);
        subscriber.awaitSubscription().request(1);
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.takeOnNext(), is(1));
        subscriber.awaitOnComplete();
    }

    @Test
    void onErrorReturnThrows() {
        // The item function throwing propagates that exception instead.
        toSource(first.onErrorReturn(t -> {
            throw DELIBERATE_EXCEPTION;
        })).subscribe(subscriber);
        subscriber.awaitSubscription().request(1);
        first.onError(new DeliberateException());
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorReturnClassMatch() {
        toSource(first.onErrorReturn(DeliberateException.class, t -> 1)).subscribe(subscriber);
        subscriber.awaitSubscription().request(1);
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.takeOnNext(), is(1));
        subscriber.awaitOnComplete();
    }

    @Test
    void onErrorReturnClassNoMatch() {
        toSource(first.onErrorReturn(IllegalArgumentException.class, t -> 1)).subscribe(subscriber);
        subscriber.awaitSubscription().request(1);
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorReturnPredicateMatch() {
        toSource(first.onErrorReturn(t -> t == DELIBERATE_EXCEPTION, t -> 1)).subscribe(subscriber);
        subscriber.awaitSubscription().request(1);
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.takeOnNext(), is(1));
        subscriber.awaitOnComplete();
    }

    @Test
    void onErrorReturnPredicateNoMatch() {
        toSource(first.onErrorReturn(t -> false, t -> 1)).subscribe(subscriber);
        subscriber.awaitSubscription().request(1);
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    // ---- onErrorMap: transform the error into another error ----

    @Test
    void onErrorMapMatch() {
        toSource(first.onErrorMap(t -> DELIBERATE_EXCEPTION)).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(new DeliberateException());
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorMapMatchThrows() {
        // The mapper throwing propagates the thrown exception.
        toSource(first.onErrorMap(t -> {
            throw DELIBERATE_EXCEPTION;
        })).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(new DeliberateException());
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorMapClassMatch() {
        toSource(first.onErrorMap(DeliberateException.class, t -> DELIBERATE_EXCEPTION)).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(new DeliberateException());
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorMapClassNoMatch() {
        toSource(first.onErrorMap(IllegalArgumentException.class, t -> new DeliberateException()))
                .subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorMapPredicateMatch() {
        toSource(first.onErrorMap(t -> t instanceof DeliberateException, t -> DELIBERATE_EXCEPTION))
                .subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(new DeliberateException());
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorMapPredicateNoMatch() {
        toSource(first.onErrorMap(t -> false, t -> new IllegalStateException())).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    // ---- onErrorResume: switch to another Publisher on error ----

    @Test
    void onErrorResumeClassMatch() {
        toSource(first.onErrorResume(DeliberateException.class, t -> failed(DELIBERATE_EXCEPTION)))
                .subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(new DeliberateException());
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorResumeClassNoMatch() {
        toSource(first.onErrorResume(IllegalArgumentException.class, t -> empty())).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorResumePredicateMatch() {
        toSource(first.onErrorResume(t -> t instanceof DeliberateException,
                t -> failed(DELIBERATE_EXCEPTION))).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(new DeliberateException());
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }

    @Test
    void onErrorResumePredicateNoMatch() {
        toSource(first.onErrorResume(t -> false, t -> empty())).subscribe(subscriber);
        subscriber.awaitSubscription();
        first.onError(DELIBERATE_EXCEPTION);
        assertThat(subscriber.awaitOnError(), is(DELIBERATE_EXCEPTION));
    }
}
// <reponame>QuentinQuero/among_us_irl-back
'use strict';

const gameSchema = require('../../schema/GameSchema');
const resetMissionList = require('./resetMissionList');
const resetPlayerList = require('./resetPlayerList');

/**
 * Advance a game to the next status in its lifecycle:
 *   init -> inGame -> finished -> init
 *
 * When wrapping from 'finished' back to 'init', the game's mission and
 * player lists are reset BEFORE the new status is persisted, and the
 * returned promise settles only once all of that work is done.
 * (The original resolved immediately, racing both the resets and the save,
 * and its later reject() calls were no-ops on an already-resolved promise.)
 *
 * @param {string} gameId id of the game document to update
 * @returns {Promise<{status: string, message: string}>} resolves with a
 *   success payload; resolves (not rejects, for backward compatibility)
 *   with an error payload when the game is missing or its status is
 *   unknown; rejects with an error payload when a reset or the save fails.
 */
const updateGameStatus = function (gameId) {
    console.log('Game service - updateGameStatus - begin')
    return new Promise((resolve, reject) => {
        gameSchema.findById(gameId).exec((err, game) => {
            if (err || game === null) {
                console.log('Game service - updateGameStatus - error no game found');
                // Kept as resolve (not reject) to preserve the original contract.
                resolve({ status: 'error', message: 'No game found' });
                return;
            }

            // Status cycle; anything outside it is reported instead of being
            // silently saved as '' like the original switch did.
            const STATUS_CYCLE = { init: 'inGame', inGame: 'finished', finished: 'init' };
            const newStatus = STATUS_CYCLE[game.status];
            if (newStatus === undefined) {
                console.log('Game service - updateGameStatus - unknown status ' + game.status);
                resolve({ status: 'error', message: 'unknown game status' });
                return;
            }

            // Persist the new status; only report success once the save
            // completes. NOTE(review): assumes the mongoose callback-style
            // save() API, matching findById().exec(cb) above — confirm.
            const saveNewStatus = () => {
                game.status = newStatus;
                game.save((saveErr) => {
                    if (saveErr) {
                        console.log('gameService - updateGameStatus - error saving game');
                        reject({ status: 'error', message: 'error_saving_game' });
                    } else {
                        console.log('Game service - updateGameStatus - end');
                        resolve({ status: 'success', message: 'game status updated' });
                    }
                });
            };

            if (game.status === 'finished') {
                // Wrapping back to 'init': clear missions, then players, then save.
                resetMissionList(game._id)
                    .then(() => resetPlayerList(game._id)
                        .then(saveNewStatus)
                        .catch(() => {
                            console.log('gameService - updateGameStatus - error resetPlayerList');
                            reject({ status: 'error', message: 'error_reset_player_list' });
                        }))
                    .catch(() => {
                        console.log('gameService - updateGameStatus - error resetMissionList');
                        reject({ status: 'error', message: 'error_reset_mission_list' });
                    });
            } else {
                saveNewStatus();
            }
        })
    });
};

module.exports = updateGameStatus;
#!/bin/bash sudo yum -y update echo "Install Java JDK 8" yum remove -y java yum install -y java-1.8.0-openjdk echo "Install Maven" yum install -y maven echo "Install git" yum install -y git echo "Install Docker engine" yum update -y yum install docker -y #sudo usermod -a -G docker jenkins #sudo service docker start sudo systemctl enable docker echo "Install Jenkins" sudo wget -O /etc/yum.repos.d/jenkins.repo https://pkg.jenkins.io/redhat-stable/jenkins.repo sudo rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io.key yum install -y jenkins sudo usermod -a -G docker jenkins sudo systemctl enable jenkins sudo systemctl start docker sudo systemctl start jenkins
#!/bin/bash GPIO_PIN=250 DEVICE='/dev/ttyS5' BAUD=230400 # Install PPS cd .. cd pps-gpio-modprobe make clean make sudo cp pps-gpio-modprobe.ko /lib/modules/$(uname -r)/kernel/drivers/pps/clients/ sudo depmod sudo sh -c 'echo "pps-gpio-modprobe" >> /etc/modules-load.d/10-pps-gpio-modprobe.conf' sudo sh -c "echo 'options pps-gpio-modprobe gpio=${GPIO_PIN}' >> /etc/modprobe.d/10-pps-gpio-modprobe.conf" cd .. # Install NMEA GPS sudo apt install gpsd -y sudo cp cfg/gpsd /etc/default/gpsd sudo dpkg-reconfigure gpsd sudo sh -c "echo '#!/bin/bash' >> /etc/rc.local" sudo sh -c "echo '' >> /etc/rc.local" sudo sh -c "echo '# Setting GPS UART' >> /etc/rc.local" sudo sh -c "echo 'stty -F ${DEVICE} ${BAUD}' >> /etc/rc.local" sudo sh -c "echo '# Start GPSD' >> /etc/rc.local" sudo sh -c "echo 'service gpsd start' >> /etc/rc.local" sudo sh -c "echo '' >> /etc/rc.local" sudo sh -c "echo 'exit 0' >> /etc/rc.local" sudo chmod +x /etc/rc.local # Install chrony. sudo apt install chrony -y sudo sh -c "echo '' >> /etc/chrony/chrony.conf" sudo sh -c "echo '# GPS + PPS' >> /etc/chrony/chrony.conf" sudo sh -c "echo 'refclock PPS /dev/pps0 lock NMEA' >> /etc/chrony/chrony.conf" sudo sh -c "echo 'refclock SHM 0 offset 0.02 delay 0.2 refid NMEA' >> /etc/chrony/chrony.conf" # Install PPS debug tools. sudo apt install pps-tools gpsd-clients -y
#!/bin/bash
# Created with package:mono_repo v5.0.5
# Runs the TASK arguments (analyze/format/test_0..test_3) in every package
# listed in the PKGS environment variable, collecting failures and exiting
# non-zero if any task failed in any package.

# Support built in commands on windows out of the box.

# When it is a flutter repo (check the pubspec.yaml for "sdk: flutter")
# then "flutter" is called instead of "pub".
# This assumes that the Flutter SDK has been installed in a previous step.
function pub() {
  if grep -Fq "sdk: flutter" "${PWD}/pubspec.yaml"; then
    command flutter pub "$@"
  else
    command dart pub "$@"
  fi
}

# When it is a flutter repo (check the pubspec.yaml for "sdk: flutter")
# then "flutter" is called instead of "pub".
# This assumes that the Flutter SDK has been installed in a previous step.
function format() {
  if grep -Fq "sdk: flutter" "${PWD}/pubspec.yaml"; then
    command flutter format "$@"
  else
    command dart format "$@"
  fi
}

# When it is a flutter repo (check the pubspec.yaml for "sdk: flutter")
# then "flutter" is called instead of "pub".
# This assumes that the Flutter SDK has been installed in a previous step.
function analyze() {
  if grep -Fq "sdk: flutter" "${PWD}/pubspec.yaml"; then
    command flutter analyze "$@"
  else
    command dart analyze "$@"
  fi
}

# Both PKGS and at least one task argument are mandatory.
if [[ -z ${PKGS} ]]; then
  echo -e '\033[31mPKGS environment variable must be set! - TERMINATING JOB\033[0m'
  exit 64
fi

if [[ "$#" == "0" ]]; then
  echo -e '\033[31mAt least one task argument must be provided! - TERMINATING JOB\033[0m'
  exit 64
fi

SUCCESS_COUNT=0
declare -a FAILURES

# Run every requested task in every package, recording failures as we go.
for PKG in ${PKGS}; do
  echo -e "\033[1mPKG: ${PKG}\033[22m"
  EXIT_CODE=0
  pushd "${PKG}" >/dev/null || EXIT_CODE=$?

  if [[ ${EXIT_CODE} -ne 0 ]]; then
    echo -e "\033[31mPKG: '${PKG}' does not exist - TERMINATING JOB\033[0m"
    exit 64
  fi

  # Dependencies must resolve before any task is attempted.
  dart pub upgrade || EXIT_CODE=$?

  if [[ ${EXIT_CODE} -ne 0 ]]; then
    echo -e "\033[31mPKG: ${PKG}; 'dart pub upgrade' - FAILED (${EXIT_CODE})\033[0m"
    FAILURES+=("${PKG}; 'dart pub upgrade'")
  else
    for TASK in "$@"; do
      EXIT_CODE=0
      echo
      echo -e "\033[1mPKG: ${PKG}; TASK: ${TASK}\033[22m"
      # Dispatch on the task name; unknown names abort the whole job.
      case ${TASK} in
      analyze)
        echo 'dart analyze --fatal-infos .'
        dart analyze --fatal-infos . || EXIT_CODE=$?
        ;;
      format)
        echo 'dart format --output=none --set-exit-if-changed .'
        dart format --output=none --set-exit-if-changed . || EXIT_CODE=$?
        ;;
      test_0)
        echo 'dart test'
        dart test || EXIT_CODE=$?
        ;;
      test_1)
        echo 'dart test --run-skipped -t presubmit-only test/ensure_build_test.dart'
        dart test --run-skipped -t presubmit-only test/ensure_build_test.dart || EXIT_CODE=$?
        ;;
      test_2)
        echo 'dart test -p chrome'
        dart test -p chrome || EXIT_CODE=$?
        ;;
      test_3)
        echo 'dart test --run-skipped -t presubmit-only test/annotation_version_test.dart'
        dart test --run-skipped -t presubmit-only test/annotation_version_test.dart || EXIT_CODE=$?
        ;;
      *)
        echo -e "\033[31mUnknown TASK '${TASK}' - TERMINATING JOB\033[0m"
        exit 64
        ;;
      esac

      if [[ ${EXIT_CODE} -ne 0 ]]; then
        echo -e "\033[31mPKG: ${PKG}; TASK: ${TASK} - FAILED (${EXIT_CODE})\033[0m"
        FAILURES+=("${PKG}; TASK: ${TASK}")
      else
        echo -e "\033[32mPKG: ${PKG}; TASK: ${TASK} - SUCCEEDED\033[0m"
        SUCCESS_COUNT=$((SUCCESS_COUNT + 1))
      fi
    done
  fi

  # Per-package summary before moving on.
  echo
  echo -e "\033[32mSUCCESS COUNT: ${SUCCESS_COUNT}\033[0m"
  if [ ${#FAILURES[@]} -ne 0 ]; then
    echo -e "\033[31mFAILURES: ${#FAILURES[@]}\033[0m"
    for i in "${FAILURES[@]}"; do
      echo -e "\033[31m  $i\033[0m"
    done
  fi

  popd >/dev/null || exit 70
  echo
done

# Any recorded failure makes the whole job fail.
if [ ${#FAILURES[@]} -ne 0 ]; then
  exit 1
fi
#bart vec 0 0 1 0 v1 #bart vec 0 0 0 1 v2 #bart vec 0 1 1 1 v3 #bart join 1 v1 v2 v3 v #bart vec 0 0 1 1 v1 #bart vec 0 1 1 0 v2 #bart vec 0 0 1 0 v3 #bart join 1 v1 v2 v3 v #bart vec 0 1 1 1 0 1 v1 #bart vec 0 1 0 0 0 0 v2 #bart vec 0 0 0 0 1 1 v3 #bart vec 0 0 1 1 0 1 v4 #bart vec 0 1 0 1 0 1 v5 #bart join 1 v1 v2 v3 v4 v5 v #bart resize -c 0 300 1 300 v o #bart conway -n3000 o x bart vec 0 0 0 1 1 1 0 0 0 1 1 1 0 0 v0 bart vec 0 0 0 0 0 0 0 0 0 0 0 0 0 0 v1 bart vec 0 1 0 0 0 0 1 0 1 0 0 0 0 1 v2 bart vec 0 1 0 0 0 0 1 0 1 0 0 0 0 1 v3 bart vec 0 1 0 0 0 0 1 0 1 0 0 0 0 1 v4 bart vec 0 0 0 1 1 1 0 0 0 1 1 1 0 0 v5 bart vec 0 0 0 0 0 0 0 0 0 0 0 0 0 0 v6 bart vec 0 0 0 1 1 1 0 0 0 1 1 1 0 0 v7 bart vec 0 1 0 0 0 0 1 0 1 0 0 0 0 1 v8 bart vec 0 1 0 0 0 0 1 0 1 0 0 0 0 1 v9 bart vec 0 1 0 0 0 0 1 0 1 0 0 0 0 1 va bart vec 0 0 0 0 0 0 0 0 0 0 0 0 0 0 vb bart vec 0 0 0 1 1 1 0 0 0 1 1 1 0 0 vc bart join 1 v0 v1 v2 v3 v4 v5 v6 v7 v8 v9 va vb vc v bart resize -c 0 50 1 50 v o bart conway -n3 o x
/**
 * Simulated user-registration call.
 * Invokes `callback(success, message)`: success is true with a confirmation
 * message when both username and password are present (truthy), false with
 * an error message otherwise.
 */
function registerUser(userData, callback) {
  if (!userData.username || !userData.password) {
    // Simulate failed registration due to invalid data
    callback(false, 'Invalid registration data');
    return;
  }
  // Simulate successful registration
  callback(true, 'Successfully registered');
}

// Example usage:
const user1 = {
  username: 'john_doe',
  password: 'password123'
};

const handleRegistration = (success, message) => {
  if (success) {
    console.log(message); // Output: Successfully registered
    // Additional actions for successful registration (e.g., redirect to login page)
  } else {
    console.log(message); // Output: Invalid registration data
    // Additional actions for failed registration (e.g., display error message)
  }
};

registerUser(user1, handleRegistration);
#include "tickit.h"
#include "taplib.h"
#include "taplib-mockterm.h"

/* TAP tests for TickitRenderBuffer's save/restore stack: verifies that
 * tickit_renderbuffer_save()/savepen() capture — and restore() reinstates —
 * the virtual cursor position, the clipping region, the render pen, and the
 * translation offset, by flushing to a mock terminal and comparing the
 * recorded terminal operations. */
int main(int argc, char *argv[])
{
  TickitTerm *tt = make_term(25, 80);
  TickitRenderBuffer *rb;

  rb = tickit_renderbuffer_new(10, 20);

  // Position
  {
    int line, col;

    tickit_renderbuffer_goto(rb, 2, 2);
    {
      tickit_renderbuffer_save(rb);

      /* Move inside the saved scope; position must read back as moved... */
      tickit_renderbuffer_goto(rb, 4, 4);
      tickit_renderbuffer_get_cursorpos(rb, &line, &col);
      is_int(line, 4, "line before restore");
      is_int(col, 4, "col before restore");

      tickit_renderbuffer_restore(rb);
    }

    /* ...and revert to (2,2) after restore. */
    tickit_renderbuffer_get_cursorpos(rb, &line, &col);
    is_int(line, 2, "line after restore");
    is_int(col, 2, "col after restore");

    tickit_renderbuffer_text(rb, "some text");

    tickit_renderbuffer_flush_to_term(rb, tt);
    is_termlog("Stack saves/restores virtual cursor position",
        GOTO(2,2), SETPEN(), PRINT("some text"),
        NULL);
  }

  // Clipping
  {
    tickit_renderbuffer_text_at(rb, 0, 0, "0000000000");

    {
      tickit_renderbuffer_save(rb);

      /* Clip only applies within the saved scope: row 1 is clipped to
       * columns 2..17, rows 0 and 2 are untouched. */
      tickit_renderbuffer_clip(rb, &(TickitRect){.top = 0, .left = 2, .lines = 10, .cols = 16});
      tickit_renderbuffer_text_at(rb, 1, 0, "1111111111");

      tickit_renderbuffer_restore(rb);
    }

    tickit_renderbuffer_text_at(rb, 2, 0, "2222222222");

    tickit_renderbuffer_flush_to_term(rb, tt);
    is_termlog("Stack saves/restores clipping region",
        GOTO(0,0), SETPEN(), PRINT("0000000000"),
        GOTO(1,2), SETPEN(), PRINT("11111111"),
        GOTO(2,0), SETPEN(), PRINT("2222222222"),
        NULL);
  }

  // Pen
  {
    TickitPen *pen;

    tickit_renderbuffer_goto(rb, 3, 0);
    tickit_renderbuffer_setpen(rb, pen = tickit_pen_new_attrs(TICKIT_PEN_BG, 1, 0));
    tickit_pen_unref(pen);
    tickit_renderbuffer_text(rb, "123");

    {
      tickit_renderbuffer_savepen(rb);

      /* savepen: new attrs layer on top of the saved pen (bg=1 persists)... */
      tickit_renderbuffer_setpen(rb, pen = tickit_pen_new_attrs(TICKIT_PEN_FG, 4, 0));
      tickit_pen_unref(pen);
      tickit_renderbuffer_text(rb, "456");

      /* ...and a -1 value clears the inherited attribute. */
      tickit_renderbuffer_setpen(rb, pen = tickit_pen_new_attrs(TICKIT_PEN_BG, -1, 0));
      tickit_pen_unref(pen);
      tickit_renderbuffer_text(rb, "789");

      tickit_renderbuffer_restore(rb);
    }

    tickit_renderbuffer_text(rb, "ABC");

    tickit_renderbuffer_flush_to_term(rb, tt);
    is_termlog("Stack saves/restores render pen",
        GOTO(3,0),
        SETPEN(.bg=1), PRINT("123"),
        SETPEN(.bg=1,.fg=4), PRINT("456"),
        SETPEN(), PRINT("789"),
        SETPEN(.bg=1), PRINT("ABC"),
        NULL);

    tickit_renderbuffer_goto(rb, 4, 0);
    tickit_renderbuffer_setpen(rb, pen = tickit_pen_new_attrs(TICKIT_PEN_REVERSE, 1, 0));
    tickit_pen_unref(pen);
    tickit_renderbuffer_text(rb, "123");

    {
      tickit_renderbuffer_savepen(rb);

      /* Setting an attribute to 0 inside savepen overrides (zeroes) the
       * inherited non-zero value. */
      tickit_renderbuffer_setpen(rb, pen = tickit_pen_new_attrs(TICKIT_PEN_REVERSE, 0, 0));
      tickit_pen_unref(pen);
      tickit_renderbuffer_text(rb, "456");

      tickit_renderbuffer_restore(rb);
    }

    tickit_renderbuffer_text(rb, "789");

    tickit_renderbuffer_flush_to_term(rb, tt);
    is_termlog("Stack saves/restores allow zeroing pen attributes",
        GOTO(4,0),
        SETPEN(.rv=1), PRINT("123"),
        SETPEN(), PRINT("456"),
        SETPEN(.rv=1), PRINT("789"),
        NULL);
  }

  // Translation
  {
    tickit_renderbuffer_text_at(rb, 0, 0, "A");

    tickit_renderbuffer_save(rb);
    {
      /* Within the saved scope, (1,1) is translated by (2,2) to (3,3). */
      tickit_renderbuffer_translate(rb, 2, 2);

      tickit_renderbuffer_text_at(rb, 1, 1, "B");
    }
    tickit_renderbuffer_restore(rb);

    tickit_renderbuffer_text_at(rb, 2, 2, "C");

    tickit_renderbuffer_flush_to_term(rb, tt);
    is_termlog("Stack saves/restores translation offset",
        GOTO(0,0), SETPEN(), PRINT("A"),
        GOTO(2,2), SETPEN(), PRINT("C"),
        GOTO(3,3), SETPEN(), PRINT("B"),
        NULL);
  }

  tickit_renderbuffer_unref(rb);
  tickit_term_unref(tt);

  return exit_status();
}
// <filename>Client/Seen-1.0.4-IM/Seen/app/src/main/java/com/a8plus1/seen/mainViewPagers/SearchFragment.java<gh_stars>1-10
package com.a8plus1.seen.mainViewPagers;

import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;

import com.a8plus1.seen.Adapter.MessageRecyclerAdapter;
import com.a8plus1.seen.Bean.NetData;
import com.a8plus1.seen.MainActivity;
import com.a8plus1.seen.R;
import com.a8plus1.seen.TieZi;
import com.yanzhenjie.nohttp.NoHttp;
import com.yanzhenjie.nohttp.RequestMethod;
import com.yanzhenjie.nohttp.rest.OnResponseListener;
import com.yanzhenjie.nohttp.rest.Request;
import com.yanzhenjie.nohttp.rest.RequestQueue;
import com.yanzhenjie.nohttp.rest.Response;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;

/**
 * Search tab of the main view pager: the user enters a keyword, the fragment
 * POSTs it to the search endpoint, then fetches the detail of every matching
 * post (TieZi) and renders the results in a RecyclerView.
 */
public class SearchFragment extends Fragment {

    private RecyclerView messageRecyclerView;
    private MessageRecyclerAdapter searchRecyclerViewAdapter;
    private EditText searchTextEditText;
    private Button searchButton;

    ArrayList<TieZi> tieZis = new ArrayList<>();   // collection holding the posts shown in the results list
    // raw JSON detail of each fetched post; index is the request id minus the 999 offset
    String[] tieZiInfosTemp;
    // helper counter: number of detail requests that have finished so far
    int num = 0;

    /** Inflates the search layout and wires up the views. */
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_search, container, false);
        initSearchFragmentView(view);
        return view;
    }

    /** Looks up the RecyclerView, the search box and the search button. */
    private void initSearchFragmentView(View view) {
        messageRecyclerView = (RecyclerView) view.findViewById(R.id.search_recyclerview_searchfragment);
        initRecyclerView();
        searchTextEditText = view.findViewById(R.id.searchtext_edittext_searchfragment);
        searchButton = view.findViewById(R.id.search_button_searchfragment);
        initSearchListener();
    }

    /**
     * Installs the touch listener on the search button. On press it POSTs the
     * keyword to the find-note endpoint, then issues one detail request per
     * result (request ids start at the arbitrary 999 offset so `what - 999`
     * recovers the array index in the shared listener); once all details are
     * in, the results are parsed into TieZi objects and the adapter refreshed.
     */
    private void initSearchListener() {
        searchButton.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent motionEvent) {
                if (motionEvent.getAction() == MotionEvent.ACTION_DOWN) {
                    String keyString = searchTextEditText.getText().toString();
                    searchButton.setBackgroundResource(R.color.colorPrimaryDark);
                    if(keyString.equals("")){
                        Toast.makeText(getContext(), "请输入关键字", Toast.LENGTH_SHORT).show();
                        return true;
                    }
                    // load data from the server, then filter ******
                    initData();
//                    ArrayList<TieZi> temps = new ArrayList<>();
//                    for(int i = 0 ; i < tieZis.size(); i ++) {
//                        temps.add(tieZis.get(i));
//                    }
//                    tieZis.removeAll(tieZis);
//                    for(int i = 0; i < temps .size(); i ++ ){
//                        //如果不是子串则删除……
//                        String temp = temps.get(i).getTitle();
//                        if(temp.contains(keyString)){
//                            tieZis.add(temps.get(i));
//                        }
//                    }
                    String dataUrl = NetData.urlFindNote;
                    RequestQueue queue = NoHttp.newRequestQueue();
                    Request<String> request = NoHttp.createStringRequest(dataUrl, RequestMethod.POST);
                    //request.add("Search", keyString);
                    try {
                        JSONObject jsonObject = new JSONObject();
                        jsonObject.put("Search", keyString);
                        request.setDefineRequestBodyForJson(jsonObject);
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                    queue.add(2, request, new OnResponseListener<String>() {
                        @Override
                        public void onStart(int what) {
                        }

                        @Override
                        public void onSucceed(int what, Response<String> response) {
                            if(response.responseCode() == 200){
                                String result = response.get();
                                try {
                                    final JSONArray jsonArray = new JSONArray(result);
                                    // holds the raw info of every matching post
                                    tieZiInfosTemp = new String[jsonArray.length()];
                                    // fire one detail request per hit; ids 999.. keep them
                                    // distinct from the search request's id (2)
                                    for(int i = 999; i < jsonArray.length() + 999; i++){
                                        //String id = ((JSONObject)jsonArray.get(i)) .getString("tieID");
                                        //Request<String> request2 = NoHttp.createStringRequest(NetData.urlGetNote, RequestMethod.POST);
                                        Request<String> request2 = NoHttp.createStringRequest(NetData.urlGetNote, RequestMethod.POST);
                                        RequestQueue queue = NoHttp.newRequestQueue();
                                        try {
                                            request2.setDefineRequestBodyForJson((JSONObject)(jsonArray.get(i-999)));
                                        } catch (JSONException e) {
                                            e.printStackTrace();
                                        }
                                        queue.add(i, request2, new OnResponseListener<String>() {
                                            @Override
                                            public void onStart(int what) {
                                            }

                                            @Override
                                            public void onSucceed(int what, Response<String> response) {
                                                num++;
                                                if(response.responseCode() == 200){
                                                    tieZiInfosTemp[what-999] = response.get();
                                                }
                                                if(num == jsonArray.length() ){
                                                    num = 0;
                                                    // once every post's detail has arrived, start parsing
                                                    if(!tieZis.isEmpty())
                                                        tieZis.removeAll(tieZis);
                                                    for(int i = 0; i < jsonArray.length(); i ++){
                                                        if(!tieZiInfosTemp[i].equals("")){
                                                            try {
                                                                JSONArray jsonObject1 = new JSONArray(tieZiInfosTemp[i]);
                                                                JSONObject jsonObjectTemp = null;
                                                                jsonObjectTemp = (JSONObject) jsonObject1.get(0);
                                                                //JSONObject jsonObject1 = new JSONObject(tieZiInfosTemp[i]);
                                                                // data was malformed!!!!!
                                                                // mock data
//                                                                String moniData = "{\"t_userID\": \"a123\",\"title\": \"929\",\"content\": \"qqqq\",\"time\": \"2017-09-29 06:59:46.0\",\"circleImage\": \"江汉大学\",\"nickname\": \"-115.936318\",\"agree\": 2,\"pageviews\": 1}";
//                                                                JSONObject jsonObjectTemp = null;
//                                                                jsonObjectTemp = new JSONObject(moniData);
//    public TieZi(String tieZiId, String userID, String userNickName, String title, String context, String picString, int watchCount, int goodCount, String firstTime) {
                                                                tieZis.add(new TieZi(
                                                                        ((JSONObject)jsonArray.get(i)).getString("tieID"),
                                                                        jsonObjectTemp.getString("t_userID"),
                                                                        jsonObjectTemp.getString("nickname"),
                                                                        jsonObjectTemp.getString("title"),
                                                                        jsonObjectTemp.getString("content"),
                                                                        jsonObjectTemp.getString("circleImage"),
                                                                        jsonObjectTemp.getInt("pageviews"),
                                                                        jsonObjectTemp.getInt("agree"),
                                                                        jsonObjectTemp.getString("time"),
                                                                        jsonObjectTemp.optString("Image1","")
                                                                ));
                                                                searchRecyclerViewAdapter.notifyDataSetChanged();
                                                            } catch (JSONException e) {
                                                                e.printStackTrace();
                                                            }
                                                        }
                                                    }
                                                }
                                            }

                                            @Override
                                            public void onFailed(int what, Response<String> response) {
                                            }

                                            @Override
                                            public void onFinish(int what) {
                                            }
                                        });
                                    }
                                } catch (JSONException e) {
                                    e.printStackTrace();
                                }
                            }else {
                                Log.i("cyd000", "connect failed");
                            }
                        }

                        @Override
                        public void onFailed(int what, Response<String> response) {
                        }

                        @Override
                        public void onFinish(int what) {
                        }
                    });
                    // attach a fresh adapter over the (still-loading) result list
                    searchRecyclerViewAdapter = new MessageRecyclerAdapter(tieZis, getActivity());
                    messageRecyclerView.setAdapter(searchRecyclerViewAdapter);
                } else if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
                    searchButton.setBackgroundResource(R.color.clickChuanZhao);
                }
                return true;
            }
        });
    }

    /** Single-column vertical grid layout for the results list. */
    private void initRecyclerView() {
        messageRecyclerView.setLayoutManager(new GridLayoutManager(this.getContext(), 1, GridLayoutManager.VERTICAL, false));
    }

    /** Clears the current result list before a new search is issued. */
    private void initData() {
        if(!tieZis.isEmpty())
            tieZis.removeAll(tieZis);
        // the fetched data should be a JSONArray
        // test data
//        for(int i = 0; i < 20; i ++){
//            tieZis.add(new TieZi("0000000" + i, "000000"+i, "鸡蛋",i+"为新时代中国特色",
//                    "新华社北京电 题:为新时代中国特色社会主义提供有力宪法保障——各地干部群众热议党的十九届二中全会公报\n" +
//                            "\n" +
//                            "  新华社记者\n" +
//                            "\n" +
//                            "  “宪法修改是国家政治生活中的一件大事,是党中央从新时代坚持和发展中国特色社会主义全局和战略高度作出的重大决策,也是推进全面依法治国、推进国家治理体系和治理能力现代化的重大举措。”\n" +
//                            "\n" +
//                            "  治国凭圭臬,安邦靠准绳。19日发布的党的十九届二中全会公报,在社会各界引起高度关注和强烈反响。\n" +
//                            "\n" +
//                            "  “公报凝聚共识,顺应时代要求和人民意愿。”广大干部群众表示,要更加紧密地团结在以习近平同志为核心的党中央周围,以习近平新时代中国特色社会主义思想为指导,认真贯彻落实党的十九大精神,坚定不移走中国特色社会主义法治道路,自觉维护宪法权威、保证宪法实施,为新时代推进全面依法治国、建设社会主义法治国家而努力奋斗。"
//                    ,1234, 888,"2007-1-12"));
//        }
    }
}
package milter

import (
	"bytes"
	"net"
	nettextproto "net/textproto"
	"reflect"
	"testing"

	"github.com/linanh/go-message/textproto"
)

func init() {
	// HACK: claim to support v6 in server for tests
	serverProtocolVersion = 6
}

// MockMilter is a scriptable Milter used by the tests: each callback returns
// the pre-configured response/error, optionally runs the matching *Mod hook
// against the Modifier, and records the arguments it was called with so the
// test can assert on them afterwards.
type MockMilter struct {
	ConnResp Response
	ConnMod  func(m *Modifier)
	ConnErr  error

	HeloResp Response
	HeloMod  func(m *Modifier)
	HeloErr  error

	MailResp Response
	MailMod  func(m *Modifier)
	MailErr  error

	RcptResp Response
	RcptMod  func(m *Modifier)
	RcptErr  error

	HdrResp Response
	HdrMod  func(m *Modifier)
	HdrErr  error

	HdrsResp Response
	HdrsMod  func(m *Modifier)
	HdrsErr  error

	BodyChunkResp Response
	BodyChunkMod  func(m *Modifier)
	BodyChunkErr  error

	BodyResp Response
	BodyMod  func(m *Modifier)
	BodyErr  error

	// Info collected during calls.
	Host      string
	Family    string
	Port      uint16
	Addr      net.IP
	HeloValue string
	From      string
	Rcpt      []string
	Hdr       nettextproto.MIMEHeader
	Chunks    [][]byte
}

// Connect records the connection parameters and returns the canned response.
func (mm *MockMilter) Connect(host string, family string, port uint16, addr net.IP, m *Modifier) (Response, error) {
	if mm.ConnMod != nil {
		mm.ConnMod(m)
	}
	mm.Host = host
	mm.Family = family
	mm.Port = port
	mm.Addr = addr
	return mm.ConnResp, mm.ConnErr
}

// Helo records the HELO name and returns the canned response.
func (mm *MockMilter) Helo(name string, m *Modifier) (Response, error) {
	if mm.HeloMod != nil {
		mm.HeloMod(m)
	}
	mm.HeloValue = name
	return mm.HeloResp, mm.HeloErr
}

// MailFrom records the envelope sender and returns the canned response.
func (mm *MockMilter) MailFrom(from string, m *Modifier) (Response, error) {
	if mm.MailMod != nil {
		mm.MailMod(m)
	}
	mm.From = from
	return mm.MailResp, mm.MailErr
}

// RcptTo appends the recipient (called once per RCPT) and returns the canned response.
func (mm *MockMilter) RcptTo(rcptTo string, m *Modifier) (Response, error) {
	if mm.RcptMod != nil {
		mm.RcptMod(m)
	}
	mm.Rcpt = append(mm.Rcpt, rcptTo)
	return mm.RcptResp, mm.RcptErr
}

// Header is invoked once per header field; values are not recorded here
// (Headers receives the complete set).
func (mm *MockMilter) Header(name string, value string, m *Modifier) (Response, error) {
	if mm.HdrMod != nil {
		mm.HdrMod(m)
	}
	return mm.HdrResp, mm.HdrErr
}

// Headers records the complete header block.
func (mm *MockMilter) Headers(h nettextproto.MIMEHeader, m *Modifier) (Response, error) {
	if mm.HdrsMod != nil {
		mm.HdrsMod(m)
	}
	mm.Hdr = h
	return mm.HdrsResp, mm.HdrsErr
}

// BodyChunk records each body chunk as delivered.
func (mm *MockMilter) BodyChunk(chunk []byte, m *Modifier) (Response, error) {
	if mm.BodyChunkMod != nil {
		mm.BodyChunkMod(m)
	}
	mm.Chunks = append(mm.Chunks, chunk)
	return mm.BodyChunkResp, mm.BodyChunkErr
}

// Body marks end-of-message; the test's BodyMod hook emits modify actions here.
func (mm *MockMilter) Body(m *Modifier) (Response, error) {
	if mm.BodyMod != nil {
		mm.BodyMod(m)
	}
	return mm.BodyResp, mm.BodyErr
}

// TestMilterClient_UsualFlow drives a full client/server session over a real
// TCP loopback connection and checks every stage: connect, HELO, MAIL, RCPT,
// headers, body chunking, and the modify actions returned at end-of-body.
func TestMilterClient_UsualFlow(t *testing.T) {
	mm := MockMilter{
		ConnResp:      RespContinue,
		HeloResp:      RespContinue,
		MailResp:      RespContinue,
		RcptResp:      RespContinue,
		HdrResp:       RespContinue,
		HdrsResp:      RespContinue,
		BodyChunkResp: RespContinue,
		BodyResp:      RespContinue,
		BodyMod: func(m *Modifier) {
			// Emit one of each supported modify action at end-of-body.
			m.AddHeader("X-Bad", "very")
			m.ChangeHeader(1, "Subject", "***SPAM***")
			m.Quarantine("very bad message")
		},
	}
	s := Server{
		NewMilter: func() Milter {
			return &mm
		},
		Actions: OptAddHeader | OptChangeHeader,
	}
	defer s.Close()

	// Listen on an ephemeral loopback port and serve in the background.
	local, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		t.Fatal(err)
	}
	go s.Serve(local)

	cl := NewClientWithOptions("tcp", local.Addr().String(), ClientOptions{
		ActionMask: OptAddHeader | OptChangeHeader | OptQuarantine,
	})
	defer cl.Close()
	session, err := cl.Session()
	if err != nil {
		t.Fatal(err)
	}
	defer session.Close()

	// assertAction fails the test unless the milter returned expectCode.
	assertAction := func(act *Action, err error, expectCode ActionCode) {
		t.Helper()
		if err != nil {
			t.Fatal(err)
		}
		if act.Code != expectCode {
			t.Fatal("Unexpectedcode:", act.Code)
		}
	}

	act, err := session.Conn("host", FamilyInet, 25565, "172.16.17.32")
	assertAction(act, err, ActContinue)
	if mm.Host != "host" {
		t.Fatal("Wrong host:", mm.Host)
	}
	if mm.Family != "tcp4" {
		t.Fatal("Wrong family:", mm.Family)
	}
	if mm.Port != 25565 {
		t.Fatal("Wrong port:", mm.Port)
	}
	if mm.Addr.String() != "172.16.17.32" {
		t.Fatal("Wrong IP:", mm.Addr)
	}

	if err := session.Macros(CodeHelo, "tls_version", "very old"); err != nil {
		t.Fatal("Unexpected error", err)
	}

	act, err = session.Helo("helo_host")
	assertAction(act, err, ActContinue)
	if mm.HeloValue != "helo_host" {
		t.Fatal("Wrong helo value:", mm.HeloValue)
	}

	act, err = session.Mail("<EMAIL>", []string{"A=B"})
	assertAction(act, err, ActContinue)
	if mm.From != "<EMAIL>" {
		t.Fatal("Wrong MAIL FROM:", mm.From)
	}

	// Two recipients; both must be recorded in order.
	act, err = session.Rcpt("<EMAIL>", []string{"A=B"})
	assertAction(act, err, ActContinue)
	act, err = session.Rcpt("<EMAIL>", []string{"A=B"})
	assertAction(act, err, ActContinue)
	if !reflect.DeepEqual(mm.Rcpt, []string{"<EMAIL>", "<EMAIL>"}) {
		t.Fatal("Wrong recipients:", mm.Rcpt)
	}

	hdr := textproto.Header{}
	hdr.Add("From", "<EMAIL>")
	hdr.Add("To", "<EMAIL>")
	act, err = session.Header(hdr)
	assertAction(act, err, ActContinue)
	if len(mm.Hdr) != 2 {
		t.Fatal("Unexpected header length:", len(mm.Hdr))
	}
	if val := mm.Hdr.Get("From"); val != "<EMAIL>" {
		t.Fatal("Wrong From header:", val)
	}
	if val := mm.Hdr.Get("To"); val != "<EMAIL>" {
		t.Fatal("Wrong To header:", val)
	}

	// 128000 bytes must be split into two chunks of at most 65535 bytes each.
	modifyActs, act, err := session.BodyReadFrom(bytes.NewReader(bytes.Repeat([]byte{'A'}, 128000)))
	assertAction(act, err, ActContinue)
	if len(mm.Chunks) != 2 {
		t.Fatal("Wrong amount of body chunks received")
	}
	if len(mm.Chunks[0]) > 65535 {
		t.Fatal("Too big first chunk:", len(mm.Chunks[0]))
	}
	if totalLen := len(mm.Chunks[0]) + len(mm.Chunks[1]); totalLen < 128000 {
		t.Fatal("Some body bytes lost:", totalLen)
	}

	// The three actions emitted by BodyMod, in emission order.
	expected := []ModifyAction{
		{
			Code:        ActAddHeader,
			HeaderName:  "X-Bad",
			HeaderValue: "very",
		},
		{
			Code:        ActChangeHeader,
			HeaderIndex: 1,
			HeaderName:  "Subject",
			HeaderValue: "***SPAM***",
		},
		{
			Code:   ActQuarantine,
			Reason: "very bad message",
		},
	}
	if !reflect.DeepEqual(modifyActs, expected) {
		t.Fatalf("Wrong modify actions, got %+v", modifyActs)
	}
}
<gh_stars>0 'use strict'; /** * Module dependencies */ var rawsPolicy = require('../policies/raws.server.policy'), raws = require('../controllers/raws.server.controller'); module.exports = function(app) { // Raws Routes app.route('/api/raws').all(rawsPolicy.isAllowed) .get(raws.list) .post(raws.create); app.route('/api/raws/:rawId').all(rawsPolicy.isAllowed) .get(raws.read) .put(raws.update) .delete(raws.delete); // Finish by binding the Raw middleware app.param('rawId', raws.rawByID); };
import { Request, Response } from "express";
import { RoomsManager } from "../rooms/RoomsManager";

/**
 * HTTP GET handler that lets an authorized caller reset a room's join code.
 * Authorization is a shared-secret query-string parameter checked against
 * the API_SECRET environment variable.
 */
export class RoomCodeResetHandler{
    // required query string (super secure, of course)
    // Falls back to a hard-coded default when API_SECRET is unset.
    private static readonly secretAuthCode:string = process.env.API_SECRET || "lichKing33";

    // rooms manager - must be assigned by the application before requests arrive
    public static roomsManager:RoomsManager;

    // handle http get requests
    public static get = (req:Request, res:Response):void => {
        // valid auth query string?
        if(req.query.auth !== RoomCodeResetHandler.secretAuthCode){
            res.status(403).end("Unauthorized access.");
            return;
        }

        // Guard against the manager not having been wired up yet.
        if(!RoomCodeResetHandler.roomsManager){
            res.status(400).end("No rooms manager.");
            return;
        }

        // extract room name from query string
        let roomName:string = req.query.room_name || "";

        // attempt code clear
        let reset:boolean = RoomCodeResetHandler.roomsManager.resetRoomCode(roomName);

        // respond: 200 when the room was found and reset, 400 otherwise
        if(reset){
            res.status(200).end(`Room "${roomName}" code has been reset.`)
        }
        else{
            res.status(400).end(`Room "${roomName}" not found.`);
        }
    }
}
package io.opensphere.core.control.ui.impl;

import java.util.concurrent.Executor;

import io.opensphere.core.control.ui.SharedComponentListener;
import io.opensphere.core.util.ChangeSupport;
import io.opensphere.core.util.WeakChangeSupport;

/**
 * Support for notify interested parties when a shared component has been added
 * or removed.
 */
public class SharedComponentChangeSupport
{
    /** The change support helper (weakly referenced listeners). */
    private final ChangeSupport<SharedComponentListener> myChangeSupport = new WeakChangeSupport<>();

    /**
     * Add a listener for shared component changes.
     *
     * @param listener The listener.
     */
    public void addListener(SharedComponentListener listener)
    {
        myChangeSupport.addListener(listener);
    }

    /**
     * Remove a listener from receiving shared component changes.
     *
     * @param listener The listener.
     */
    public void removeListener(SharedComponentListener listener)
    {
        myChangeSupport.removeListener(listener);
    }

    /**
     * Notify the shared component listeners that a change has been made.
     *
     * @param name The name of the shared component.
     * @param type The type of update (component added or removed).
     * @param executor The optional executor.
     */
    protected void notifyComponentListeners(final String name, final ComponentChangeType type, Executor executor)
    {
        myChangeSupport.notifyListeners(listener -> dispatch(listener, name, type), executor);
    }

    /**
     * Route a single notification to the listener callback matching the
     * change type. Unknown types are ignored.
     *
     * @param listener The listener to notify.
     * @param name The name of the shared component.
     * @param type The type of update.
     */
    private static void dispatch(SharedComponentListener listener, String name, ComponentChangeType type)
    {
        if (type == ComponentChangeType.ADDED)
        {
            listener.componentAdded(name);
        }
        else if (type == ComponentChangeType.REMOVED)
        {
            listener.componentRemoved(name);
        }
    }
}
#!/bin/bash
# Distributed TensorFlow benchmark launcher (part 1): parse options, ship the
# model sources to every node in the nodes file, and generate per-node runners.

# ---- option parsing ---------------------------------------------------------
while [[ $# -gt 1 ]]
do
key="$1"

case $key in
    -m|--model)
    MODEL="$2"
    shift # past argument
    ;;
    -h|--nodes_file)
    NODES_FILE="$2"
    shift # past argument
    ;;
    -r|--remote_dir)
    REMOTE_DIR="$2"
    shift # past argument
    ;;
    -n|--num_nodes)
    NUM_NODES="$2"
    shift # past argument
    ;;
    -g|--gpu_per_node)
    GPU_PER_NODE="$2"
    shift # past argument
    ;;
    -b|--batch_size)
    BATCH_SIZE="$2"
    shift # past argument
    ;;
    -i|--iterations)
    ITERATIONS="$2"
    shift # past argument
    ;;
    *)
    # unknown option
    ;;
esac
shift # past argument or value
done

# Default iteration count when -i/--iterations was not supplied.
if [ -z "${ITERATIONS+x}" ]; then
    ITERATIONS=20
fi

echo "Compressing model files..."
# Map the model name to its source directory (note: paths contain spaces).
if [ "$MODEL" == "inceptionv3" ]; then
    model_path="../../tensorflow inception/inception"
elif [ "$MODEL" == "alexnet" ]; then
    model_path="../../tensorflow alexnet"
elif [ "$MODEL" == "resnet" ]; then
    model_path="../../tensorflow resnet"
fi
rm -f model.tar.gz
# BUG FIX: the path must be quoted (it contains a space) and tar needs a file
# list ('.') after -C — without it tar refuses to create an empty archive.
tar -czvf model.tar.gz -C "$model_path" .

echo "Copying scripts to remote nodes..."
# Each nodes-file line is "<hostname> <ssh_alias>"; push and unpack the
# archive on every node, unpacking in the background.
head -$NUM_NODES "$NODES_FILE" | while read line; do
    # BUG FIX: was `[ -z line ]`, which tests the literal string "line" and
    # therefore never skipped blank entries.
    if [ -z "$line" ]; then continue; fi
    arr=( $line )
    host_name=${arr[0]}
    ssh_alias=${arr[1]}
    scp -o "StrictHostKeyChecking no" model.tar.gz $ssh_alias:$REMOTE_DIR
    ssh -o "StrictHostKeyChecking no" $ssh_alias 'cd '${REMOTE_DIR}' && tar -xvzf model.tar.gz > /dev/null 2>&1' &
done

echo "Generating runners..."
rm -rf gen
mkdir -p gen
script_name=`python generate_runner.py --model=$MODEL --nodes=$NODES_FILE --gen_dir=gen --remote_dir="${REMOTE_DIR}" --num_nodes=$NUM_NODES --gpu_per_node=$GPU_PER_NODE --batch_size=$BATCH_SIZE`
echo "Copying runners..."
# Distributed benchmark launcher (part 2): copy the generated per-node runner
# scripts into place, then execute and monitor them until the requested
# iteration count is reached, restarting the whole run if a worker dies.

# Runner destination inside the unpacked model tree depends on the model.
if [ "$MODEL" == "inceptionv3" ]; then
    RUNNER_DEST=$REMOTE_DIR/inception/inception/
elif [ "$MODEL" == "alexnet" ]; then
    RUNNER_DEST=$REMOTE_DIR/alexnet/
elif [ "$MODEL" == "resnet" ]; then
    RUNNER_DEST=$REMOTE_DIR/resnet/
fi

# Copy gen/<index>.sh to node <index> as runner.sh (nodes-file order).
index=1
head -$NUM_NODES $NODES_FILE | while read line; do
    arr=( $line )
    ssh_alias=${arr[1]}
    scp -o "StrictHostKeyChecking no" gen/${index}.sh ${ssh_alias}:${RUNNER_DEST}/runner.sh
    let "index++"
done

echo "Killing lingering processes"
bash killall.sh -h $NODES_FILE

# Outer retry loop: re-launch the whole cluster run until it completes.
executed=0
while [ $executed -eq 0 ]; do
    echo "Executing runners..."
    # Start every node's runner in the background over ssh.
    head -$NUM_NODES $NODES_FILE | while read line; do
        tuple=( $line )
        ssh_alias=${tuple[1]}
        ssh -o "StrictHostKeyChecking no" ${ssh_alias} "cd ${RUNNER_DEST} && bash runner.sh" &
    done
    # We could wait for less but there isn't going to be any output for 10 sec anyway
    sleep 10
    # Run tail to monitor logs
    echo "Monitoring logs..."
    head -$NUM_NODES $NODES_FILE | while read line; do
        tuple=( $line )
        ssh_alias=${tuple[1]}
        ssh -o "StrictHostKeyChecking no" ${ssh_alias} "tail -f /tmp/worker* | grep --line-buffered '/sec'" &
    done
    # Poll the cluster every 30s: restart on worker death, stop when the
    # highest logged step exceeds $ITERATIONS.
    while :
    do
        sleep 30
        # Count running trainer processes across all nodes (workers + 1 ps each).
        num_running=`bash runincluster.sh -h $NODES_FILE -n $NUM_NODES -c "ps -ef | grep ps_hosts | grep -v grep | wc -l" | awk '{s+=$1} END {print s}'`
        expected_running=$(($NUM_NODES*(GPU_PER_NODE+1)))
        if [ ${num_running} -ne ${expected_running} ] ; then
            echo "Some process died unexpectedly. Restart this test."
            bash killall.sh -h $NODES_FILE
            executed=0
            break
        fi
        # Highest training step reported in any worker log so far.
        current_iteration=`bash runincluster.sh -h $NODES_FILE -n $NUM_NODES -c "cat /tmp/worker* | grep 'examples/sec' | sed 's/.*step \([[:digit:]]*\).*/\1/'" 2>/dev/null | sort | tail -1`
        # Treat unparsable output (no log lines yet) as step 0.
        if ! [[ $current_iteration =~ ^[0-9]+$ ]] ; then
            current_iteration=0
        fi
        if [ ${current_iteration} -gt $ITERATIONS ]; then
            echo "Reached required number of iterations. Terminating test"
            bash killall.sh -h $NODES_FILE
            executed=1
            break
        fi
    done
done

#Workers are done. Collect the logs
echo "Copying logs..."
# Distributed benchmark launcher (part 3): pull worker logs from every node
# and compute the average images/sec across the run.

total_gpus=$(($NUM_NODES*$GPU_PER_NODE))
# Per-configuration log directory, e.g. logs/resnet_b32_g8.
LOG_DIR=logs/${MODEL}_b${BATCH_SIZE}_g${total_gpus}
rm -rf $LOG_DIR
mkdir -p $LOG_DIR
head -$NUM_NODES $NODES_FILE | while read line; do
    tuple=( $line )
    ssh_alias=${tuple[1]}
    scp -o "StrictHostKeyChecking no" $ssh_alias:/tmp/worker* $LOG_DIR
done

#Get average images/sec
# Skip step 0 (warm-up), extract the throughput figure after '(' and average.
avg=`cat $LOG_DIR/* | grep "examples/sec" | grep -v "step 0" | cut -d'(' -f2 | cut -d' ' -f1 | python average.py`
echo $avg > $LOG_DIR/imagespersec
echo "Nodes:" $NUM_NODES"; GPUs per node:" $GPU_PER_NODE"; Images/sec:" $avg
from Old_robots import Text_Robots, Resumir, Write


def start():
    """Interactively collect a Wikipedia term and prefix, then run the
    text-robot pipeline: fetch, write to disk, and summarize."""

    def _ask_term():
        # Prompt the user for the Wikipedia search term.
        print()
        answer = input('Digite um termo para o Wikipedia: ')
        print()
        return answer

    def _ask_prefix():
        # Show the fixed prefix menu (1-based) and return the chosen entry.
        options = ['Quem e', 'O que e', 'A historia', 'Exit', '']
        print('Escolha um:')
        for position, option in enumerate(options):
            print(position + 1, option)
        print()
        picked = int(input('>> '))
        return options[picked - 1]

    # Order matters: the term is asked before the prefix.
    content = [_ask_term(), _ask_prefix()]

    # Robot pipeline: fetch the text, write it out, then build the summary.
    rob = Text_Robots.TextRobots(content)
    writer = Write.Write("/home/nask/Documentos/Arquivos/", rob.pularLinhas())
    text = writer.atributos(rob.atributos())
    Resumir.Resumir(text).arquivoResumo()


start()
// Copyright 2016 The Sandpass Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package keepass

import (
	"bytes"
	"testing"
	"time"
)

// dateTests pairs a time value with its expected 5-byte packed KeePass date
// encoding. The zero time maps to a fixed sentinel byte sequence, and the
// second and third cases encode the same instant in different zones,
// demonstrating that equal instants produce equal encodings.
var dateTests = []struct {
	t time.Time
	b []byte
}{
	{
		time.Time{},
		[]byte{0x2e, 0xdf, 0x39, 0x7e, 0xfb},
	},
	{
		time.Date(2015, time.February, 19, 2, 32, 15, 0, time.UTC),
		[]byte{0x1f, 0x7c, 0xa6, 0x28, 0x0f},
	},
	{
		// Same instant as the UTC case above, expressed in PST (UTC-8).
		time.Date(2015, time.February, 18, 18, 32, 15, 0, time.FixedZone("PST", -8*60*60)),
		[]byte{0x1f, 0x7c, 0xa6, 0x28, 0x0f},
	},
}

// TestReadDate decodes each packed value and checks it represents the same
// instant as the expected time (Equal, not ==, to ignore zone differences).
func TestReadDate(t *testing.T) {
	for _, test := range dateTests {
		// Copy so readDate cannot mutate the shared test fixture.
		b := make([]byte, 5)
		copy(b, test.b)
		ti, err := readDate("test field", b)
		if err != nil {
			t.Errorf("readDate(%v) error: %v", test.b, err)
		}
		if !ti.Equal(test.t) {
			t.Errorf("readDate(%v) = %v; want %v", test.b, ti, test.t)
		}
	}
}

// TestWriteDateField writes each date as field 0x1234 and checks the output:
// a 6-byte field header (id + length 5) followed by the packed date bytes.
func TestWriteDateField(t *testing.T) {
	// Expected field header: id 0x1234 (little-endian) + length 5.
	head := []byte{0x34, 0x12, 0x05, 0x00, 0x00, 0x00}
	for _, test := range dateTests {
		var buf bytes.Buffer
		writeDateField(&writer{w: &buf}, 0x1234, test.t)
		b := buf.Bytes()
		if !bytes.HasPrefix(b, head) {
			t.Errorf("writeDateField(w, 0x1234, %v) = %v; want prefix %v", test.t, b, head)
		}
		if len(b) >= len(head) {
			b = b[len(head):]
			if !bytes.Equal(b, test.b) {
				t.Errorf("writeDateField(w, 0x1234, %v)[%d:] = %v; want %v", test.t, len(head), b, test.b)
			}
		}
	}
}
<filename>03. Callback-Function/Calculator.Callback.js<gh_stars>0 // Addition Callback const addCallback = (a, b, c) => c(parseInt(a) + parseInt(b)); // Subtraction Callback const subCallback = (a, b, c) => c(parseInt(a) - parseInt(b)); // Multiplication Callback const multiCallback = (a, b, c) => c(parseInt(a) * parseInt(b)); // Division Callback const divCallback = (a, b, c) => c(parseInt(a) / parseInt(b)); module.exports = { addCallback, subCallback, multiCallback, divCallback };
#!/usr/bin/env python3
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""Demonstrate the URL quoting helpers: urlencode(), quote() and quote_plus().

Ported to Python 3: the original used Python 2 ``print`` statements and the
flat ``urllib`` module, whose quoting helpers moved to ``urllib.parse``.
"""
# end_pymotw_header

from urllib.parse import quote, quote_plus, urlencode

url = 'http://localhost:8080/~dhellmann/'

# urlencode() builds a query string from a mapping, escaping each value.
print('urlencode() :', urlencode({'url': url}))
# quote() escapes unsafe characters but leaves '/' alone by default.
print('quote()     :', quote(url))
# quote_plus() additionally replaces spaces with '+' and escapes '/'.
print('quote_plus():', quote_plus(url))
// Copyright 2020 The SQLFlow Authors. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package experimental

import (
	"fmt"
	"strings"

	"sqlflow.org/sqlflow/go/attribute"
	"sqlflow.org/sqlflow/go/ir"
)

// TODO(typhoonzero): below functions are copied from codegen/xgboost/codegen.go
// remove the original functions when this experimental packages are ready.
// -----------------------------------------------------------------------------

// getXGBoostObjectives returns every objective name known to the attribute
// documentation table; used to validate the "objective" attribute below.
func getXGBoostObjectives() (ret []string) {
	for k := range attribute.XGBoostObjectiveDocs {
		ret = append(ret, k)
	}
	return
}

// TODO(tony): complete model parameter and training parameter list
// model parameter list: https://xgboost.readthedocs.io/en/latest/parameter.html#general-parameters
// training parameter list: https://github.com/dmlc/xgboost/blob/b61d53447203ca7a321d72f6bdd3f553a3aa06c4/python-package/xgboost/training.py#L115-L117
//
// attributeDictionary declares each supported WITH-clause attribute together
// with its default value, help text, and a range/choice checker.
var attributeDictionary = attribute.Dictionary{}.
	Float("eta", float32(0.3), `[default=0.3, alias: learning_rate] Step size shrinkage used in update to prevents overfitting. After each boosting step, we can directly get the weights of new features, and eta shrinks the feature weights to make the boosting process more conservative. range: [0,1]`, attribute.Float32RangeChecker(0, 1, true, true)).
	Int("num_class", nil, `Number of classes. range: [2, Infinity]`, attribute.IntLowerBoundChecker(2, true)).
	String("objective", nil, `Learning objective`, attribute.StringChoicesChecker(getXGBoostObjectives()...)).
	String("eval_metric", nil, `eval metric`, nil).
	Bool("train.disk_cache", false, `whether use external memory to cache train data`, nil).
	Int("train.num_boost_round", 10, `[default=10] The number of rounds for boosting. range: [1, Infinity]`, attribute.IntLowerBoundChecker(1, true)).
	Int("train.batch_size", -1, `[default=-1] Batch size for each iteration, -1 means use all data at once. range: [-1, Infinity]`, attribute.IntLowerBoundChecker(-1, true)).
	Int("train.epoch", 1, `[default=1] Number of rounds to run the training. range: [1, Infinity]`, attribute.IntLowerBoundChecker(1, true)).
	String("validation.select", "", `[default=""] Specify the dataset for validation. example: "SELECT * FROM boston.train LIMIT 8"`, nil).
	Int("train.num_workers", 1, `[default=1] Number of workers for distributed train, 1 means stand-alone mode. range: [1, 128]`, attribute.IntRangeChecker(1, 128, true, true))

// fullAttrValidator is populated in init() with the full model-definition
// dictionary merged with attributeDictionary above.
var fullAttrValidator = attribute.Dictionary{}

// updateIfKeyDoesNotExist copies entries from add into current without
// overwriting keys the user already set.
func updateIfKeyDoesNotExist(current, add map[string]interface{}) {
	for k, v := range add {
		if _, ok := current[k]; !ok {
			current[k] = v
		}
	}
}

// resolveXGBoostModelParams fills in per-estimator default attributes
// (objective, booster, etc.) for the recognized XGBoost model names,
// returning an error for unknown estimators. User-specified attributes
// always win over the defaults.
func resolveXGBoostModelParams(ir *ir.TrainStmt) error {
	switch strings.ToUpper(ir.Estimator) {
	case "XGBOOST.XGBREGRESSOR", "XGBREGRESSOR":
		defaultAttributes := map[string]interface{}{"objective": "reg:squarederror"}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	case "XGBOOST.XGBRFREGRESSOR", "XGBRFREGRESSOR":
		defaultAttributes := map[string]interface{}{"objective": "reg:squarederror", "learning_rate": 1, "subsample": 0.8, "colsample_bynode": 0.8, "reg_lambda": 1e-05}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	case "XGBOOST.XGBCLASSIFIER", "XGBCLASSIFIER":
		defaultAttributes := map[string]interface{}{"objective": "binary:logistic"}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	case "XGBOOST.XGBRFCLASSIFIER", "XGBRFCLASSIFIER":
		defaultAttributes := map[string]interface{}{"objective": "multi:softprob", "learning_rate": 1, "subsample": 0.8, "colsample_bynode": 0.8, "reg_lambda": 1e-05}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	case "XGBOOST.XGBRANKER", "XGBRANKER":
		defaultAttributes := map[string]interface{}{"objective": "rank:pairwise"}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	case "XGBOOST.GBTREE":
		defaultAttributes := map[string]interface{}{"booster": "gbtree"}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	case "XGBOOST.GBLINEAR":
		defaultAttributes := map[string]interface{}{"booster": "gblinear"}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	case "XGBOOST.DART":
		defaultAttributes := map[string]interface{}{"booster": "dart"}
		updateIfKeyDoesNotExist(ir.Attributes, defaultAttributes)
	default:
		return fmt.Errorf("unsupported model name %v, currently supports xgboost.gbtree, xgboost.gblinear, xgboost.dart", ir.Estimator)
	}
	return nil
}

func init() {
	// xgboost.gbtree, xgboost.dart, xgboost.gblinear share the same parameter set
	fullAttrValidator = attribute.NewDictionaryFromModelDefinition("xgboost.gbtree", "")
	fullAttrValidator.Update(attributeDictionary)
}

// -----------------------------------------------------------------------------
class MessagingConfig:
    """Named configuration entry for the messaging subsystem.

    ``get_name`` is kept for backward compatibility with existing callers;
    new code can read the ``name`` attribute directly.
    """

    def __init__(self, name: str) -> None:
        # Identifier of this messaging configuration.
        self.name = name

    def get_name(self) -> str:
        """Return the configuration's name."""
        return self.name

    def __repr__(self) -> str:
        # Debug-friendly representation, e.g. MessagingConfig(name='prod').
        return f"{type(self).__name__}(name={self.name!r})"
package org.rs2server.rs2.model.minigame.impl;

import org.rs2server.rs2.model.GameObject;
import org.rs2server.rs2.model.Location;
import org.rs2server.rs2.model.event.ClickEventManager;
import org.rs2server.rs2.model.event.EventListener;
import org.rs2server.rs2.model.player.Player;

import java.util.ArrayList;
import java.util.HashMap;

/**
 * Singleton manager for the Pest Control minigame: tracks the three waiting
 * boats (novice/intermediate/expert), games in progress, and the map heights
 * allocated to running game instances. Also handles boat entry/exit clicks.
 */
public class PestControlManager extends EventListener {

	/** Eagerly-created singleton instance. */
	private static final PestControlManager INSTANCE = new PestControlManager();

	//multi dimensional for boat levels
	// One NPC-id row per boat difficulty (novice, intermediate, expert).
	public static final int[][] PC_NPCS = new int[][]{
			{1694, 1695, 1696, 1697, 1714, 1715, 1716, 1717, 1718, 1709, 1710, 1711, 1724, 1725, 1726, 1727, 1734, 1735, 1736, 1737, 1689},
			{3736, 3737, 3738, 3739, 3756, 3757, 3758, 3759, 3749, 3766, 3767, 3744, 3768, 3769, 3773, 3774, 3775, 3727, 3728, 3729, 3730},
			{3740, 3741, 3760, 3761, 3750, 3751, 3745, 3746, 3770, 3771, 3776, 3731}
	};

	/** @return the singleton manager. */
	public static PestControlManager getPestControlManager() {
		return INSTANCE;
	}

	/**
	 * The three lobby boats: required combat level, gangplank object id, and
	 * the tiles used when boarding (startLoc) and leaving (endLoc).
	 */
	public enum Boat {
		NOVICE(40, 14315, Location.create(2657, 2639, 0), Location.create(2660, 2639, 0)),
		INTERMEDIATE(70, 25631, Location.create(2644, 2644, 0), Location.create(2641, 2644, 0)),
		EXPERT(100, 25632, Location.create(2638, 2653, 0), Location.create(2635, 2653, 0));

		public int objId;
		public Location startLoc;
		public int lvl;
		public Location endLoc;

		Boat(int reqLvl, int objId, Location start, Location end) {
			this.lvl = reqLvl;
			this.objId = objId;
			this.startLoc = start;
			this.endLoc = end;
		}
	}

	/**
	 * The four portals: shield NPC id, interface config id, portal spawn tile
	 * and the ring of tiles where its defenders/attackers appear.
	 *
	 * NOTE(review): EAST's spawnLocs cluster around x=2644 while SOUTH_WEST's
	 * cluster around x=2680, the opposite of their spawn tiles — these two
	 * tables may be swapped; confirm against the game map before relying on them.
	 */
	public enum Portal {
		WEST(1747, 18, Location.create(2628, 2591, 0), new Location[]{
				Location.create(2630, 2594, 0), Location.create(2631, 2594, 0), Location.create(2631, 2593, 0),
				Location.create(2631, 2592, 0), Location.create(2631, 2591, 0), Location.create(2631, 2590, 0),
				Location.create(2630, 2590, 0)}),
		EAST(1748, 20, Location.create(2680, 2588, 0), new Location[] {
				Location.create(2644, 2571, 0), Location.create(2644, 2572, 0), Location.create(2645, 2572, 0),
				Location.create(2646, 2572, 0), Location.create(2647, 2572, 0), Location.create(2648, 2572, 0),
				Location.create(2648, 2571, 0) }),
		SOUTH_EAST(1749, 22, Location.create(2669, 2570, 0), new Location[] {
				Location.create(2668, 2572, 0), Location.create(2668, 2573, 0), Location.create(2669, 2573, 0),
				Location.create(2670, 2573, 0), Location.create(2671, 2573, 0), Location.create(2672, 2573, 0),
				Location.create(2672, 2572, 0) }),
		SOUTH_WEST(1750, 24, Location.create(2645, 2569, 0), new Location[] {
				Location.create(2680, 2587, 0), Location.create(2679, 2587, 0), Location.create(2679, 2588, 0),
				Location.create(2679, 2589, 0), Location.create(2679, 2590, 0), Location.create(2679, 2591, 0),
				Location.create(2680, 2591, 0) });

		public int shieldId;
		public int interfacConfig;
		public Location spawn;
		public Location[] spawnLocs;

		Portal(int shieldId, int interfaceConfig, Location spawn, Location[] spawns) {
			this.shieldId = shieldId;
			this.interfacConfig = interfaceConfig;
			this.spawn = spawn;
			this.spawnLocs = spawns;
		}
	}

	/** Lookup from gangplank object id to its boat. */
	public static HashMap<Integer, Boat> boatMap = new HashMap<Integer, Boat>();

	static {
		for (Boat b : Boat.values()) {
			boatMap.put(b.objId, b);
		}
	}

	/** Games currently being played. */
	public ArrayList<PestControl> minigamesInProgress = new ArrayList<PestControl>();
	/** One waiting lobby per Boat ordinal (novice/intermediate/expert). */
	public PestControl[] minigamesWaiting = new PestControl[3];
	/** heights[i] is true when map height i*4 is occupied by a game instance. */
	public boolean[] heights = new boolean[256];

	/** Create the three initial waiting lobbies, each on its own free height. */
	public void init() {
		minigamesWaiting[0] = new PestControl(Boat.NOVICE, findAvailableHeight());
		minigamesWaiting[1] = new PestControl(Boat.INTERMEDIATE, findAvailableHeight());
		minigamesWaiting[2] = new PestControl(Boat.EXPERT, findAvailableHeight());
	}

	/**
	 * Find the first unoccupied height slot (multiples of 4); falls back to 0
	 * when all 256 slots are taken.
	 */
	private int findAvailableHeight() {
		for (int i = 0; i < heights.length; i++) {
			if (!heights[i]) {
				return i * 4;
			}
		}
		return 0;
	}

	@Override
	public void register(ClickEventManager manager) {
		// Object click registration is currently disabled.
//		for (Boat b : boatMap.values()) {
//			manager.registerObjectListener(b.objId, this);
//		}
//		manager.registerObjectListener(14314, this);
//		manager.registerObjectListener(25629, this);
//		manager.registerObjectListener(25630, this);
	}

	/**
	 * Put a player into a boat's waiting lobby, enforcing the combat-level
	 * requirement.
	 */
	public void addPlayerToBoat(Boat boat, Player player) {
		if (boat.lvl > player.getSkills().getCombatLevel()) {
			player.getActionSender().sendMessage("You need a combat level of " + boat.lvl + " to enter this boat!");
			return;
		}
		minigamesWaiting[boat.ordinal()].joinPlayer(player);
	}

	/**
	 * Handle clicks on the boat gangplanks (enter) and the exit ladders
	 * (14314/25629/25630, one per difficulty). Returns true when handled.
	 */
	@Override
	public boolean objectAction(Player player, int objectId, GameObject gameObject, Location location, ClickOption option) {
		if (boatMap.containsKey(objectId)) {
			getPestControlManager().addPlayerToBoat(boatMap.get(objectId), player);
			return true;
		} else
			switch(objectId) {
			case 14314:
				getPestControlManager().leaveBoat(Boat.NOVICE, player);
				return true;
			case 25629:
				getPestControlManager().leaveBoat(Boat.INTERMEDIATE, player);
				return true;
			case 25630:
				getPestControlManager().leaveBoat(Boat.EXPERT, player);
				return true;
			}
		return false;
	}

	/** Remove a player from the boat's waiting lobby. */
	private void leaveBoat(Boat boat, Player player) {
		minigamesWaiting[boat.ordinal()].quit(player);
	}

	/**
	 * Called when a waiting game starts/ends: free its height slot and create
	 * a fresh waiting lobby for that boat.
	 */
	public void removeWaiting(Boat boat, int height) {
		heights[height / 4] = false;
		minigamesWaiting[boat.ordinal()] = new PestControl(boat, findAvailableHeight());
	}
}
import React from "react";
import { connect } from "react-redux";
import styled from "emotion/react";

import Title from "../misc/title";
import UserInfo from "./user-info";
import { getActiveCard } from "../../selectors/cards";

// Top app bar: balance of the active card on the left, wallet title and
// user info on the right.
const HeaderLayout = styled.header`
	display: flex;
	justify-content: space-between;
	align-items: center;
	height: 74px;
	background: #fff;
	padding: 20px 30px;
	box-sizing: border-box;
	border-bottom: 1px solid rgba(0, 0, 0, 0.06);
`;

const Balance = styled(Title)`
	margin: 0;
`;

const BalanceSum = styled.span`
	font-weight: bold;
`;

/**
 * Application header. Connected to the store: receives the currently
 * selected card via the getActiveCard selector.
 */
class Header extends React.Component {
	// Render the active card's balance, or nothing while no card is selected.
	renderBalance() {
		const { activeCard } = this.props;
		if (activeCard) {
			// Missing balance is shown as 0; toFixed(2) then Number() trims
			// trailing zeros (e.g. 10.00 -> 10, 10.50 -> 10.5).
			let balance = activeCard.balance || 0;
			return (
				<Balance>
					Баланс:
					<BalanceSum>{` ${Number(balance.toFixed(2))} ${
						activeCard.currencySign
					}`}</BalanceSum>
				</Balance>
			);
		}
	}

	render() {
		return (
			<HeaderLayout>
				{this.renderBalance()}
				<Balance>Электронный кошелек</Balance>
				<UserInfo />
			</HeaderLayout>
		);
	}
}

const mapStateToProps = state => ({
	activeCard: getActiveCard(state)
});

export default connect(mapStateToProps)(Header);
// Mocha/should spec for the markdown-path resolver: maps request URLs onto
// markdown files under test/fixture, covering index pages, space/dash
// normalization, nested folders, and the fileExtension / defaultPageName /
// useExtensionInUrl options.
var resolver = require('../lib/resolver'),
    path = require('path');

describe('resolver', function() {
    // All lookups are rooted at the fixture directory.
    var rootDir = path.resolve(__dirname, 'fixture/');

    it('should resolve "/"', function() {
        var file = resolver('/', rootDir);
        file.should.equal(path.resolve(rootDir, 'index.md'));
    });

    it('should not resolve "/" when no index.md exists', function() {
        var file = resolver('/', path.resolve(rootDir, 'no-index'));
        should.not.exist(file);
    });

    it('should resolve "/new"', function() {
        var file = resolver('/new', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'new.md'));
    });

    // URL-encoded and dash-separated spellings both map to "test space.md".
    it('should resolve "/test%20space"', function() {
        var file = resolver('/test%20space', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'test space.md'));
    });

    it('should resolve "/test-space"', function() {
        var file = resolver('/test-space', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'test space.md'));
    });

    it('should resolve "/test-no-yml"', function() {
        var file = resolver('/test-no-yml', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'test-no-yml.md'));
    });

    it('should resolve "/sub/test"', function() {
        var file = resolver('/sub/test', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'sub/test.md'));
    });

    // Dash-to-space mapping also applies to directory names.
    it('should resolve "/space-in-name/test"', function() {
        var file = resolver('/space-in-name/test', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'space in name/test.md'));
    });

    it('should resolve "/space-in-name/sub/more-spaces"', function() {
        var file = resolver('/space-in-name/sub/more-spaces', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'space in name/sub/more spaces.md'));
    });

    // Literal dashes in filenames still work alongside the dash-to-space rule.
    it('should resolve "/space-in-name/sub/with-dash"', function() {
        var file = resolver('/space-in-name/sub/with-dash', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'space in name/sub/with-dash.md'));
    });

    it('should not resolve "/space-in-name/sub/bobus-file"', function() {
        var file = resolver('/space-in-name/sub/bogus-file', rootDir);
        should.not.exist(file);
    });

    // A trailing slash resolves to the directory's index page.
    it('should resolve "/sub/"', function() {
        var file = resolver('/sub/', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'sub/index.md'));
    });

    // "test" is a file, not a directory, so "/test/" must fail.
    it('should not resolve "/test/"', function() {
        var file = resolver('/test/', rootDir);
        should.not.exist(file);
    });

    // Option overrides: custom extension, custom index name, both combined.
    it('should resolve "/new" with file extension option', function() {
        var file = resolver('/new', rootDir, { fileExtension: 'markdown' });
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'new.markdown'));
    });

    it('should resolve "/sub/" with default page name option', function() {
        var file = resolver('/sub/', rootDir, { defaultPageName: 'default' });
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'sub/default.md'));
    });

    it('should resolve "/sub/" with all options', function() {
        var file = resolver('/sub/', rootDir, { defaultPageName: 'custom', fileExtension: 'foo' });
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'sub/custom.foo'));
    });

    // When useExtensionInUrl is set, the URL already carries the extension.
    it('should resolve "/test-use-extension.md" with use extension in url option', function() {
        var file = resolver('/test-use-extension.md', rootDir, { useExtensionInUrl: true });
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'test-use-extension.md'));
    });

    // No trailing slash but the path is a directory: fall back to its index.
    it('should resolve "/sub" with default page in containing folder', function() {
        var file = resolver('/sub', rootDir);
        should.exist(file);
        file.should.equal(path.resolve(rootDir, 'sub/index.md'));
    });
});
#! /bin/sh
# Build and run Quarkus Docker container according to instructions in https://access.redhat.com/documentation/en-us/red_hat_build_of_quarkus/1.7/html-single/compiling_your_quarkus_applications_to_native_executables/index

# BUG FIX: abort on the first failing step — previously a failed native build
# still built and ran a stale image.
set -e

IMAGE_NAME=transactions

# Build native executable
./mvnw package -Pnative -Dquarkus.native.container-build=true

# Build the container image
docker build -f src/main/docker/Dockerfile.native -t "$IMAGE_NAME" .

# Run the container; network "host" needed to access database on localhost
docker run --network="host" -i --rm -p 8080:8080 "$IMAGE_NAME"
def sort_list(list_to_sort):
    """Return a new list with the items of *list_to_sort* in ascending order.

    The input is left unmodified; ``sorted`` builds and returns a fresh list.
    """
    return sorted(list_to_sort)


# Quick demonstration run.
my_list = [2, 3, 5, 8, 1, 6, 4]
print(sort_list(my_list))
#!/bin/sh
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2015 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
#######################################################################
# Copyright [2014] [Cisco Systems, Inc.]
#
# Licensed under the Apache License, Version 2.0 (the \"License\");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an \"AS IS\" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################

# Pull in platform settings; this provides BOX_TYPE among other variables.
. /etc/device.properties

echo "bring_lan.sh script is called setting bring-lan to up" > /dev/console

# Tell the system event daemon to bring the LAN up.
sysevent set bring-lan up

# Bridge mode flag comes from syscfg ("0" = router mode).
BRIDGE_MODE=$(syscfg get bridge_mode)

if [ "$BOX_TYPE" = "XB3" ] && [ "$BRIDGE_MODE" = "0" ]; then
    echo "XB3 case:Router mode: Start brlan0 initialization" > /dev/console
    sysevent set multinet-up 1
else
    echo "brlan0 initialization for non-XB3 platforms and in bridge-mode is done in service_ipv4.sh"
fi
#!/usr/bin/env bash echo "Installing MongoDB" echo "Adding Key Server" sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 0C49F3730359A14518585931BC711F9BA15703C6 echo "Adding List File" echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/testing multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list echo "Updating apt packages" sudo apt-get update echo "Installing MongoDB" sudo apt-get install -y mongodb-org service mongod status