text
stringlengths
1
1.05M
#!/bin/bash
# Maintain "latest" symlinks and CSV/zip exports in the download directory.
#
# 1. Purge files older than 59 minutes.
# 2. For each dataset prefix, point latest_<prefix>.json / .csv at the
#    SECOND-newest dump (the newest file may still be being written).
# 3. Zip the account exports for download.

DOWNLOAD_DIR=/var/www/html/download

# Purge files older than 59 minutes.
find "$DOWNLOAD_DIR"/ -mmin +59 -exec rm {} \;

# publish_latest <prefix>
#   Refresh latest_<prefix>.json and latest_<prefix>.csv symlinks.
#   json2csv writes to a temp name in the CWD (path slashes mapped to
#   underscores) and the result is then moved next to the JSON dump.
publish_latest() {
    local prefix=$1
    local latest latestcsv latestcsvt
    # Sort dumps newest-first and take the second entry (skip the newest,
    # which may be incomplete); exclude the "latest" symlinks themselves.
    latest=$(find "$DOWNLOAD_DIR"/*.json -printf "%T@ %p\n" | sort -nr | grep "$prefix" | grep -v latest | head -n 2 | tail -n 1 | cut -d " " -f 2)
    latestcsv=${latest//json/csv}
    latestcsvt=${latestcsv//\//_}
    # -f: do not error when the symlink does not exist yet (first run).
    rm -f "$DOWNLOAD_DIR/latest_${prefix}.json"
    ln -s "$latest" "$DOWNLOAD_DIR/latest_${prefix}.json"
    json2csv -i "$latest" > "$latestcsvt"
    mv "$latestcsvt" "$latestcsv"
    rm -f "$DOWNLOAD_DIR/latest_${prefix}.csv"
    ln -s "$latestcsv" "$DOWNLOAD_DIR/latest_${prefix}.csv"
}

echo "Account"
publish_latest account_data
zip "$DOWNLOAD_DIR/latest_account_data.csv.zip" "$DOWNLOAD_DIR/latest_account_data.csv"
zip "$DOWNLOAD_DIR/latest_account_data.json.zip" "$DOWNLOAD_DIR/latest_account_data.json"

echo "Escrow"
publish_latest escrow_data
"""The model for the MNIST variant of the multitask experiment.""" import torch import torch.nn.functional as F from torch import Tensor, nn def assert_shape(x: Tensor, shape: (int, int)): """Raises an exception if the Tensor doesn't have the given final two dimensions.""" assert tuple(x.shape[-2:]) == tuple(shape), f'Expected shape ending {shape}, got {x.shape}' class Encoder(nn.Module): def __init__(self): super().__init__() self._encoder = nn.Sequential(nn.Conv2d(1, 16, 3, stride=3, padding=1), # [batch x 16 x 10 x 10] nn.ReLU(True), nn.MaxPool2d(2, stride=2), # [batch x 16 x 5 x 5] nn.Conv2d(16, 8, 3, stride=2, padding=1), # [batch x 8 x 3 x 3] nn.ReLU(True), nn.MaxPool2d(2, stride=1)) # [batch x 8 x 2 x 2] def forward(self, x): assert_shape(x, (28, 28)) return self._encoder(x) @staticmethod def get_out_features(): return 8 class Encoder2(nn.Module): """A larger encoder.""" def __init__(self): super().__init__() self._conv1 = nn.Conv2d(1, 32, 5, stride=2, padding=4) self._conv2 = nn.Conv2d(32, 16, 3, stride=2, padding=1) self._conv3 = nn.Conv2d(16, 16, 3, stride=1, padding=1) def forward(self, x): assert_shape(x, (28, 28)) x = F.relu(self._conv1(x)) assert_shape(x, (16, 16)) x = F.max_pool2d(x, 2, stride=2) assert_shape(x, (8, 8)) x = F.relu(self._conv2(x)) assert_shape(x, (4, 4)) x = F.relu(self._conv3(x)) assert_shape(x, (4, 4)) x = F.max_pool2d(x, 2, stride=2) assert_shape(x, (2, 2)) return x @staticmethod def get_out_features(): return 16 class Encoder3(nn.Module): """A much larger encoder.""" def __init__(self): super().__init__() self._conv1 = nn.Conv2d(1, 32, 3, stride=1, padding=1) self._conv2 = nn.Conv2d(32, 32, 3, stride=1, padding=1) self._conv3 = nn.Conv2d(32, 32, 2, stride=1, padding=0) self._conv4 = nn.Conv2d(32, 16, 2, stride=1, padding=0) self._conv5 = nn.Conv2d(16, 16, 2, stride=2, padding=1) def forward(self, x): assert_shape(x, (28, 28)) x = F.relu(self._conv1(x)) assert_shape(x, (28, 28)) x = F.relu(self._conv2(x)) assert_shape(x, (28, 28)) 
x = F.max_pool2d(x, 2, stride=2) assert_shape(x, (14, 14)) x = F.relu(self._conv3(x)) assert_shape(x, (13, 13)) x = F.relu(self._conv4(x)) assert_shape(x, (12, 12)) x = F.max_pool2d(x, 2, stride=2) assert_shape(x, (6, 6)) x = F.relu(self._conv5(x)) assert_shape(x, (4, 4)) x = F.max_pool2d(x, 2, stride=2) assert_shape(x, (2, 2)) return x @staticmethod def get_out_features(): return 16 class EncoderFC(nn.Module): """A fully connected encoder.""" def __init__(self): super().__init__() self._layers = nn.Sequential(nn.Linear(28 * 28, 512), # nn.ReLU(), # nn.Linear(512, 256), # nn.ReLU(), # nn.Linear(256, 32), # nn.ReLU()) def forward(self, x): assert_shape(x, (28, 28)) x = x.view(-1, 28 * 28) x = self._layers(x) # Return in shape (2, 2) so as to be compatible with the other decoders. return x.view(-1, 8, 2, 2) @staticmethod def get_out_features(): return 8 class Classifier(nn.Module): def __init__(self, num_classes: int, in_features: int): super().__init__() self._fc1 = nn.Linear(in_features=2 * 2 * in_features, out_features=128) self._fc2 = nn.Linear(in_features=128, out_features=num_classes) def forward(self, x): x = x.view(x.shape[0], -1) x = F.relu(self._fc1(x)) x = self._fc2(x) return x class Classifier2(nn.Module): """A larger fully connected classifier.""" def __init__(self, num_classes: int, in_features: int): super().__init__() self._layers = nn.Sequential(nn.Linear(in_features=2 * 2 * in_features, out_features=128), # nn.ReLU(), # nn.Linear(in_features=128, out_features=128), # nn.ReLU(), # nn.Linear(in_features=128, out_features=num_classes)) def forward(self, x): x = x.view(x.shape[0], -1) x = self._layers(x) return x class Reconstructor(nn.Module): def __init__(self, in_features: int): super().__init__() self._decoder = nn.Sequential(nn.ConvTranspose2d(in_features, 16, 3, stride=2), # b, 16, 5, 5 nn.ReLU(True), # nn.ConvTranspose2d(16, 8, 5, stride=3, padding=1), # b, 8, 15, 15 nn.ReLU(True), # nn.ConvTranspose2d(8, 1, 2, stride=2, padding=1), # b, 1, 28, 
28 nn.Tanh()) def forward(self, x): x = self._decoder(x) assert_shape(x, (28, 28)) return x class Reconstructor2(nn.Module): def __init__(self, in_features: int): super().__init__() self._convt1 = nn.ConvTranspose2d(in_features, 16, 2, stride=2) self._convt2 = nn.ConvTranspose2d(16, 8, 3, stride=2, padding=1) self._convt3 = nn.ConvTranspose2d(8, 4, 2, stride=2) self._convt4 = nn.ConvTranspose2d(4, 1, 2, stride=2) def forward(self, x): x = F.relu(self._convt1(x)) assert_shape(x, (4, 4)) x = F.relu(self._convt2(x)) assert_shape(x, (7, 7)) x = F.relu(self._convt3(x)) assert_shape(x, (14, 14)) x = F.relu(self._convt4(x)) assert_shape(x, (28, 28)) return torch.tanh(x) class ReconstructorFC(nn.Module): """A fully connected reconstruction model.""" def __init__(self, in_features: int): super().__init__() self._in_features = in_features self._layers = nn.Sequential(nn.Linear(in_features * 2 * 2, 1024), # nn.ReLU(), # nn.Linear(1024, 800), # nn.ReLU(), # nn.Linear(800, 28 * 28), # nn.Tanh()) def forward(self, x): x = x.view(-1, self._in_features * 2 * 2) x = self._layers(x) return x.view(-1, 28, 28) _models = [(Encoder, Classifier, Reconstructor), # None, # Model 2 is no longer implemented. (Encoder2, Classifier, Reconstructor), # (Encoder3, Classifier, Reconstructor2), # None, # 4 is no longer implemented, see commit da9d7c9. 
(EncoderFC, Classifier, ReconstructorFC), # (Encoder3, Classifier2, Reconstructor2)] class MultitaskMnistModel(nn.Module): def __init__(self, initial_ses: [float], model_version: int): super().__init__() encoder_con, classifier_con, reconstructor_con = _models[model_version] self._encoder = encoder_con() self._classifier1 = classifier_con(num_classes=3, in_features=self._encoder.get_out_features()) self._classifier2 = classifier_con(num_classes=10, in_features=self._encoder.get_out_features()) self._reconstructor = reconstructor_con(in_features=self._encoder.get_out_features()) assert len(initial_ses) == 3 self._weight1 = nn.Parameter(torch.tensor([initial_ses[0]])) self._weight2 = nn.Parameter(torch.tensor([initial_ses[1]])) self._weight3 = nn.Parameter(torch.tensor([initial_ses[2]])) def forward(self, x): assert_shape(x, (28, 28)) x = self._encoder(x) x1 = self._classifier1(x) x2 = self._classifier2(x) x3 = self._reconstructor(x) return x1, x2, x3 def get_loss_weights(self) -> (nn.Parameter, nn.Parameter, nn.Parameter): """Returns the loss weight parameters (s in the paper).""" return self._weight1, self._weight2, self._weight3
package com.ensoftcorp.open.dynadoc.core.wrapper;

import java.util.List;

import com.ensoftcorp.open.dynadoc.core.data.Commit;
import com.ensoftcorp.open.dynadoc.core.data.JavaClass;
import com.hp.gagawa.java.elements.A;
import com.hp.gagawa.java.elements.Div;
import com.hp.gagawa.java.elements.Table;
import com.hp.gagawa.java.elements.Tbody;
import com.hp.gagawa.java.elements.Td;
import com.hp.gagawa.java.elements.Tfoot;
import com.hp.gagawa.java.elements.Th;
import com.hp.gagawa.java.elements.Thead;
import com.hp.gagawa.java.elements.Tr;

/**
 * Renders the revision-control history of a {@link JavaClass} as an HTML
 * card containing a commits table (one row per {@link Commit}), built with
 * the gagawa HTML element library.
 */
public class ClassCommitsWrapper {

	// NOTE(review): referenced nowhere in this class as shown here —
	// presumably consumed by the page assembler that wires up the
	// jQuery DataTables script; confirm before removing.
	private static final String COMMITS_TABLE_JAVASCRIPT_FILE_NAME = "jquery-commits-table-script.js";

	private static final String COMMIT_SECTION_HEADER = "Revision Control Summary";

	private static final String COMMITS_TABLE_NAME = "commits-table";

	// Column titles; an extra untitled first column holds the show/hide toggle.
	private static final String [] COMMITS_TABLE_HEADERS = { "Commit Id", "Commiter", "Date/Time", "Summary", "Related Issues", "View Commit" };

	private List<Commit> commits;

	public ClassCommitsWrapper(JavaClass javaClass) {
		this.commits = javaClass.getCommits();
	}

	private List<Commit> getCommits() {
		return this.commits;
	}

	/**
	 * Builds the commits card: a header div plus a body div containing the
	 * commits table (head, one body row per commit, and an empty footer row
	 * used by the table script).
	 *
	 * @return the root {@code Div} of the rendered card
	 */
	public Div wrap() {
		Div tableDiv = new Div();
		tableDiv.setCSSClass("card text-white bg-warning mb-3");
		tableDiv.setStyle("max-width: 98%; margin: 10pt");

		Div cardHeader = new Div();
		cardHeader.setCSSClass("card-header");
		cardHeader.appendText(COMMIT_SECTION_HEADER);

		Div cardContent = new Div();
		cardContent.setCSSClass("card-body bg-white text-dark");

		Table table = new Table();
		table.setId(COMMITS_TABLE_NAME);
		table.setCSSClass("display small");
		table.setStyle("width:100%");

		// Header row: empty leading cell (toggle column) + titled columns.
		Thead tHead = new Thead();
		Tr tr = new Tr();
		Th firstColumn = new Th();
		tr.appendChild(firstColumn);
		for(String headerText: COMMITS_TABLE_HEADERS) {
			Th column = new Th();
			column.appendText(headerText);
			tr.appendChild(column);
		}
		tHead.appendChild(tr);
		table.appendChild(tHead);

		// One body row per commit.
		Tbody tBody = new Tbody();
		List<Commit> commits = this.getCommits();
		for(Commit commit: commits) {
			Tr commitRow = this.wrapCommit(commit);
			tBody.appendChild(commitRow);
		}
		table.appendChild(tBody);

		// Footer mirrors the header with empty cells.
		Tfoot tFoot = new Tfoot();
		tr = new Tr();
		firstColumn = new Th();
		tr.appendChild(firstColumn);
		for(int i = 0; i < COMMITS_TABLE_HEADERS.length; i++) {
			Th column = new Th();
			tr.appendChild(column);
		}
		tFoot.appendChild(tr);
		table.appendChild(tFoot);

		cardContent.appendChild(table);
		// NOTE(review): the card body is nested INSIDE the card header here
		// (header contains content contains table). Bootstrap cards normally
		// have header and body as siblings under the card div — confirm this
		// nesting is intentional before changing it.
		cardHeader.appendChild(cardContent);
		tableDiv.appendChild(cardHeader);
		return tableDiv;
	}

	/** Builds one table row for the given commit. */
	private Tr wrapCommit(Commit commit) {
		Tr row = new Tr();
		Td showHideColumn = this.wrapShowHideIcon();
		row.appendChild(showHideColumn);
		Td idColumn = this.wrapId(commit);
		row.appendChild(idColumn);
		Td commiterColumn = this.wrapCommiter(commit);
		row.appendChild(commiterColumn);
		Td dateTimeColumn = this.wrapDateTime(commit);
		row.appendChild(dateTimeColumn);
		Td summaryColumn = this.wrapSummary(commit);
		row.appendChild(summaryColumn);
		Td issuesColumn = this.wrapIssues(commit);
		row.appendChild(issuesColumn);
		Td urlColumn = this.wrapUrl(commit);
		row.appendChild(urlColumn);
		return row;
	}

	/** Empty cell carrying the CSS class the table script uses as a row toggle. */
	private Td wrapShowHideIcon() {
		Td td = new Td();
		td.setCSSClass("details-control");
		return td;
	}

	private Td wrapId(Commit commit) {
		Td td = new Td();
		td.appendText(commit.id());
		return td;
	}

	private Td wrapCommiter(Commit commit) {
		Td td = new Td();
		td.appendText(commit.commiter());
		return td;
	}

	private Td wrapDateTime(Commit commit) {
		Td td = new Td();
		td.appendText(commit.dateTime());
		return td;
	}

	private Td wrapSummary(Commit commit) {
		Td td = new Td();
		td.appendText(commit.summary());
		return td;
	}

	private Td wrapIssues(Commit commit) {
		Td td = new Td();
		td.appendText(commit.associatedIssuesString());
		return td;
	}

	/** Cell containing a button-styled link that opens the commit in a new tab. */
	private Td wrapUrl(Commit commit) {
		Td td = new Td();
		A link = new A();
		link.setHref(commit.url());
		link.setTarget("_blank");
		link.setAttribute("role", "button");
		link.setAttribute("class", "btn btn-primary");
		link.appendText("Show");
		td.appendChild(link);
		return td;
	}
}
#!/bin/bash
# Install the NVIDIA user-space driver matching the version of the kernel
# module already loaded on this machine (read from /proc). Tries the XFree86
# mirror, then the Tesla mirror, then a private S3 mirror.

nvidia_version=$(grep 'NVRM version:' /proc/driver/nvidia/version | grep -oE "Kernel Module\s+[0-9.]+" | awk '{print $3}')

# Bail out early if the kernel module version could not be detected;
# otherwise we would build nonsense URLs below.
if [ -z "$nvidia_version" ]; then
    echo "Could not detect nvidia kernel module version from /proc/driver/nvidia/version"
    exit 1
fi

driver_filename="NVIDIA-Linux-x86_64-${nvidia_version}.run"
driver_url="http://us.download.nvidia.com/XFree86/Linux-x86_64/${nvidia_version}/${driver_filename}"
tesla_driver_url="http://us.download.nvidia.com/tesla/${nvidia_version}/${driver_filename}"
s3_driver_url="http://ai2-vision-nvidia.s3-us-west-2.amazonaws.com/${driver_filename}"

# Test the download chain directly in the `if` so the exit status checked is
# unambiguously that of the last wget attempt.
if wget "$driver_url" -P /tmp/ || wget "$tesla_driver_url" -P /tmp/ || wget "$s3_driver_url" -P /tmp/; then
    # --no-kernel-module: the kernel module is already loaded; install only
    # the matching user-space components.
    sh "/tmp/${driver_filename}" -s --no-kernel-module
else
    echo "Error trying to install nvidia driver for $nvidia_version"
    exit 1
fi
# Frontend helpers for rendering CamaleonCms navigation menus and breadcrumbs.
module CamaleonCms::Frontend::NavMenuHelper
  # draw nav menu as html list
  # key: slug for nav menu
  # to register this, go to admin -> appearance -> menus
  # (DEPRECATED)
  def get_nav_menu(key = 'main_menu', class_name = "navigation")
    draw_menu({menu_slug: key, container_class: class_name})
  end

  # draw menu as an html
  # default configurations is for bootstrap support
  def draw_menu(args = {})
    args_def = {
      menu_slug: 'main_menu', #slug for the menu
      container: 'ul', #type of container for the menu
      container_id: '', #container id for the menu
      container_class: 'nav navbar-nav nav-menu', #container class for the menu
      item_container: 'li', #type of container for the items
      item_current: 'current-menu-item', #class for current menu item
      item_class: 'menu-item', # class for all menu items
      item_class_parent:"dropdown", # class for all menu items that contain sub items
      sub_container: 'ul', #type of container for sub items
      sub_class: 'dropdown-menu', # class for sub container
      callback_item: lambda{|args| }, # callback executed for each item (args = { menu_item, link, level, settings, has_children, link_attrs = "", index}).
      # menu_item: (Object) Menu object
      # link: (Hash) link data: {link: '', name: ''}
      # level: (Integer) current level
      # has_children: (boolean) if this item contain sub menus
      # settings: (Hash) menu settings
      # index: (Integer) Index Position of this menu
      # link_attrs: (String) Here you can add your custom attrs for current link, sample: id='my_id' data-title='#{args[:link][:name]}'
      # item_container_attrs: (String) Here you can add your custom attrs for link container.
      # In settings you can change the values for this item, like after, before, ..:
      # sample: lambda{|args| args[:settings][:after] = "<span class='caret'></span>" if args[:has_children]; args[:link_attrs] = "id='#{menu_item.id}'"; }
      # sample: lambda{|args| args[:settings][:before] = "<i class='fa fa-home'></i>" if args[:level] == 0 && args[:index] == 0; }
      before: '', # content before link text
      after: '', # content after link text
      link_current: 'current-link', # class for current menu link
      link_before: '', # content before link
      link_after: '', # content after link
      link_class: 'menu_link', # class for all menu links
      link_class_parent:"dropdown-toggle", # class for all menu links that contain sub items
      levels: -1, # max of levels to recover, -1 => return all levels
      container_prepend: '', # content prepend for menu container
      container_append: '' # content append for menu container
    }
    args = args_def.merge(args)
    nav_menu = current_site.nav_menus.find_by_slug(args[:menu_slug])
    # fall back to the first menu of the site if the slug was not found
    nav_menu = current_site.nav_menus.first unless nav_menu.present?
    # {__} is a placeholder replaced with the rendered items below
    html = "<#{args[:container]} class='#{args[:container_class]}' id='#{args[:container_id]}'>#{args[:container_prepend]}{__}#{args[:container_append]}</#{args[:container]}>"
    if nav_menu.present?
      html = html.sub("{__}", cama_menu_draw_items(args, nav_menu.children.reorder(:term_order)))
    else
      html = html.sub("{__}", "")
    end
    html
  end

  # draw menu items
  # At level 0 returns the items html; at deeper levels returns
  # [html wrapped in a sub-container, whether a descendant is current].
  def cama_menu_draw_items(args, nav_menu, level = 0)
    html = ""
    parent_current = false
    index = 0
    nav_menu.eager_load(:metas).each do |nav_menu_item|
      _args = args.dup
      data_nav_item = cama_parse_menu_item(nav_menu_item)
      next if data_nav_item == false
      _is_current = data_nav_item[:current] || site_current_path == data_nav_item[:link] || site_current_path == data_nav_item[:link].sub(".html", "")
      has_children = nav_menu_item.have_children? && (args[:levels] == -1 || (args[:levels] != -1 && level <= args[:levels]))
      r = { menu_item: nav_menu_item.decorate, link: data_nav_item, level: level, settings: _args, has_children: has_children, link_attrs: '', item_container_attrs: '', index: index }
      # the callback may mutate r[:settings] / r[:link_attrs] per item
      args[:callback_item].call(r)
      _args = r[:settings]
      if has_children
        html_children, current_children = cama_menu_draw_items(args, nav_menu_item.children.reorder(:term_order), level + 1)
      else
        html_children, current_children = "", false
      end
      parent_current = true if _is_current || current_children
      html += "<#{_args[:item_container]} #{r[:item_container_attrs]} class='#{_args[:item_class]} #{_args[:item_class_parent] if has_children} #{"#{_args[:item_current]}" if _is_current} #{"current-menu-ancestor" if current_children }'>#{_args[:link_before]} <a #{r[:link_attrs]} #{" target='#{nav_menu_item.target}'" if nav_menu_item.target.present?} href='#{data_nav_item[:link]}' class='#{args[:link_current] if _is_current} #{_args[:link_class_parent] if has_children} #{_args[:link_class]}' #{"data-toggle='dropdown'" if has_children } >#{_args[:before]}#{data_nav_item[:name]}#{_args[:after]}</a> #{_args[:link_after]} #{ html_children } </#{_args[:item_container]}>"
      index += 1
    end
    if level == 0
      html
    else
      html = "<#{args[:sub_container]} class='#{args[:sub_class]} #{"parent-#{args[:item_current]}" if parent_current} level-#{level}'>#{html}</#{args[:sub_container]}>"
      [html, parent_current]
    end
  end

  # filter and parse all menu items visible for current user and adding the flag for current_parent or current_item
  # max_levels: max levels to iterate
  # return an multidimensional array with all items until level 'max_levels'
  # internal_level: ingnore (managed by internal recursion)
  def cama_menu_parse_items(items, max_levels=-1, internal_level=0)
    res, is_current_parent, levels = [], false, [0]
    items.reorder(:term_order).each_with_index do |nav_menu_item, index|
      data_nav_item = cama_parse_menu_item(nav_menu_item)
      next if data_nav_item == false
      _is_current = data_nav_item[:current] || site_current_path == data_nav_item[:link] || site_current_path == data_nav_item[:link].sub(".html", "")
      has_children = nav_menu_item.have_children?
      has_children = false if max_levels > 0 && max_levels == internal_level
      data_nav_item[:label] = data_nav_item[:name]
      data_nav_item[:url] = data_nav_item[:link]
      r = { menu_item: nav_menu_item.decorate, level: internal_level, has_children: has_children, index: index, current_item: _is_current, current_parent: false, levels: 0 }.merge(data_nav_item.except(:current, :name, :link))
      if has_children
        r[:children], _is_current_parent, r[:levels] = cama_menu_parse_items(nav_menu_item.children, max_levels, internal_level + 1)
        if _is_current_parent
          is_current_parent = true
          r[:current_parent] = true
        end
        r[:levels] = r[:levels] + 1
      end
      is_current_parent = true if r[:current_item]
      levels << r[:levels]
      res << r
    end
    if internal_level == 0
      res
    else
      [res, is_current_parent, levels.max]
    end
  end

  #******************* BREADCRUMBS *******************
  # draw the breadcrumb as html list
  def breadcrumb_draw
    res = []
    @_front_breadcrumb.each_with_index do |item, index|
      if @_front_breadcrumb.size == (index+1) #last menu
        res << "<li class='active'>#{item[0]}</li>"
      else
        res << "<li><a href='#{item[1]}'>#{item[0]}</a></li>"
      end
    end
    res.join("")
  end

  # add breadcrumb item at the end
  # label => label of the link
  # url: url for the link
  def breadcrumb_add(label, url = "", prepend = false)
    @_front_breadcrumb ||= []
    if prepend
      @_front_breadcrumb = @_front_breadcrumb.unshift([label, url])
    else
      @_front_breadcrumb << [label, url]
    end
  end

  # Parse a single menu item into {link:, name:, ...} (or false to skip it),
  # dispatching on the item kind and running the registered hooks.
  def cama_parse_menu_item(nav_menu_item, is_from_backend = false)
    type_menu, result = nav_menu_item.kind, false
    begin
      case type_menu
      when 'post'
        post = CamaleonCms::Post.find(nav_menu_item.url).decorate
        if is_from_backend || post.can_visit?
          result = {link: post.the_url(as_path: true), name: post.the_title, type_menu: type_menu, url_edit: post.the_edit_url}
          result[:current] = @cama_visited_post.present? && @cama_visited_post.id == post.id unless is_from_backend
        end
      when 'category'
        category = CamaleonCms::Category.find(nav_menu_item.url).decorate
        result = {link: category.the_url(as_path: true), name: category.the_title, url_edit: category.the_edit_url}
        result[:current] = @cama_visited_category.present? && @cama_visited_category.id == category.id unless is_from_backend
      when 'post_tag'
        post_tag = CamaleonCms::PostTag.find(nav_menu_item.url).decorate
        result = {link: post_tag.the_url(as_path: true), name: post_tag.the_title, url_edit: post_tag.the_edit_url}
        result[:current] = @cama_visited_tag.present? && @cama_visited_tag.id == post_tag.id unless is_from_backend
      when 'post_type'
        post_type = CamaleonCms::PostType.find(nav_menu_item.url).decorate
        result = {link: post_type.the_url(as_path: true), name: post_type.the_title, url_edit: post_type.the_edit_url}
        result[:current] = @cama_visited_post_type.present? && @cama_visited_post_type.id == post_type.id unless is_from_backend
      when 'external'
        result = {link: nav_menu_item.url.to_s.translate, name: nav_menu_item.name.to_s.translate, current: false}
        # permit to customize or mark as current menu
        # _args: (HASH) {menu_item: Model Menu Item, parsed_menu: Parsed Menu }
        # Sample parsed_menu: {link: "url of the link", name: "Text of the menu", current: Boolean (true => is current menu, false => not current menu item)}
        unless is_from_backend
          result[:link] = cama_root_path if result[:link] == "root_url"
          result[:link] = site_current_path if site_current_path == "#{current_site.the_path}#{result[:link]}"
          result[:current] = result[:link] == site_current_url || result[:link] == site_current_path
          _args = {menu_item: nav_menu_item, parsed_menu: result}; hooks_run("on_external_menu", _args)
          result = _args[:parsed_menu]
        end
      else
        # permit to build custom menu items registered as Custom Menu by hook "nav_menu_custom"
        # sample: def my_parse_custom_menu_item_listener(args);
        #   if args[:menu_item].kind == 'MyModelClass'
        #     my_model = MyModelClass.find(args[:menu_item].url)
        #     res = {name: my_model.name, url_edit: my_model_edit_url(id: my_model.id), link: my_model_public_url(id: my_model.id)}
        #     res[:current] = site_current_path == my_model_public_url(id: my_model.id) unless args[:is_from_backend]
        #     args[:parsed_menu] = res
        #   end
        # end
        hook_args={menu_item: nav_menu_item, parsed_menu: false, is_from_backend: is_from_backend}; hooks_run('parse_custom_menu_item', hook_args)
        result = hook_args[:parsed_menu]
      end
    rescue => e
      Rails.logger.error "Camaleon CMS - Menu Item Not Found => Skipped menu for: #{e.message} (#{nav_menu_item.inspect})".cama_log_style(:red)
    end
    # permit to customize data, like: current, title, ... of parsed menu item or skip menu item by assigning false into :parsed_menu
    unless is_from_backend
      _args = {menu_item: nav_menu_item, parsed_menu: result}; hooks_run("on_render_front_menu_item", _args)
      _args[:parsed_menu]
    else
      result
    end
  end
end
#!/bin/zsh
# Render a LaTeX math expression to a PNG in $HOME/equations.
# Usage: latex2png <name> <expression>

cd /tmp || exit 1

NAME="$1"
EXPR="$2"

if [[ -z "$NAME" || -z "$EXPR" ]]; then
  print "usage: $0 <name> <expression>" >&2
  exit 1
fi

# The heredoc is unquoted so ${EXPR} expands; backslashes must therefore be
# doubled consistently (the original mixed \\ and \\\ and emitted a broken
# preamble). "$ ... $" delimits inline math around the expression.
doc=$(cat <<EOF
\\documentclass[preview]{standalone}
\\usepackage{mathtools}
\\begin{document}
$ ${EXPR} $
\\end{document}
EOF
)

# Quote $doc so the document's newlines reach pdflatex intact.
print -r -- "$doc" | pdflatex 1> /dev/null

mkdir -p "$HOME/equations"
convert -density 800 texput.pdf -quality 100 "$HOME/equations/$NAME.png"
print "Outputted to: $HOME/equations/$NAME.png"

# -f: don't complain if pdflatex failed and some artifacts are missing.
rm -f texput.{pdf,log,aux}
#!/usr/bin/env bash
#
# Configure environment for a particular configuration for whitebox testing. To
# use this outside of nightly testing, set these two variables in the
# environment:
#
#   Variable   Values
#   ------------------------------------------------------
#   COMPILER   cray, intel, pgi, gnu
#   COMP_TYPE  TARGET, HOST-TARGET, HOST-TARGET-no-PrgEnv
#
# Optionally, the platform can be set with:
#
#   CRAY_PLATFORM_FROM_JENKINS
#
# The default is cray-xc. cray-xe is the other valid option.

CWD=$(cd $(dirname ${BASH_SOURCE[0]}) ; pwd)
source $CWD/functions.bash

# Ensure module commands are available.
local_bashrc=/etc/bash.bashrc.local
if [ -z "$(type module 2> /dev/null)" -a -f $local_bashrc ] ; then
    log_info "module command not available. Attempting to source ${local_bashrc}"
    source $local_bashrc
    if [ -z "$(type module 2> /dev/null)" ] ; then
        log_error "Could not access module command after sourceing local bashrc (${local_bashrc}). Exiting."
        exit 1
    fi
elif [ -z "$(type module 2> /dev/null)" ] ; then
    log_error "module command not available and local bashrc (${local_bashrc}) does not exist. Exiting."
    exit 2
fi

# Variable set by Jenkins to indicate type of whitebox. If it is not set, assume cray-xc.
platform=${CRAY_PLATFORM_FROM_JENKINS:-cray-xc}
log_info "Using platform: ${platform}"

# e.g. "cray-xc" -> "xc"; used to build the nightly config name.
short_platform=$(echo "${platform}" | cut -d- -f2)
log_info "Short platform: ${short_platform}"

# Setup vars that will help load the correct compiler module.
# NOTE(review): module_name is set here but not consumed in this script as
# shown — presumably read by sourced helpers; confirm before removing.
case $COMP_TYPE in
    TARGET)
        module_name=PrgEnv-${COMPILER}
        chpl_host_value=""
        export CHPL_TARGET_PLATFORM=$platform
        log_info "Set CHPL_TARGET_PLATFORM to: ${CHPL_TARGET_PLATFORM}"
        export CHPL_NIGHTLY_TEST_CONFIG_NAME="${short_platform}-wb.prgenv-${COMPILER}"
        ;;
    HOST-TARGET)
        module_name=PrgEnv-${COMPILER}
        chpl_host_value=cray-prgenv-${COMPILER}
        export CHPL_HOST_PLATFORM=$platform
        export CHPL_TARGET_PLATFORM=$platform
        log_info "Set CHPL_HOST_PLATFORM to: ${CHPL_HOST_PLATFORM}"
        log_info "Set CHPL_TARGET_PLATFORM to: ${CHPL_TARGET_PLATFORM}"
        export CHPL_NIGHTLY_TEST_CONFIG_NAME="${short_platform}-wb.host.prgenv-${COMPILER}"
        ;;
    HOST-TARGET-no-PrgEnv)
        # Use the bare compiler module (not a PrgEnv wrapper); "gnu" maps to
        # the "gcc" module name.
        the_cc=${COMPILER}
        if [ "${COMPILER}" = "gnu" ] ; then
            the_cc=gcc
        fi
        module_name=${the_cc}
        chpl_host_value=${COMPILER}
        export CHPL_NIGHTLY_TEST_CONFIG_NAME="${short_platform}-wb.${COMPILER}"
        ;;
    *)
        log_error "Unknown COMP_TYPE value: ${COMP_TYPE}. Exiting."
        exit 3
        ;;
esac

# load compiler versions from $CHPL_INTERNAL_REPO/build/compiler_versions.bash
# This should define load_target_compiler function and CHPL_GCC_TARGET_VERSION.
# The module uses the gen compiler to build the compiler and runtime, and the
# target version to test. For whitebox testing we use the target compiler for
# everything because there's no easy way to split up what we build with vs test
# with. We decided to always use the target compiler to get more exposure
# building with newer compilers.
source $CHPL_INTERNAL_REPO/build/compiler_versions.bash

# Always load the right version of GCC since we use it sometimes
# to e.g. build the Chapel compiler with COMP_TYPE=TARGET
if [ "${COMPILER}" != "gnu" ] ; then
    ### TEMPORARY
    # Restore the following line when we can.
    # module load gcc/${CHPL_GCC_TARGET_VERSION}
    # For now, we need to force it to gcc 7.3.0 so its libraries will
    # link with earlier versions of the Intel compiler.
    module load gcc/7.3.0
fi

# quiet libu warning about cpuid detection failure
if [ "${COMPILER}" == "cray" ] ; then
    export RFE_811452_DISABLE=true
fi

# Then load the selected compiler
load_target_compiler ${COMPILER}

# Do minor fixups
case $COMPILER in
    cray|intel|gnu)
        # swap out network modules to get "host-only" environment
        log_info "Swap network module for host-only environment."
        module unload $(module list -t 2>&1 | grep craype-network)
        module load craype-network-none
        ;;
    pgi)
        # EJR (04/07/16): Since the default pgi was upgraded from 15.10.0 to
        # 16.3.0 on 04/02/16 the speculative gmp build gets stuck in an
        # infinite loop during `make check` while trying to test t_scan.c. Just
        # force disable gmp until there's more time to investigate this.
        export CHPL_GMP=none
        ;;
    *)
        log_error "Unknown COMPILER value: ${COMPILER}. Exiting."
        exit 4
        ;;
esac

# On esxbld build hosts, cray-libsci is not usable; unload it if present.
if [ "${HOSTNAME:0:6}" = "esxbld" ] ; then
    libsci_module=$(module list -t 2>&1 | grep libsci)
    if [ -n "${libsci_module}" ] ; then
        log_info "Unloading cray-libsci module: ${libsci_module}"
        module unload $libsci_module
    fi
fi

export CHPL_HOME=$(cd $CWD/../.. ; pwd)

# Set CHPL_HOST_COMPILER.
if [ -n "${chpl_host_value}" ] ; then
    export CHPL_HOST_COMPILER="${chpl_host_value}"
    log_info "Set CHPL_HOST_COMPILER to: ${CHPL_HOST_COMPILER}"
fi

# Disable launchers, comm.
export CHPL_LAUNCHER=none
export CHPL_COMM=none

# Set some vars that nightly cares about.
export CHPL_NIGHTLY_LOGDIR=${CHPL_NIGHTLY_LOGDIR:-/data/sea/chapel/Nightly}
export CHPL_NIGHTLY_CRON_LOGDIR="$CHPL_NIGHTLY_LOGDIR"

# Ensure that one of the CPU modules is loaded.
my_arch=$($CHPL_HOME/util/chplenv/chpl_cpu.py 2> /dev/null)
if [ "${my_arch}" = "none" ] ; then
    log_info "Loading craype-shanghai module to stifle chpl_cpu.py warnings."
    module load craype-shanghai
fi

# no cpu targeting module supports the esxbld CPUs, so force x86-64
if [ "${HOSTNAME:0:6}" = "esxbld" ] ; then
    module unload $(module list -t 2>&1| grep craype-| grep -v craype-network |grep -v craype-target)
    log_info "Setting CRAY_CPU_TARGET to x86-64 to stifle chpl_cpu.py warnings."
    export CRAY_CPU_TARGET=x86-64
fi

if [ "${COMP_TYPE}" != "HOST-TARGET-no-PrgEnv" ] ; then
    log_info "Loading fftw module."
    module load fftw
fi

log_info "Current loaded modules:"
module list

log_info "Chapel environment:"
$CHPL_HOME/util/printchplenv --all --no-tidy
// Barrel file: re-exports the message library's public API so consumers can
// import everything from this single entry point.
export * from './lib/message';
export * from './lib/message-context';
export * from './lib/message-list';
export * from './lib/use-message';
def findMax(arr):
    """Return the largest element of *arr*, or None if *arr* is empty.

    Delegates to the builtin max() instead of re-implementing the scan
    (the original also shadowed the builtin with a local named ``max``).
    """
    if not arr:
        return None
    return max(arr)


if __name__ == "__main__":
    # Demo driver, guarded so importing this module has no side effects.
    arr = [1, 9, 4, 6, 8, 2]
    print(findMax(arr))
import sqlite3


def processInput(name):
    """Return all rows from Students whose name equals *name*.

    Uses a parameterized query (safe against SQL injection). The connection
    is closed in a ``finally`` block so it is released even if the query
    raises (the original leaked the connection on error).
    """
    conn = sqlite3.connect('database.db')
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM Students WHERE name = ?;", (name,))
        results = cursor.fetchall()
    finally:
        conn.close()
    return results
from flask import Flask, render_template, redirect, url_for, request
import sqlite3

app = Flask(__name__)

# Single SQLite database file used by every route.
DATABASE = 'example.db'


def _execute(query, params=(), fetch=False):
    """Run one statement against DATABASE on a fresh connection.

    A new connection per call replaces the original module-level connection
    shared across requests (which needed check_same_thread=False and is
    unsafe under a threaded server). Commits writes; returns fetched rows
    when fetch=True, else None. The connection is always closed.
    """
    conn = sqlite3.connect(DATABASE)
    try:
        cur = conn.execute(query, params)
        rows = cur.fetchall() if fetch else None
        conn.commit()
        return rows
    finally:
        conn.close()


# Create table at import time (module-level side effect kept for compatibility).
_execute("""CREATE TABLE IF NOT EXISTS customers
            (id INTEGER PRIMARY KEY, name text, age integer,
             email text, phone integer, address text)""")


# Add customer
@app.route("/add_customer", methods=["POST", "GET"])
def add_customer():
    """Show the add-customer form (GET) or insert a new customer (POST)."""
    msg = None
    if request.method == "POST":
        details = request.form
        _execute(
            "INSERT INTO customers (name, age, email, phone, address) VALUES (?, ?, ?, ?, ?)",
            (details['name'], details['age'], details['email'],
             details['phone'], details['address']))
        msg = "Customer added successfully"
    # Bug fix: the original returned None on GET (a 500 error in Flask);
    # always render the form, with msg only set after a successful POST.
    return render_template("add_customers.html", msg=msg)


# View customers
@app.route("/view_customers")
def view_customers():
    """List every customer row."""
    customers = _execute("SELECT * FROM customers", fetch=True)
    return render_template("view_customers.html", customers=customers)


# Edit customer
@app.route("/edit_customer/<id>")
def edit_customer(id):
    """Render the edit form pre-filled with the selected customer."""
    rows = _execute("SELECT * FROM customers WHERE id = ?", (id,), fetch=True)
    # rows[0] mirrors the original behavior; an unknown id raises (500),
    # same observable outcome as the original fetchall()[0].
    return render_template("edit_customer.html", customer=rows[0])


# Update customer
@app.route("/update_customer/<id>", methods=["POST", "GET"])
def update_customer(id):
    """Apply the posted edits, then return to the customer list."""
    if request.method == "POST":
        details = request.form
        _execute(
            "UPDATE customers SET name = ?, age = ?, email = ?, phone = ?, address = ? WHERE id = ?",
            (details['name'], details['age'], details['email'],
             details['phone'], details['address'], id))
    # Bug fix: the original returned None on GET; redirecting is harmless
    # and keeps the POST path's behavior unchanged.
    return redirect(url_for('view_customers'))


# Delete customer
@app.route("/delete_customer/<id>")
def delete_customer(id):
    """Remove the selected customer, then return to the customer list."""
    _execute("DELETE FROM customers WHERE id = ?", (id,))
    return redirect(url_for('view_customers'))


# Run server
if __name__ == "__main__":
    app.run()
# -*- coding: utf-8 -*-
#
#  This file is part of SplashSync Project.
#
#  Copyright (C) 2015-2020 Splash Sync  <www.splashsync.com>
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
#  For the full copyright and license information, please view the LICENSE
#  file that was distributed with this source code.
#

# FIX: "Iterable" moved to collections.abc; importing it from "collections"
# was removed in Python 3.10.
from collections.abc import Iterable


class Logger:
    """Splash Logger: Collect & Return Remote Logs.

    Messages are stored in four queues (info, warning, error, debug); every
    stored entry is prefixed with the configured prefix.
    """

    def __init__(self, debug=False):
        """Init Splash Logger.

        :param debug: when True, vvv() debug messages are recorded too.
        """
        self.debug = debug
        self.prefix = "Splash Py Client"
        self.msg = []   # info / success messages
        self.war = []   # warning messages
        self.err = []   # error messages
        self.deb = []   # debug messages

    def set_debug(self, debug):
        """Setup Debug Mode.

        :return: self
        """
        self.debug = debug
        return self

    def set_prefix(self, prefix):
        """Set Logs Messages Prefix.

        :return: self
        """
        self.prefix = prefix
        return self

    def info(self, text):
        """Add an Info / Success Message to Log.

        :return: True, so callers can "return logger.info(...)" on success.
        """
        self.__add("msg", text)
        return True

    def warn(self, text):
        """Add a Warning Message to Log.

        :return: True (consistent with info(); warnings are not failures).
        """
        self.__add("war", text)
        return True

    def error(self, text):
        """Add an Error Message to Log.

        :return: False, so callers can "return logger.error(...)" on failure.
        """
        self.__add("err", text)
        return False

    def fromException(self, exception, trace=True):
        """Add an Exception to Log, optionally with its formatted traceback.

        :return: False (exceptions are always failures)
        """
        import traceback
        # Detect Error Main Message
        if hasattr(exception, "message"):
            self.__add("err", exception.message)
        else:
            self.__add("err", exception)
        # Detect Error Trace
        if trace:
            self.__add("err", "".join(
                traceback.TracebackException.from_exception(exception).format()
            ))
        return False

    def dump(self, data, name=None):
        """Dump Data and Add to Logs as a warning message.

        Scalars are stringified directly; anything else is JSON-serialized.

        :param data: value to dump
        :param name: optional label used instead of the generic "Dump" prefix
        """
        from json import dump
        from io import StringIO

        if name is None:
            prefix = "Dump " + str(type(data)) + " : "
        else:
            prefix = name + " " + str(type(data)) + " : "
        # bool must be tested before int: bool is a subclass of int.
        if isinstance(data, bool):
            return self.__add("war", prefix + str(data))
        if isinstance(data, (int, float, str)):
            return self.__add("war", prefix + str(data))
        try:
            buffer = StringIO()
            dump(data, buffer)
            return self.__add("war", prefix + buffer.getvalue())
        except Exception as exception:
            # Unserializable data: log the serialization error instead.
            return self.__add("war", exception)

    def vvv(self, text):
        """Add a Debug Message to Log (recorded only in debug mode)."""
        if self.debug is True:
            self.__add("deb", text)
        return True

    def clear(self):
        """Clear Log Queue.

        :return: self
        """
        self.msg = []
        self.war = []
        self.err = []
        self.deb = []
        return self

    def __add(self, msg_type, text):
        """Prefix a message and push it onto the queue named by msg_type."""
        message = "[" + self.prefix + "] " + str(text)
        getattr(self, msg_type).append(message)

    def to_logging(self):
        """Push All Messages to Python's logging module.

        The root logger level is temporarily lowered to DEBUG so every queued
        message is emitted, then restored.

        :return: self
        """
        import logging
        level = logging.getLogger().level
        logging.getLogger().setLevel(logging.DEBUG)
        for msg in self.msg:
            logging.info(msg)
        for war in self.war:
            logging.warning(war)
        for err in self.err:
            logging.error(err)
        for deb in self.deb:
            logging.debug(deb)
        logging.getLogger().setLevel(level)
        return self

    def export(self):
        """Export All Messages for Messages Packing.

        :return: dict of the four queues, entries keyed "msg-0", "war-0", ...
        """
        logs = {"msg": {}, "war": {}, "err": {}, "deb": {}}
        for index, text in enumerate(self.msg):
            logs['msg'][f'msg-{index}'] = text
        for index, text in enumerate(self.war):
            logs['war'][f'war-{index}'] = text
        for index, text in enumerate(self.err):
            logs['err'][f'err-{index}'] = text
        for index, text in enumerate(self.deb):
            logs['deb'][f'deb-{index}'] = text
        return logs

    def append(self, raw_logs):
        """Import All Messages from a Splash Message (export() format)."""
        for queue in ("msg", "war", "err", "deb"):
            if queue in raw_logs and isinstance(raw_logs[queue], Iterable):
                for message in raw_logs[queue]:
                    getattr(self, queue).append(raw_logs[queue][message])

    def on_fault(self, soap_fault):
        """Import Error Message from a Soap Fault."""
        self.err.append(soap_fault.faultstring)

    def to_html(self, clean=False):
        """Return all current logs in Html format.

        :param clean: when True, the queues are cleared after export.
        :return: str
        """
        from splashpy.helpers import HtmlLogExporter
        raw_html = HtmlLogExporter.to_html(self.export())
        if clean:
            self.clear()
        return raw_html

    def to_html_list(self, clean=False):
        """Return all current logs in Html Checklist format.

        :param clean: when True, the queues are cleared after export.
        :return: str
        """
        from splashpy.helpers import HtmlLogExporter
        raw_html = HtmlLogExporter.to_html_list(self.export())
        if clean:
            self.clear()
        return raw_html


if __name__ == "__main__":
    logger = Logger()
    logger.info("Test Info Message")
    logger.error("Test Error Message")
    logger.warn("Test Warning Message")
    logger.vvv("Test Debug Message")
    logger.to_logging()
    print(logger.export())
public class ThickMatrix { private String identifier; private double length; private PhaseMatrix m_matGen; public ThickMatrix(String strId, double dblLen, PhaseMatrix matPhiSub) { this.identifier = strId; this.length = dblLen; this.m_matGen = matPhiSub; } // Add any additional methods for matrix manipulation here // For example, matrix multiplication, addition, etc. public PhaseMatrix getMatrix() { return m_matGen; } // Example of matrix multiplication method public PhaseMatrix multiply(PhaseMatrix otherMatrix) { // Perform matrix multiplication and return the result } // Other methods for matrix operations can be added as needed }
# frozen_string_literal: true

require_relative 'foreign_key_add'

module DeclareSchema
  module SchemaChange
    # Removing a foreign key is the exact mirror of adding one, so this class
    # inherits from ForeignKeyAdd and swaps its up/down migration commands.
    class ForeignKeyRemove < ForeignKeyAdd
      # Keep handles on the inherited implementations *before* overriding
      # them, so the overrides below can still delegate to the originals.
      alias index_add_up_command up_command
      alias index_add_down_command down_command

      # "up" for a removal is the parent's "down" command (drop the key).
      def up_command
        index_add_down_command
      end

      # "down" for a removal is the parent's "up" command (re-add the key).
      def down_command
        index_add_up_command
      end
    end
  end
end
package com.boot.feign.article;

import com.boot.data.CommonResult;
import com.boot.pojo.Article;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.*;

import java.util.List;
import java.util.Map;

/**
 * Feign client for the article service ("cloud-yblog-article").
 *
 * NOTE (translated from the original Chinese comment): observed in practice —
 * if a fallback is declared on this Feign interface, even throwing a Runtime
 * exception inside the fallback does NOT roll back the global transaction,
 * so the fallback declaration is disabled for now.
 */
@Component
//@FeignClient(value = "cloud-yblog-article",fallback = ArticleFeignImpl.class)
@FeignClient(value = "cloud-yblog-article")
public interface ArticleFeign {

    /** Publishes a new article. */
    @ResponseBody
    @PostMapping(path = "/feign/article/publishArticle")
    public String publishArticle(@RequestBody Article article);

    /** Updates an existing article. */
    @ResponseBody
    @PostMapping(path = "/feign/article/changeArticle")
    public String changeArticle(@RequestBody Article article);

    /** Updates the hit counter of the article with the given id. */
    @ResponseBody
    @GetMapping(path = "/feign/article/updateHits")
    public String updateHits(@RequestParam("id") long id);

    /** Sets the article's allow-comment flag to 1 (comments enabled). */
    @ResponseBody
    @GetMapping(path = "/feign/article/updateAllowCommentTo1")
    public String updateAllowCommentTo_1(@RequestParam("id") long id);

    /** Sets the article's allow-comment flag to 0 (comments disabled). */
    @ResponseBody
    @GetMapping(path = "/feign/article/updateAllowCommentTo0")
    public String updateAllowCommentTo_0(@RequestParam("id") long id);

    /** Sets the article's recommend flag to 1. */
    @ResponseBody
    @GetMapping(path = "/feign/article/updateRecommendTo1")
    public String updateRecommendTo_1(@RequestParam("id") long id);

    /** Sets the article's recommend flag to 0. */
    @ResponseBody
    @GetMapping(path = "/feign/article/updateRecommendTo0")
    public String updateRecommendTo_0(@RequestParam("id") long id);

    /** Deletes the article with the given id. */
    @ResponseBody
    @GetMapping(path = "/feign/article/deleteArticle")
    public String deleteArticle(@RequestParam("articleid") long articleid);
}
#if defined(TEMPEST_BUILD_DIRECTX12)

#include "dxbuffer.h"
#include "dxdevice.h"

#include <cassert>

#include "gapi/graphicsmemutils.h"

using namespace Tempest;
using namespace Tempest::Detail;

// Thin RAII wrapper around an ID3D12Resource used as a buffer.
DxBuffer::DxBuffer(DxDevice* dev, ComPtr<ID3D12Resource>&& b, UINT sizeInBytes)
  :dev(dev), impl(std::move(b)), sizeInBytes(sizeInBytes) {
  }

DxBuffer::DxBuffer(Tempest::Detail::DxBuffer&& other)
  :dev(other.dev), impl(std::move(other.impl)),sizeInBytes(other.sizeInBytes) {
  // Leave the moved-from object reporting zero size.
  other.sizeInBytes=0;
  }

// Uploads `count` elements of `size` bytes each (stored with stride
// `alignedSz`) starting at element offset `off`.
void DxBuffer::update(const void* data, size_t off, size_t count, size_t size, size_t alignedSz) {
  auto& dx = *dev;

  D3D12_HEAP_PROPERTIES prop = {};
  ID3D12Resource& ret = *impl;
  ret.GetHeapProperties(&prop,nullptr);
  if(prop.Type==D3D12_HEAP_TYPE_UPLOAD) {
    // Host-visible upload heap: map the range and write directly.
    dx.dataMgr().waitFor(this); // write-after-write case
    D3D12_RANGE rgn = {off*alignedSz,count*alignedSz};
    void* mapped = nullptr;
    dxAssert(ret.Map(0,&rgn,&mapped));
    // NOTE(review): the pointer is advanced by `off`, while the mapped range
    // uses byte offset `off*alignedSz` — confirm this asymmetry is intended.
    mapped = reinterpret_cast<uint8_t*>(mapped)+off;
    copyUpsample(data,mapped,count,size,alignedSz);
    ret.Unmap(0,&rgn);
    return;
    }

  // Device-local heap: copy through a staging buffer via a command list.
  auto stage = dx.dataMgr().allocStagingMemory(data,count,size,alignedSz,MemUsage::TransferSrc,BufferHeap::Upload);

  Detail::DSharedPtr<Buffer*> pstage(new Detail::DxBuffer(std::move(stage)));
  Detail::DSharedPtr<Buffer*> pbuf (this);

  auto cmd = dx.dataMgr().get();
  cmd->begin();
  cmd->hold(pbuf); // NOTE: DxBuffer may be deleted, before copy is finished
  cmd->hold(pstage);
  cmd->copy(*this, off*alignedSz, *pstage.handler,0, count*alignedSz);
  cmd->end();

  dx.dataMgr().waitFor(this); // write-after-write case
  dx.dataMgr().submit(std::move(cmd));
  }

// Reads `size` bytes starting at byte offset `off` into `data`.
void DxBuffer::read(void* data, size_t off, size_t size) {
  auto& dx = *dev;

  D3D12_HEAP_PROPERTIES prop = {};
  ID3D12Resource& ret = *impl;
  ret.GetHeapProperties(&prop,nullptr);
  if(prop.Type==D3D12_HEAP_TYPE_READBACK) {
    // Host-visible readback heap: map the range and copy out directly.
    dx.dataMgr().waitFor(this); // write-after-write case
    D3D12_RANGE rgn = {off,size};
    void* mapped=nullptr;
    dxAssert(ret.Map(0,&rgn,&mapped));
    mapped = reinterpret_cast<uint8_t*>(mapped)+off;
    std::memcpy(data,mapped,size);
    ret.Unmap(0,nullptr);
    return;
    }

  // Device-local heap: copy into a readback staging buffer, wait for the
  // GPU, then map the staging buffer and copy to the caller.
  auto stage = dx.dataMgr().allocStagingMemory(nullptr,size,1,1,MemUsage::TransferDst,BufferHeap::Readback);

  auto cmd = dx.dataMgr().get();
  cmd->begin();
  cmd->copy(stage,0, *this,off,size);
  cmd->end();

  dx.dataMgr().waitFor(this);
  dx.dataMgr().submitAndWait(std::move(cmd));

  D3D12_RANGE rgn = {0,size};
  void* mapped=nullptr;
  dxAssert(stage.impl->Map(0,&rgn,&mapped));
  mapped = reinterpret_cast<uint8_t*>(mapped);
  std::memcpy(data,mapped,size);
  stage.impl->Unmap(0,nullptr);
  }

// Copies S3TC-compressed mip chain data into the mapped buffer, padding each
// row of 4x4 blocks to D3D12_TEXTURE_DATA_PITCH_ALIGNMENT and each mip level
// to D3D12_TEXTURE_DATA_PLACEMENT_ALIGNMENT.
void DxBuffer::uploadS3TC(const uint8_t* d, uint32_t w, uint32_t h, uint32_t mipCnt, UINT blockSize) {
  ID3D12Resource& ret = *impl;
  D3D12_RANGE rgn = {0,sizeInBytes};
  void* mapped=nullptr;
  dxAssert(ret.Map(0,&rgn,&mapped));
  uint8_t* b = reinterpret_cast<uint8_t*>(mapped);

  uint32_t bufferSize = 0, stageSize = 0;
  for(uint32_t i=0; i<mipCnt; i++) {
    // S3TC stores 4x4 texel blocks of `blockSize` bytes each.
    UINT wBlk = (w+3)/4;
    UINT hBlk = (h+3)/4;

    UINT pitchS = wBlk*blockSize;
    UINT pitchA = alignTo(pitchS,D3D12_TEXTURE_DATA_PITCH_ALIGNMENT);

    for(uint32_t r=0;r<hBlk;++r) {
      std::memcpy(b+stageSize,d+bufferSize,pitchS);
      bufferSize += pitchS;
      stageSize  += pitchA;
      }
    stageSize = alignTo(stageSize,D3D12_TEXTURE_DATA_PLACEMENT_ALIGNMENT);

    w = std::max<uint32_t>(1,w/2);
    h = std::max<uint32_t>(1,h/2);
    }
  ret.Unmap(0,nullptr);
  }

#endif
package com.eiah.service.impl;

import org.springframework.stereotype.Service;

import com.eiah.service.RoleService;

/**
 * Placeholder RoleService implementation; the SqlSession-backed role lookup
 * is kept below, commented out.
 *
 * NOTE(review): if the RoleService interface declares findRoles(...), this
 * class will not compile until the commented code (and its imports) is
 * restored — verify against the interface definition.
 */
@Service
public class RoleServiceImpl implements RoleService{

//	@Autowired
//	private SqlSession sqlSession;

//	@Override
//	public List<Role> findRoles(String username) {
//		return sqlSession.getMapper(IRoleMapper.class).findRoles(username);
//	}
}
#!/usr/bin/env bash function checkout_knative_eventing { checkout_repo 'knative.dev/eventing' \ "${KNATIVE_EVENTING_REPO}" \ "${KNATIVE_EVENTING_VERSION}" \ "${KNATIVE_EVENTING_BRANCH}" } function checkout_knative_eventing_operator { checkout_repo 'knative.dev/eventing-operator' \ "${KNATIVE_EVENTING_OPERATOR_REPO}" \ "${KNATIVE_EVENTING_OPERATOR_VERSION}" \ "${KNATIVE_EVENTING_OPERATOR_BRANCH}" } function knative_eventing_tests { ( local exitstatus=0 logger.info 'Running eventing tests' checkout_knative_eventing go_test_e2e -timeout=90m -parallel=1 ./test/e2e \ --kubeconfig "$KUBECONFIG" \ --dockerrepo 'quay.io/openshift-knative' \ || exitstatus=$? && true print_test_result ${exitstatus} remove_temporary_gopath return $exitstatus ) } function knative_eventing_operator_tests { logger.info 'Running eventing operator tests' ( local exitstatus=0 checkout_knative_eventing_operator export TEST_NAMESPACE="${EVENTING_NAMESPACE}" go_test_e2e -timeout=20m -parallel=1 ./test/e2e \ --kubeconfig "$KUBECONFIG" \ || exitstatus=$? && true print_test_result ${exitstatus} remove_temporary_gopath return $exitstatus ) }
/**
 * Copyright (C) 2014 xuanhung2401.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.google.datastore.test;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;

import org.google.datastore.test.entity.Foo;
import org.junit.Test;

import static org.junit.Assert.*;

import cloud.google.datastore.GCDConfig;
import cloud.google.datastore.GCDService;
import cloud.google.datastore.GCDServiceFactory;

import cloud.google.datastore.entity.core.Key;

/**
 * Basic commit (insert/update) integration tests against the live Google
 * Cloud Datastore service. Every test deletes its entities first so it
 * starts from a clean state.
 *
 * @author xuanhung2401
 */
public class CommitBasicTest {

	/**
	 * Create your project. Visit https://console.developers.google.com > Create
	 * Project, enter project name and project id.
	 */

	/**
	 * Important : Enable Google Cloud Datastore API. Visit
	 * https://console.developers.google.com. Choose your created project > APIs
	 * & auth (left menu) > APIs > Google Cloud Datastore API > On.
	 */

	/**
	 * Change these variable with yours to test api. Choose your created project
	 * > APIs & auth (left menu) > Credentials > Create New Client ID > Chooose
	 * Service account > Create Client ID. - projectName variable is your
	 * project id with "s~" before, example your project id : "source-gcd" ->
	 * projectName = "s~source-gcd". iss variable is Email address. To get p12
	 * key file, click Generate new P12 key, download, keyLocation variable is
	 * path to your p12 Key file.
	 */

	// NOTE(review): these are environment-specific credentials; the tests
	// below require network access to the real Datastore service.
	String projectName = "s~source-gcd";
	String iss = "299520893014-<EMAIL>";
	String keyLocation = "source-gcd-542f0520e284.p12";

	GCDConfig config = new GCDConfig(projectName, iss, keyLocation);
	GCDService ds = GCDServiceFactory.getInstance(config);

	/** Inserts one entity and verifies the returned key and lookup round-trip. */
	@Test
	public void testInsertOne() {
		Foo f = new Foo();
		f.setId("this-is-id-01");
		f.setName("This is Name 01");
		f.setIndexString("category-01");
		f.setIndexInt(1);
		Calendar cal = Calendar.getInstance();
		cal.set(2012, 07, 17);
		f.setDoc(cal.getTime());
		// Remove the entity first so the insert starts from a clean state.
		ds.commit(Foo.class).entities(f).delete();
		// start insert.
		List<Key<Foo>> listKey = ds.commit(Foo.class).entities(f).insert();
		assertEquals(listKey.size(), 1);
		assertEquals(listKey.get(0).getPath().get(0).getName(), f.getId());
		Foo lookupFoo = ds.lookup(Foo.class).id(f.getId()).get();
		assertEquals(lookupFoo.getId(), f.getId());
	}

	/** Inserts one entity, updates it, and verifies the updated value. */
	@Test
	public void testUpdateOne() {
		Foo f = new Foo();
		f.setId("this-is-id");
		f.setName("<NAME>");
		// remove enity if exists
		ds.commit(Foo.class).entities(f).delete();
		// start insert.
		ds.commit(Foo.class).entities(f).insert();
		f.setName("This is Name update !");
		ds.commit(Foo.class).entities(f).update();
		Foo lookupFoo = ds.lookup(Foo.class).id(f.getId()).get();
		assertEquals(lookupFoo.getName(), f.getName());
	}

	/** Inserts two entities in one commit (varargs form) and verifies both. */
	@Test
	public void testInsertMany() {
		Foo f1 = new Foo();
		f1.setId("this-is-id-01");
		f1.setName("This is Name 01");
		Foo f2 = new Foo();
		f2.setId("this-is-id-02");
		f2.setName("This is Name 02");
		ds.commit(Foo.class).entities(f1, f2).delete();
		List<Key<Foo>> listKey = ds.commit(Foo.class).entities(f1, f2).insert();
		assertEquals(listKey.size(), 2);
		Foo lookupFoo = ds.lookup(Foo.class).id(f1.getId()).get();
		assertEquals(lookupFoo.getId(), f1.getId());
		lookupFoo = ds.lookup(Foo.class).id(f2.getId()).get();
		assertEquals(lookupFoo.getId(), f2.getId());
	}

	/** Updates two entities in one commit and verifies both new values. */
	@Test
	public void testUpdateMany() {
		Foo f = new Foo();
		f.setId("this-is-id");
		f.setName("This is Name");
		Foo f1 = new Foo();
		f1.setId("this-is-id-01");
		f1.setName("This is Name 01");
		// remove enity if exists
		ds.commit(Foo.class).entities(f, f1).delete();
		// start insert.
		ds.commit(Foo.class).entities(f, f1).insert();
		f.setName("This is Name update !");
		f1.setName("This is Name update 01 !");
		ds.commit(Foo.class).entities(f, f1).update();
		Foo lookupFoo = ds.lookup(Foo.class).id(f.getId()).get();
		assertEquals(lookupFoo.getName(), f.getName());
		lookupFoo = ds.lookup(Foo.class).id(f1.getId()).get();
		assertEquals(lookupFoo.getName(), f1.getName());
	}

	/** Inserts a List of entities in one commit and verifies all of them. */
	@Test
	public void testInsertList() {
		Foo f1 = new Foo();
		f1.setId("this-is-id-01");
		f1.setName("This is Name 01");
		Foo f2 = new Foo();
		f2.setId("this-is-id-02");
		f2.setName("This is Name 02");
		Foo f3 = new Foo();
		f3.setId("this-is-id-03");
		f3.setName("This is Name 03");
		List<Foo> list = new ArrayList<Foo>();
		list.add(f1);
		list.add(f2);
		list.add(f3);
		ds.commit(Foo.class).entities(list).delete();
		List<Key<Foo>> listKey = ds.commit(Foo.class).entities(list).insert();
		assertEquals(listKey.size(), 3);
		Foo lookupFoo = ds.lookup(Foo.class).id(f1.getId()).get();
		assertEquals(lookupFoo.getId(), f1.getId());
		lookupFoo = ds.lookup(Foo.class).id(f2.getId()).get();
		assertEquals(lookupFoo.getId(), f2.getId());
		lookupFoo = ds.lookup(Foo.class).id(f3.getId()).get();
		assertEquals(lookupFoo.getId(), f3.getId());
	}
}
require 'ostruct'

# Errbit configuration bootstrap.
#
# On Heroku the settings come from environment variables; everywhere else
# they are read from config/config.yml, with the current Rails environment's
# section merged over the top-level defaults.
if ENV['HEROKU']
  Errbit::Config = OpenStruct.new
  Errbit::Config.host = ENV['ERRBIT_HOST']
  Errbit::Config.email_from = ENV['ERRBIT_EMAIL_FROM']
  # Hard-coded: notify on the 1st, 3rd and 10th occurrence of an error.
  Errbit::Config.email_at_notices = [1,3,10] #ENV['ERRBIT_EMAIL_AT_NOTICES']
else
  yaml = File.read(Rails.root.join('config','config.yml'))
  config = YAML.load(yaml)
  # Environment-specific section overrides the defaults, if present.
  config.merge!(config.delete(Rails.env)) if config.has_key?(Rails.env)
  Errbit::Config = OpenStruct.new(config)
end

# Set config specific values
ActionMailer::Base.default_url_options[:host] = Errbit::Config.host
package minimumcost_spanning_tree;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.PriorityQueue;
import java.util.StringTokenizer;

/**
 *
 * @author exponential-e
 * BOJ 21924: City Construction (백준 21924번: 도시 건설)
 *
 * Kruskal's MST with union-find; prints the total edge cost saved by
 * keeping only a minimum spanning tree, or -1 if the graph is disconnected.
 *
 * @see https://www.acmicpc.net/problem/21924
 *
 */
public class Boj21924 {
    private static long total;          // sum of all edge costs in the input
    private static int[] parent;        // union-find: negative value = set size at the root
    private static PriorityQueue<Node> pq = new PriorityQueue<>();

    /** Weighted undirected edge, ordered by ascending cost for Kruskal. */
    private static class Node implements Comparable<Node>{
        int node1;
        int node2;
        long cost;

        public Node(int node1, int node2, long cost) {
            this.node1 = node1;
            this.node2 = node2;
            this.cost = cost;
        }

        @Override
        public int compareTo(Node n) {
            return this.cost < n.cost ? -1: 1;
        }
    }

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());

        int N = Integer.parseInt(st.nextToken());
        int M = Integer.parseInt(st.nextToken());
        init(N);

        while(M-- > 0) {
            st = new StringTokenizer(br.readLine());
            // Input vertices are 1-based; store them 0-based.
            int node1 = Integer.parseInt(st.nextToken()) - 1;
            int node2 = Integer.parseInt(st.nextToken()) - 1;
            long cost = Long.parseLong(st.nextToken());

            pq.offer(new Node(node1, node2, cost));
            total += cost;
        }

        System.out.println(mst());
    }

    /**
     *
     * MST
     *
     * line 77 ~ 79: if all cities didn't link, then return -1
     *
     * @return total cost minus the MST cost (the saving), or -1 when the
     *         graph is not connected
     */
    private static long mst() {
        long cost = 0;
        while(!pq.isEmpty()) {
            Node current = pq.poll();
            // merged() returns true when both endpoints are already in the
            // same set (the edge would close a cycle) — skip those edges.
            if(merged(current.node1, current.node2)) continue;
            cost += current.cost;
        }

        // Count remaining disjoint sets; more than one means disconnected.
        int set = 0;
        for(int p: parent) {
            if(p < 0) set++;
        }

        return set >= 2 ? -1: total - cost;
    }

    /** Initializes union-find: every node is its own set of size 1. */
    private static void init(int n) {
        parent = new int[n];
        for(int i = 0; i < n; i++){
            parent[i] = -1;
        }
    }

    /** Finds the set root of x, compressing the path along the way. */
    private static int find(int x) {
        if (parent[x] < 0) return x;
        return parent[x] = find(parent[x]);
    }

    /**
     * Unions the sets of x and y using union-by-size.
     *
     * @return true if x and y were already in the same set, false if a
     *         merge was performed
     */
    private static boolean merged(int x, int y) {
        x = find(x);
        y = find(y);

        if(x == y) return true;

        if(parent[x] < parent[y]) {
            parent[x] += parent[y];
            parent[y] = x;
        } else {
            parent[y] += parent[x];
            parent[x] = y;
        }
        return false;
    }
}
#!/bin/bash # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of NVIDIA CORPORATION nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY # OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# The first CLI argument (if given) overrides the repository version taken
# from the NVIDIA_TENSORRT_SERVER_VERSION environment variable.
REPO_VERSION=${NVIDIA_TENSORRT_SERVER_VERSION}
if [ "$#" -ge 1 ]; then
    REPO_VERSION=$1
fi
if [ -z "$REPO_VERSION" ]; then
    echo -e "Repository version must be specified"
    echo -e "\n***\n*** Test Failed\n***"
    exit 1
fi

CLIENT_LOG="./client.log"
ZERO_OUT_TEST=zero_out_test.py
CUDA_OP_TEST=cuda_op_test.py

SERVER=/opt/tensorrtserver/bin/trtserver
SERVER_ARGS="--model-repository=/data/inferenceserver/${REPO_VERSION}/qa_custom_ops"
SERVER_LOG="./inference_server.log"
source ../common/util.sh

rm -f $SERVER_LOG $CLIENT_LOG

RET=0

# Preload the custom-op libraries so the server process can resolve them;
# drop the preload afterwards so client processes don't inherit it.
export LD_PRELOAD=/data/inferenceserver/${REPO_VERSION}/qa_custom_ops/libzeroout.so:/data/inferenceserver/${REPO_VERSION}/qa_custom_ops/libcudaop.so:/data/inferenceserver/${REPO_VERSION}/qa_custom_ops/libbusyop.so
run_server
if [ "$SERVER_PID" == "0" ]; then
    echo -e "\n***\n*** Failed to start $SERVER\n***"
    cat $SERVER_LOG
    exit 1
fi
unset LD_PRELOAD

# "set +e" so a failing client test marks RET instead of aborting the script.
set +e

python $ZERO_OUT_TEST -m graphdef_zeroout >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi

python $ZERO_OUT_TEST -m savedmodel_zeroout >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi

python $CUDA_OP_TEST -m graphdef_cudaop >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi

python $CUDA_OP_TEST -m savedmodel_cudaop >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi

set -e

# Shut the server down and report the aggregate result.
kill $SERVER_PID
wait $SERVER_PID

if [ $RET -eq 0 ]; then
    echo -e "\n***\n*** Test Passed\n***"
fi

exit $RET
// units.rs pub mod units { pub mod length { pub fn meters_to_feet(meters: f64) -> f64 { meters * 3.28084 } pub fn meters_to_inches(meters: f64) -> f64 { meters * 39.3701 } // Add more length conversion functions as needed } pub mod weight { pub fn kilograms_to_pounds(kilograms: f64) -> f64 { kilograms * 2.20462 } pub fn kilograms_to_ounces(kilograms: f64) -> f64 { kilograms * 35.274 } // Add more weight conversion functions as needed } pub mod volume { pub fn liters_to_gallons(liters: f64) -> f64 { liters * 0.264172 } pub fn liters_to_milliliters(liters: f64) -> f64 { liters * 1000.0 } // Add more volume conversion functions as needed } pub mod temperature { pub fn celsius_to_fahrenheit(celsius: f64) -> f64 { celsius * 1.8 + 32.0 } pub fn celsius_to_kelvin(celsius: f64) -> f64 { celsius + 273.15 } // Add more temperature conversion functions as needed } }
#!/usr/bin/env bash set -e cd "$(dirname "$0")/.." source ci/_ source ci/rust-version.sh stable source ci/rust-version.sh nightly export RUST_BACKTRACE=1 export RUSTFLAGS="-D warnings" _ cargo +"$rust_stable" fmt --all -- --check # Clippy gets stuck for unknown reasons if sdk-c is included in the build, so check it separately. # See https://github.com/solana-labs/solana/issues/5503 _ cargo +"$rust_stable" clippy --version _ cargo +"$rust_stable" clippy --all --exclude solana-sdk-c -- --deny=warnings _ cargo +"$rust_stable" clippy --manifest-path sdk-c/Cargo.toml -- --deny=warnings _ cargo +"$rust_stable" audit --version _ cargo +"$rust_stable" audit --ignore RUSTSEC-2019-0013 --ignore RUSTSEC-2018-0015 --ignore RUSTSEC-2019-0031 _ ci/nits.sh _ ci/order-crates-for-publishing.py _ book/build.sh _ ci/check-ssh-keys.sh { cd programs/bpf _ cargo +"$rust_stable" audit --ignore RUSTSEC-2019-0031 for project in rust/*/ ; do echo "+++ do_bpf_checks $project" ( cd "$project" _ cargo +"$rust_stable" fmt -- --check _ cargo +"$rust_nightly" test _ cargo +"$rust_nightly" clippy --version _ cargo +"$rust_nightly" clippy -- --deny=warnings --allow=clippy::missing_safety_doc ) done } echo --- ok
/**
 * <p>Title: litis</p>
 * <p>Description: all-round Redis development and operations platform
 * (translated from the original Chinese comment)</p>
 * <p>Copyright: Copyright (c) 2017</p>
 * @author Bryan.Zhang
 * @email <EMAIL>
 * @Date 2017-11-28
 */
package com.yomahub.flowtest.components;

import com.yomahub.liteflow.core.NodeComponent;
import com.yomahub.liteflow.core.NodeCondComponent;
import org.springframework.stereotype.Component;

/**
 * Conditional flow node: processCond() returns the id of the node the flow
 * should branch to next.
 */
@Component("cond")
public class CondComponent extends NodeCondComponent {

	@Override
	public String processCond() throws Exception {
		// Always branch to node "b".
		return "b";
	}
}
# FIX: the original line was "ls;;" — a bash syntax error, since ";;" is a
# case-statement terminator and is invalid outside "case".
ls
# Remove artifacts from any previous build.
make clean
# Build the 64bit target using the Windows makefile.
make -f windows.mk 64bit
/**
 * The mystery ships handler in the game.
 *
 * Creates, removes and handles all mystery ships in the game.
 */

/*global Audio */
/*global Guer */
/*global ExplodedMysteryShip */
/*global MysteryShip */
/*global Vector */

/**
 * The mystery ships constructor.
 *
 * Sets the mystery ships specifications.
 *
 * @param {Object} score - the score handler used to add points when a
 *                         mystery ship is hit.
 */
function MysteryShips(score) {
    this.score = score;
    this.mysteryShips = null;
    this.explodedMysteryShips = null;
    this.aliensDirection = null;
    this.timer = null;
    this.playMoveSound = null;
    // Points awarded for shooting down a mystery ship.
    this.hitPoints = 100;
    this.mysteryShipExplosion = new Audio("sound/alien_explosion.wav");
    this.shipMoveSound = new Audio("sound/ufo_lowpitch.wav");
}

/**
 * The prototype of the mystery ships describing the characteristics of the
 * mystery ships.
 *
 * @type {Object}
 */
MysteryShips.prototype = {

    /**
     * Resets the handler state for a new game round: empty ship lists and a
     * randomized countdown until the first ship appears.
     */
    start: function() {
        this.mysteryShips = [];
        this.explodedMysteryShips = [];
        this.aliensDirection = "left";
        this.timer = Guer.random(1600, 1900);
        this.playMoveSound = 0;
    },

    /**
     * Draws all mystery ship and the exploded mystery ship, if present.
     *
     * @param  {Object}  ct - The canvas context.
     *
     * @return {void}
     */
    draw: function(ct) {
        for (var i = 0; i < this.mysteryShips.length; i++) {
            this.mysteryShips[i].draw(ct);
        }

        for (var j = 0; j < this.explodedMysteryShips.length; j++) {
            this.explodedMysteryShips[j].draw(ct);
        }
    },

    /**
     * Checks if mystery ship has been hit by a missile. If the an mystery ship
     * has been hit, it is marked to be removed.
     *
     * @param  {Object}  missile - the missile object.
     *
     * @return {boolean}  True if an alien has been hit by a missile, false otherwise.
     */
    mysteryShipsHit: function(missile) {
        for (var i = 0; i < this.mysteryShips.length; i++) {
            if (this.mysteryShips[i].mysteryShipHit(missile)) {
                this.mysteryShips[i].shouldBeRemoved = true;
                return true;
            }
        }

        return false;
    },

    /**
     * Decreases a time, set by a random value, to check if a mystery ship should
     * be created an started. The direction of the ship is randomly choosen.
     *
     * If the ship is hit by a missle the ship is removed and an exploded mystery
     * ship is created. A timer controls how long the exploded mystery ship should
     * be present.
     *
     * @param  {number}  td - Time difference offset
     *
     * @return {void}
     */
    update: function(td) {
        // Only count down towards the next ship while none is on screen.
        if (this.mysteryShips.length === 0) {
            this.timer--;
        }

        if (this.timer === 0) {
            // Spawn from a random side; the start x-position matches the side.
            var direction = Guer.random(0, 1);
            if (direction > 0) {
                this.mysteryShips.push(new MysteryShip(new Vector(850, 90), "left"));
            } else {
                this.mysteryShips.push(new MysteryShip(new Vector(15, 90), "right"));
            }

            this.timer = Guer.random(1600, 1900);
        }

        // Iterate backwards so splicing does not skip elements.
        for (var i = this.mysteryShips.length -1; i >= 0; i--) {
            // Throttle the engine sound to every 10th update tick.
            this.playMoveSound = (this.playMoveSound + 1) % 10;
            if (this.playMoveSound === 0) {
                this.shipMoveSound.play();
            }

            this.mysteryShips[i].update(td);
            if (this.mysteryShips[i].shouldBeRemoved) {
                // Only ships that were shot down (not ones that left the
                // screen) explode and award points.
                if (!this.mysteryShips[i].reachedBorder) {
                    this.explodedMysteryShips.push(new ExplodedMysteryShip(new Vector(this.mysteryShips[i].position.x, this.mysteryShips[i].position.y), this.hitPoints));
                    this.score.addScore(this.hitPoints);
                    // Restart the explosion sound if it is already playing.
                    if (this.mysteryShipExplosion.currentTime > 0) {
                        this.mysteryShipExplosion.pause();
                        this.mysteryShipExplosion.currentTime = 0;
                    }

                    this.mysteryShipExplosion.play();
                }

                this.mysteryShips.splice(i, 1);
            }
        }

        // Remove exploded ships once their display timer has run out.
        for (var j = this.explodedMysteryShips.length -1; j >= 0; j--) {
            this.explodedMysteryShips[j].update();
            if (this.explodedMysteryShips[j].timer === 0) {
                this.explodedMysteryShips.splice(j, 1);
            }
        }
    },
};
package com.zm.paipai;

import android.app.Fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;

import com.google.gson.Gson;
import com.zm.paipai.adapter.ImageAdapter;
import com.zm.paipai.pinglunactivity.listActivity;
import com.zm.paipai.proj.PathactivityBean;
import com.zm.paipai.proj.Product;
import com.zm.paipai.proj.SpacesItemDecoration;

import org.xutils.common.Callback;
import org.xutils.http.RequestParams;
import org.xutils.x;

import java.util.ArrayList;
import java.util.List;

/**
 * "Hot" feed fragment: fetches image paths from the backend and shows them
 * in a two-column staggered grid.
 *
 * Created by Administrator on 2016/9/19.
 */
public class fragment_remen extends Fragment {

    private ImageAdapter adpter;       // adapter actually attached to the RecyclerView
    private RecyclerView recyclerView;
    private List<Product> data;        // items backing the adapter
    private Button bt;
    // NOTE(review): "adapter" is never assigned anywhere visible (initView
    // assigns "adpter"), so the notifyDataSetChanged() call in onSuccess()
    // below will throw a NullPointerException — verify and consolidate.
    private ImageAdapter adapter;
    final ArrayList<PathactivityBean.Path> pathlist = new ArrayList<PathactivityBean.Path>();
    private final String baseUrl="http://10.40.5.24:8080/webpro4/image/";
    // Stores the image URLs we need to display (translated comment).
    private List<String> urls;
    private ArrayList<String> titles=new ArrayList<>();
    private ArrayList<String> dianzan=new ArrayList<>(); // like ("dianzan") counts

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        System.out.println("///////////////////////////");
        View view=inflater.inflate(R.layout.fragment_remen,null);
        recyclerView = (RecyclerView)view.findViewById(R.id.recyclerView);
        getBaseUrl();
        initView();
        return view;
    }

    // Fetches the list of image paths from the backend and builds the
    // Product list for the adapter.
    private void getBaseUrl() {
        System.out.println("???????????????????????");
        urls=new ArrayList<String>();
        data = new ArrayList<Product>();
        RequestParams params = new RequestParams("http://10.40.5.24:8080/webpro4/getPath");
        x.http().get(params, new Callback.CommonCallback<String>() {
            @Override
            public void onSuccess(String result) {
                Gson gson = new Gson();
                PathactivityBean bean = gson.fromJson(result, PathactivityBean.class);
                pathlist.addAll(bean.pathList);
                for (int i = 0; i < pathlist.size(); i++) {
                    urls.add(baseUrl + pathlist.get(i).path + ".png");
                    titles.add(pathlist.get(i).descp);
                    dianzan.add(pathlist.get(i).dianzan);
                    System.out.println(result.toString()+"/////////////////////?????");
                }
                for(int i=0;i<urls.size();i++){
                    Product product = new Product(urls.get(i), titles.get(i),dianzan.get(i));
                    data.add(product);
                }
                adapter.notifyDataSetChanged();
            }

            @Override
            public void onError(Throwable ex, boolean isOnCallback) {
                System.out.println(ex.toString()+"?<<<<<<<<<<<<<<>>>>>>>>>>>>");
            }

            @Override
            public void onCancelled(CancelledException cex) {
            }

            @Override
            public void onFinished() {
            }
        });
    }

    // Sets up the staggered grid, the adapter and the item-click handler.
    private void initView() {
        System.out.println("???????????/////////////////");
        final StaggeredGridLayoutManager layoutManager=new StaggeredGridLayoutManager(2,StaggeredGridLayoutManager.VERTICAL);
        // Avoid item reordering gaps while scrolling.
        layoutManager.setGapStrategy(StaggeredGridLayoutManager.GAP_HANDLING_NONE);
        recyclerView.setOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
                super.onScrollStateChanged(recyclerView, newState);
                layoutManager.invalidateSpanAssignments();
            }
        });
        recyclerView.setLayoutManager(layoutManager);
        adpter = new ImageAdapter(data,getActivity(),R.layout.item);
        adpter.setOnItemClickListener(new ImageAdapter.OnItemClickListener() {
            @Override
            public void onItemClick(View view, int position) {
                Intent intent=new Intent(getActivity(),listActivity.class);
                // Pass this image's URL on to the target activity (translated comment).
                intent.putExtra("imageUrl",urls.get(position));
                startActivity(intent);
            }
        });
        recyclerView.setAdapter(adpter);
        SpacesItemDecoration decoration=new SpacesItemDecoration(16);
        recyclerView.addItemDecoration(decoration);
    }
}
package mathcard.player;

import java.util.List;
import java.util.Random;

import mathcard.card.Card;
import mathcard.game.Play;
import mathcard.game.Play.Target;

/**
 * A player that plays a uniformly random card from its hand at a random
 * target, and picks a random card whenever offered a choice.
 */
public class PlayerRandom extends Player {

	private Random rand;

	/**
	 * @param random source of randomness; its hashCode is also used to make
	 *               the player name unique.
	 */
	public PlayerRandom(Random random) {
		super("Random" + random.hashCode());
		rand = random;
	}

	/** Plays a random card from the hand at a random target. */
	@Override
	public Play play() {
		Card c = getHand().get(rand.nextInt(getHand().size()));
		return play(c, Target.random(rand));
	}

	/** Picks a uniformly random card from the offered list. */
	@Override
	public void pickCard(List<Card> list) {
		pick(list, rand.nextInt(list.size()));
	}
}
"""Load a CSV dataset and split it into train/test sets (80/20)."""
# PEP 8 fix: all imports belong at the top of the file, not mid-script.
import pandas as pd
from sklearn.model_selection import train_test_split

# Load the dataset from disk.
data = pd.read_csv('data.csv')

# Hold out 20% of the rows for testing; fixed seed keeps the split reproducible.
X_train, X_test, y_train, y_test = train_test_split(
    data.drop('target', axis=1),
    data['target'],
    test_size=0.20,
    random_state=101,
)
"""Train and evaluate a linear SVM on a labelled CSV dataset."""
# Bug fix: the script used `pd.read_csv` without importing pandas,
# which raises NameError at runtime.
import pandas as pd
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split

# Step 1: Load the data; every column except 'label' is a feature.
dataset = pd.read_csv('dataset.csv')
X = dataset.drop(['label'], axis=1).values
y = dataset['label'].values

# Step 2: Split the data into training set and test set (20% held out).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)

# Step 3: Create the classifier.
classifier = SVC(kernel='linear')

# Step 4: Train the classifier.
classifier.fit(X_train, y_train)

# Step 5: Test the classifier on the held-out set.
y_pred = classifier.predict(X_test)

# Step 6: Compute the accuracy (mean accuracy on the test set).
accuracy = classifier.score(X_test, y_test)

# Step 7: Print the accuracy.
print("Accuracy:", accuracy)
import os
import json


def process_fish_json():
    """Print name, average size and habitat for each species listed in the
    ``fish.json`` file that sits next to this script.

    Prints three lines per species; missing fields fall back to 'Unknown'.
    Raises FileNotFoundError if fish.json is absent and json.JSONDecodeError
    if it is malformed.
    """
    # Bug fix: the original split os.path.realpath(__file__) on '\\', which
    # only works with Windows path separators. os.path.dirname is portable.
    fish_path = os.path.dirname(os.path.realpath(__file__))
    fish_json_path = os.path.join(fish_path, "fish.json")

    with open(fish_json_path, 'r') as file:
        data = json.load(file)

    # Missing or empty 'fish_species' simply prints nothing.
    for fish in data.get('fish_species', []):
        name = fish.get('name', 'Unknown')
        size = fish.get('average_size', 'Unknown')
        habitat = fish.get('habitat', 'Unknown')
        print(f"Fish: {name}")
        print(f"Average Size: {size} inches")
        print(f"Habitat: {habitat}\n")


# Example usage
process_fish_json()
package com.bustiblelemons.cthulhator.system.properties;

import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.text.TextUtils;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by bhm on 29.07.14.
 *
 * A rule that derives a value from one or more named character properties:
 * the referenced property values are summed and then combined with
 * {@link #modifier} according to {@link ModifierType}. Parcelable and
 * JSON-serializable.
 */
@JsonIgnoreProperties
public class Relation implements Parcelable, Serializable {

    @JsonIgnore
    public static final Parcelable.Creator<Relation> CREATOR = new Parcelable.Creator<Relation>() {
        public Relation createFromParcel(Parcel source) {
            return new Relation(source);
        }

        public Relation[] newArray(int size) {
            return new Relation[size];
        }
    };

    // Names of the properties this relation reads its inputs from.
    private List<String> propertyNames;
    // Operand combined with the summed property values (see ModifierType).
    private int modifier;
    private ModifierType modifierType = ModifierType.NONE;
    // Optional bounds; applied by callers when modifiesMaximum/Minimum is set.
    private int max;
    private int min;
    private boolean modifiesMaximum;
    private boolean modifiesMinimum;

    public Relation() {
    }

    // Reads fields in the exact order writeToParcel() writes them:
    // modifier, modifierType ordinal, propertyNames, max, min, two flag bytes.
    private Relation(Parcel in) {
        this.modifier = in.readInt();
        int tmpModifierType = in.readInt();
        this.propertyNames = new ArrayList<String>();
        in.readStringList(propertyNames);
        // -1 is the sentinel written for a null modifierType.
        this.modifierType = tmpModifierType == -1 ? null : ModifierType.values()[tmpModifierType];
        this.max = in.readInt();
        this.min = in.readInt();
        this.modifiesMaximum = in.readByte() != 0;
        this.modifiesMinimum = in.readByte() != 0;
    }

    public ModifierType getModifierType() {
        return modifierType;
    }

    public Relation setModifierType(ModifierType modifierType) {
        this.modifierType = modifierType;
        return this;
    }

    public int getMax() {
        return max;
    }

    public void setMax(int max) {
        this.max = max;
    }

    public int getMin() {
        return min;
    }

    public void setMin(int min) {
        this.min = min;
    }

    public boolean isModifiesMaximum() {
        return modifiesMaximum;
    }

    public void setModifiesMaximum(boolean modifiesMaximum) {
        this.modifiesMaximum = modifiesMaximum;
    }

    public boolean isModifiesMinimum() {
        return modifiesMinimum;
    }

    public void setModifiesMinimum(boolean modifiesMinimum) {
        this.modifiesMinimum = modifiesMinimum;
    }

    public int getModifier() {
        return modifier;
    }

    public Relation setModifier(int modifier) {
        this.modifier = modifier;
        return this;
    }

    public List<String> getPropertyNames() {
        return propertyNames;
    }

    public void setPropertyNames(List<String> propertyNames) {
        this.propertyNames = propertyNames;
    }

    // Lazily creates the backing list; silently ignores empty names.
    public Relation addPropertyName(@NonNull String propertyName) {
        if (propertyNames == null) {
            propertyNames = new ArrayList<String>();
        }
        if (!TextUtils.isEmpty(propertyName)) {
            propertyNames.add(propertyName);
        }
        return this;
    }

    /**
     * Sums the current values of all referenced properties (fetched through
     * the retriever) and applies this relation's modifier to the sum.
     *
     * NOTE(review): for AVERAGE this overwrites {@link #modifier} with the
     * number of properties read and divides by it — a count of zero would
     * throw ArithmeticException; confirm callers guarantee at least one
     * property. Also note the null check despite the @NonNull annotation.
     */
    @JsonIgnore
    public int getValueByRelation(@NonNull PropertyValueRetreiver propertyValueRetreiver) {
        if (propertyValueRetreiver != null) {
            int sum = 0;
            int mod = 0;
            for (String propertyName : propertyNames) {
                if (propertyName != null) {
                    int value = propertyValueRetreiver.onRetreivePropertValue(propertyName);
                    sum = sum + value;
                    mod++;
                }
            }
            if (ModifierType.AVERAGE.equals(this.modifierType)) {
                modifier = mod;
                return sum / mod;
            }
            return getCalculatedValue(sum);
        }
        return 0;
    }

    // Applies `modifier` to the summed property values per the ModifierType.
    // NOTE(review): DIVISION/AVERAGE divide by `modifier` — zero would throw.
    @JsonIgnore
    protected int getCalculatedValue(int valueFromProperties) {
        switch (modifierType) {
        case MULTIPLY:
            return valueFromProperties * modifier;
        case ADDITION:
            return valueFromProperties + modifier;
        case DIVISION:
            return valueFromProperties / modifier;
        case SUBSTRACT:
            return valueFromProperties - modifier;
        case AVERAGE:
            return valueFromProperties / modifier;
        default:
            return valueFromProperties;
        }
    }

    @JsonIgnore
    @Override
    public int describeContents() {
        return 0;
    }

    // Field order here must stay in sync with the Parcel constructor above.
    @JsonIgnore
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(this.modifier);
        dest.writeInt(this.modifierType == null ? -1 : this.modifierType.ordinal());
        dest.writeStringList(this.propertyNames);
        dest.writeInt(this.max);
        dest.writeInt(this.min);
        dest.writeByte(modifiesMaximum ? (byte) 1 : (byte) 0);
        dest.writeByte(modifiesMinimum ? (byte) 1 : (byte) 0);
    }

    public Relation withModifier(int mod) {
        this.modifier = mod;
        return this;
    }

    public Relation withRelation(String name) {
        addPropertyName(name);
        return this;
    }

    public Relation withModifierType(ModifierType type) {
        setModifierType(type);
        return this;
    }
}
#!/bin/sh
#
# backup full webpages
#
# Saves two single-file archives of the given URL with monolith: a full copy
# and a stripped-down ("min") copy, named after the page title.
#
# SCRIPT_DEPS: [ monolith ]
#
# Usage: $0 <url>

: "${MONOLITH:=monolith}"
: "${GET_TITLE:=get-url-title}"

# Robustness fix: fail early with a usage message when no URL was given,
# instead of archiving an empty argument.
if [ -z "$1" ]; then
    printf "usage: %s <url>\n" "$0" >&2
    exit 1
fi

ARCHIVE_DIR="/disk/archive/web"
ARCHIVE_DIR_FULL="${ARCHIVE_DIR}/full"
ARCHIVE_DIR_MIN="${ARCHIVE_DIR}/min"

mkdir -p "$ARCHIVE_DIR_FULL" "$ARCHIVE_DIR_MIN"

# Derive a filesystem-safe file name by mapping shell/path metacharacters
# and whitespace in the page title to '-'.
FILENAME="$($GET_TITLE "$1" | tr '~!@#$%^&*()+=:;,<>?"[]\/\\\t\n  ' '-')"

printf "archiving in %s.html\n" "$FILENAME"

# Full archive (all assets embedded), then the minified variant.
$MONOLITH "$1" -o "${ARCHIVE_DIR_FULL}/${FILENAME}.html"
$MONOLITH -acefFiIjv "$1" -o "${ARCHIVE_DIR_MIN}/${FILENAME}.html"

printf "finished archiving %s\n" "$1"
// Test callback: warns when the page has no recorded HTTP headers (most
// likely a cached response), since other header-dependent checks would then
// fail or silently not report. Relies on the enclosing object's `this`
// (arrow function) for createResult.
(page, done) => {
    var hh = page.getHttpHeaders("last");
    //var staticdom = page.getDom();
    if(!hh){
        done(this.createResult('HTTP', "<b>No HTTP-header</b> found, most likely due to <b>caching</b>! HTTP-header depending tests might fail or not get reported!", 'warning'));
    }
    // NOTE(review): done() is called again even after done(result) above —
    // confirm the test framework tolerates the double invocation.
    done();
}
// Google Maps styling rules: everything hidden by default, then roads
// (white/yellow), landscape, water, parks and labels selectively re-enabled
// with the site's palette.
var style = [
    // Hide every feature by default; later entries re-enable selectively.
    { "stylers": [{ "visibility": "off" }] },
    { "featureType": "road", "stylers": [{ "visibility": "on" }, { "color": "#ffffff" }] },
    { "featureType": "road.arterial", "stylers": [{ "visibility": "on" }, { "color": "#fee379" }] },
    { "featureType": "road.highway", "stylers": [{ "visibility": "on" }, { "color": "#fee379" }] },
    { "featureType": "landscape", "stylers": [{ "visibility": "on" }, { "color": "#f3f4f4" }] },
    { "featureType": "water", "stylers": [{ "visibility": "on" }, { "color": "#7fc8ed" }] },
    // NOTE(review): empty rule object — has no effect; confirm it is not a
    // leftover from a removed entry.
    {},
    // Road labels off, but generic labels re-enabled below.
    { "featureType": "road", "elementType": "labels", "stylers": [{ "visibility": "off" }] },
    { "featureType": "poi.park", "elementType": "geometry.fill", "stylers": [{ "visibility": "on" }, { "color": "#83cead" }] },
    { "elementType": "labels", "stylers": [{ "visibility": "on" }] },
    { "featureType": "landscape.man_made", "elementType": "geometry", "stylers": [{ "weight": 0.9 }, { "visibility": "off" }] }
]
#!/bin/sh case "$1" in merged|unmerged) mode="$1" ;; *) echo "Usage: $0 (merged | unmerged)" >&2 exit 1 ;; esac origin=${REMOTE:-origin} git ls-remote --heads $origin | while read sha1 ref do ref=$origin/${ref#refs/heads/} case $ref in $origin/master|$origin/debian-*) continue;; # ignore debian as a topic branch esac case "$(git merge-base $sha1 master)" in $sha1) case "$mode" in merged) test $origin/contrib = "$ref" || echo $ref is already in master;; esac;; *) case "$mode" in unmerged) date=$(git show -s --format='%cd' $ref) echo "$origin/master..$ref ($date)": git log $origin/master..$ref --format=' (%an) %s' echo;; esac;; esac done | less -FSRX
<reponame>RobertPHeller/RPi-RRCircuits<gh_stars>1-10 //// -!- C++ -!- ////////////////////////////////////////////////////////////// // // System : // Module : // Object Name : $RCSfile$ // Revision : $Revision$ // Date : $Date$ // Author : $Author$ // Created By : <NAME> // Created : Mon Oct 28 13:33:31 2019 // Last Modified : <210527.0746> // // Description // // Notes // // History // ///////////////////////////////////////////////////////////////////////////// // // Copyright (C) 2019 <NAME> D/B/A Deepwoods Software // 51 Lock<NAME>ill Road // Wendell, MA 01379-9728 // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 
// // // ////////////////////////////////////////////////////////////////////////////// static const char rcsid[] = "@(#) : $Id$"; #include <math.h> #include <string.h> #include "openlcb/PolledProducer.hxx" #include "openlcb/EventHandlerTemplates.hxx" #include "openlcb/ConfigRepresentation.hxx" #include "utils/ConfigUpdateListener.hxx" #include "utils/ConfigUpdateService.hxx" #include "openlcb/RefreshLoop.hxx" #include "utils/logging.h" #include "utils/Singleton.hxx" #include <dcc/ProgrammingTrackBackend.hxx> #include "AnalogReadSysFS.h" #include "HBridgeControl.hxx" #include "Hardware.hxx" #include <vector> #include <numeric> HBridgeControl::HBridgeControl(openlcb::Node *node, const HBridgeControlConfig &cfg, uint8_t currentAIN, const uint32_t limitMilliAmps, const uint32_t maxMilliAmps, const Gpio *enableGpio, const Gpio *thermFlagGpio) : node_(node) , cfg_(cfg) , currentAIN_(currentAIN) , enableGpio_(enableGpio) , thermFlagGpio_(thermFlagGpio) , maxMilliAmps_(maxMilliAmps) , overCurrentLimit_((uint32_t)round(limitMilliAmps*.9)) // ~90% max value , shutdownLimit_((uint32_t)round(limitMilliAmps*.99)) , isProgTrack_(false) , progAckLimit_(0) , shortBit_(node, 0, 0, &state_, STATE_OVERCURRENT) , shutdownBit_(node, 0, 0, &state_, STATE_SHUTDOWN) , thermalFlagBit_(node, 0, 0, &thermalFlag_, 1) , shortProducer_(&shortBit_) , shutdownProducer_(&shortBit_) , thermalFlagProducer_(&thermalFlagBit_) { ConfigUpdateService::instance()->register_update_listener(this); } HBridgeControl::HBridgeControl(openlcb::Node *node, const HBridgeControlConfig &cfg, uint8_t currentAIN, const uint32_t maxMilliAmps, const Gpio *enableGpio, const Gpio *thermFlagGpio) : node_(node) , cfg_(cfg) , currentAIN_(currentAIN) , enableGpio_(enableGpio) , thermFlagGpio_(thermFlagGpio) , maxMilliAmps_(maxMilliAmps) , overCurrentLimit_(250) // ~250 mA , shutdownLimit_(500) , isProgTrack_(true) , progAckLimit_(60) // ~60 mA , shortBit_(node, 0, 0, &state_, STATE_OVERCURRENT) , shutdownBit_(node, 0, 0, 
&state_, STATE_SHUTDOWN) , thermalFlagBit_(node, 0, 0, &thermalFlag_, 1) , shortProducer_(&shortBit_) , shutdownProducer_(&shortBit_) , thermalFlagProducer_(&thermalFlagBit_) { ConfigUpdateService::instance()->register_update_listener(this); } HBridgeControl::~HBridgeControl() { ConfigUpdateService::instance()->unregister_update_listener(this); } void HBridgeControl::poll_33hz(openlcb::WriteHelper *helper, Notifiable *done) { vector<int> samples; while (samples.size() < adcSampleCount_) { samples.push_back(sysfs_adc_getvalue(currentAIN_)); usleep(1); } lastReading_ = (uint32_t)round(CurrentFromAIN(std::accumulate(samples.begin(), samples.end(), 0)/adcSampleCount_)*1000); if (isProgTrack_ && progEnable_) { auto backend = Singleton<ProgrammingTrackBackend>::instance(); if (lastReading_ >= overCurrentLimit_) { backend->notify_service_mode_short(); } else if (lastReading_ >= progAckLimit_) { backend->notify_service_mode_ack(); } } uint8_t previous_state = state_; if (lastReading_ >= shutdownLimit_) { enableGpio_->clr(); state_ = STATE_SHUTDOWN; } else if (lastReading_ >= overCurrentLimit_) { if (overCurrentCheckCount_++ >= overCurrentRetryCount_) { enableGpio_->clr(); state_ = STATE_OVERCURRENT; } } else { if (enableGpio_->is_set()) { overCurrentCheckCount_ = 0; state_ = STATE_ON; } else { state_ = STATE_OFF; } } bool async_event_req = false; if (previous_state != state_) { if (previous_state == STATE_SHUTDOWN || state_ == STATE_SHUTDOWN) { shutdownProducer_.SendEventReport(helper, done); async_event_req = true; } else if (previous_state == STATE_OVERCURRENT || state_ == STATE_OVERCURRENT) { shortProducer_.SendEventReport(helper, done); async_event_req = true; } } if (thermFlagGpio_ != NULL) { uint8_t previous_thermalFlag = thermalFlag_; if (thermFlagGpio_->is_set()) { thermalFlag_ = 1; } else { thermalFlag_ = 0; } if (previous_thermalFlag != thermalFlag_) { thermalFlagProducer_.SendEventReport(helper, done); async_event_req = true; } } if (!async_event_req) { 
done->notify(); } } ConfigUpdateListener::UpdateAction HBridgeControl::apply_configuration(int fd, bool initial_load, BarrierNotifiable *done) { AutoNotify n(done); UpdateAction res = initial_load ? REINIT_NEEDED : UPDATED; openlcb::EventId short_detected = cfg_.event_short().read(fd); openlcb::EventId short_cleared = cfg_.event_short_cleared().read(fd); openlcb::EventId shutdown = cfg_.event_shutdown().read(fd); openlcb::EventId shutdown_cleared = cfg_.event_shutdown_cleared().read(fd); openlcb::EventId thermalflagon = cfg_.event_thermflagon().read(fd); openlcb::EventId thermalflagoff = cfg_.event_thermflagoff().read(fd); auto saved_node = shortBit_.node(); if (short_detected != shortBit_.event_on() || short_cleared != shortBit_.event_off()) { shortBit_.openlcb::MemoryBit<uint8_t>::~MemoryBit(); new (&shortBit_)openlcb::MemoryBit<uint8_t>(saved_node, short_detected, short_cleared, &state_, STATE_OVERCURRENT); shortProducer_.openlcb::BitEventProducer::~BitEventProducer(); new (&shortProducer_)openlcb::BitEventProducer(&shortBit_); res = REINIT_NEEDED; } if (shutdown != shutdownBit_.event_on() || shutdown_cleared != shutdownBit_.event_off()) { saved_node = shutdownBit_.node(); shutdownBit_.openlcb::MemoryBit<uint8_t>::~MemoryBit(); new (&shutdownBit_)openlcb::MemoryBit<uint8_t>(saved_node, shutdown, shutdown_cleared, &state_, STATE_SHUTDOWN); shutdownProducer_.openlcb::BitEventProducer::~BitEventProducer(); new (&shutdownProducer_)openlcb::BitEventProducer(&shutdownBit_); res = REINIT_NEEDED; } if (thermalflagon != thermalFlagBit_.event_on() || thermalflagoff != thermalFlagBit_.event_off()) { saved_node = thermalFlagBit_.node(); thermalFlagBit_.openlcb::MemoryBit<uint8_t>::~MemoryBit(); new (&thermalFlagBit_)openlcb::MemoryBit<uint8_t>(saved_node, thermalflagon, thermalflagoff, &thermalFlag_, 1); thermalFlagProducer_.openlcb::BitEventProducer::~BitEventProducer(); new (&thermalFlagProducer_)openlcb::BitEventProducer(&thermalFlagBit_); res = REINIT_NEEDED; } return 
res; } void HBridgeControl::factory_reset(int fd) { }
<filename>src/main/java/controller/PersonController.java<gh_stars>0 package controller; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.client.RestTemplate; import entities.HttpErrorResponse; import entities.PersonInfo; import services.ApiService; @RestController public class PersonController { @Autowired private ApiService servicio; @Value("${error.text.emptyname}") private String emptyname; @Value("${error.text.notexistname}") private String notexistname; @RequestMapping("/swapi-proxy/person-info") public ResponseEntity<?> findPeople(@RequestParam(value = "name", defaultValue = "") String name) { try { HttpHeaders headers = new HttpHeaders(); headers.add("Content-Type", MediaType.APPLICATION_JSON_VALUE); if (name.isEmpty()) { HttpErrorResponse error = new HttpErrorResponse(); error.setMessage(emptyname); return new ResponseEntity<HttpErrorResponse>(error,headers, HttpStatus.NOT_FOUND); } PersonInfo response = servicio.SendtoApi(name); if (response.getName() == null) { HttpErrorResponse error = new HttpErrorResponse(); error.setMessage(notexistname); return new ResponseEntity<HttpErrorResponse>(error,headers, HttpStatus.NOT_FOUND); } return new ResponseEntity<PersonInfo>(response,headers, HttpStatus.OK); } catch (Exception e) { HttpHeaders headers = new HttpHeaders(); headers.add("Content-Type", MediaType.APPLICATION_JSON_VALUE); HttpErrorResponse error = new HttpErrorResponse(); error.setMessage(e.toString()); return new ResponseEntity<HttpErrorResponse>(error,headers, 
HttpStatus.INTERNAL_SERVER_ERROR); } } }
#!/bin/bash
# STIG module template (stigadm framework): bootstraps the shared
# environment, (eventually) validates/remediates the STIG item described in
# the metadata footer, and emits a standardized report. Validation and
# remediation are NOT implemented yet — errors[] is seeded with ${stigid}
# so the module always reports "Not yet implemented!".

# Module specific variables go here
# Files: file=/path/to/file
# Arrays: declare -a array_name
# Strings: foo="bar"
# Integers: x=9

###############################################
# Bootstrapping environment setup
###############################################

# Get our working directory
cwd="$(pwd)"

# Define our bootstrapper location
bootstrap="${cwd}/tools/bootstrap.sh"

# Bail if it cannot be found
if [ ! -f ${bootstrap} ]; then
  echo "Unable to locate bootstrap; ${bootstrap}" && exit 1
fi

# Load our bootstrap (provides report, gen_epoch, gen_date, percent, etc.)
source ${bootstrap}

###############################################
# Metrics start
###############################################

# Get EPOCH
s_epoch="$(gen_epoch)"

# Create a timestamp
timestamp="$(gen_date)"

# Who is calling? 0 = singular, 1 = invoked as part of the stigadm group run
caller=$(ps $PPID | grep -c stigadm)

###############################################
# Perform restoration
###############################################

# If ${restore} = 1 go to restoration mode
if [ ${restore} -eq 1 ]; then
  report "Not yet implemented" && exit 1
fi

###############################################
# STIG validation/remediation
###############################################

# Module specific validation code should go here
# Errors should go in ${errors[@]} array (which on remediation get handled)
# All inspected items should go in ${inspected[@]} array
# Placeholder: mark the whole STIG id as failing until implemented.
errors=("${stigid}")

# If ${change} = 1
#if [ ${change} -eq 1 ]; then

  # Create the backup env
  #backup_setup_env "${backup_path}"

  # Create a backup (configuration output, file/folder permissions output etc)
  #bu_configuration "${backup_path}" "${author}" "${stigid}" "$(echo "${array_values[@]}" | tr ' ' '\n')"
  #bu_file "${backup_path}" "${author}" "${stigid}" "${file}"
  #if [ $? -ne 0 ]; then

    # Stop, we require a backup
    #report "Unable to create backup" && exit 1
  #fi

  # Iterate ${errors[@]}
  #for error in ${errors[@]}; do
    # Work to remediate ${error} should go here
  #done
#fi

# Remove dupes
#inspected=( $(remove_duplicates "${inspected[@]}") )

###############################################
# Results for printable report
###############################################

# If ${#errors[@]} > 0
if [ ${#errors[@]} -gt 0 ]; then

  # Set ${results} error message
  #results="Failed validation" UNCOMMENT ONCE WORK COMPLETE!
  results="Not yet implemented!"
fi

# Set ${results} passed message
[ ${#errors[@]} -eq 0 ] && results="Passed validation"

###############################################
# Report generation specifics
###############################################

# Apply some values expected for report footer
[ ${#errors[@]} -eq 0 ] && passed=1 || passed=0
[ ${#errors[@]} -gt 0 ] && failed=1 || failed=0

# Calculate a percentage from applied modules & errors incurred
percentage=$(percent ${passed} ${failed})

# If the caller was only independent (standalone invocation)
if [ ${caller} -eq 0 ]; then

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Generate the report
  report "${results}"

  # Display the report
  cat ${log}
else

  # Since we were called from stigadm
  module_header "${results}"

  # Show failures
  [ ${#errors[@]} -gt 0 ] && print_array ${log} "errors" "${errors[@]}"

  # Provide detailed results to ${log}
  if [ ${verbose} -eq 1 ]; then

    # Print array of failed & validated items
    [ ${#inspected[@]} -gt 0 ] && print_array ${log} "validated" "${inspected[@]}"
  fi

  # Finish up the module specific report
  module_footer
fi

###############################################
# Return code for larger report
###############################################

# Return an error/success code (0/1)
exit ${#errors[@]}

# Date: 2018-09-19
#
# Severity: CAT-III
# Classification: UNCLASSIFIED
# STIG_ID: V0050821
# STIG_Version: SV-65027r1
# Rule_ID: OL6-00-000256
#
# OS: Oracle_Linux
# Version: 6
# Architecture:
#
# Title: The openldap-servers package must not be installed unless required.
# Description: Unnecessary packages should not be installed to decrease the attack surface of the system.
//app.js App({ data:{ num:0, API:'https://sample.zaixian.jichuangsi.com' }, onLaunch: function () { }, onUnlaunch: function () { } })
<gh_stars>10-100 package gov.cms.bfd.server.war.stu3.providers; import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpResponse; import gov.cms.bfd.server.war.commons.RequestHeaders; import java.io.IOException; /** A HAPI {@link IClientInterceptor} that allows us to add HTTP headers to our requests. */ public class ExtraParamsInterceptor implements IClientInterceptor { private RequestHeaders requestHeader; @Override public void interceptRequest(IHttpRequest theRequest) { // inject headers values requestHeader .getNVPairs() .forEach( (n, v) -> { theRequest.addHeader(n, v.toString()); }); } /** * @see * ca.uhn.fhir.rest.client.api.IClientInterceptor#interceptResponse(ca.uhn.fhir.rest.client.api.IHttpResponse) */ @Override public void interceptResponse(IHttpResponse theResponse) throws IOException { // nothing needed here } public void setHeaders(RequestHeaders requestHeader) { this.requestHeader = requestHeader; } }
const config = { env: { browser: true, jest: true, node: true, }, extends: ['airbnb', 'airbnb/hooks', 'airbnb-typescript', 'plugin:@typescript-eslint/recommended', 'prettier'], parser: '@typescript-eslint/parser', parserOptions: { ecmaFeatures: { jsx: true, }, ecmaVersion: 2020, sourceType: 'module', project: 'tsconfig.json', }, plugins: ['react', '@typescript-eslint', 'prettier'], rules: { '@typescript-eslint/lines-between-class-members': 0, '@typescript-eslint/no-inferrable-types': 0, '@typescript-eslint/comma-dangle': [ 2, { arrays: 'always-multiline', objects: 'always-multiline', imports: 'always-multiline', exports: 'always-multiline', functions: 'never', enums: 'always-multiline', generics: 'never', tuples: 'always-multiline', }, ], '@typescript-eslint/indent': 0, 'import/no-extraneous-dependencies': [ 2, { devDependencies: [ '**/*.test.js', '**/*.test.jsx', '**/*.spec.js', '**/*.spec.jsx', '**/*.test.ts', '**/*.test.tsx', '**/*.spec.ts', '**/*.spec.tsx', '**/setupTests.ts', ], }, ], 'padded-blocks': [2, 'never'], 'no-plusplus': 0, indent: 0, 'object-property-newline': 0, 'comma-dangle': [ 2, { arrays: 'always-multiline', objects: 'always-multiline', imports: 'always-multiline', exports: 'always-multiline', functions: 'never', }, ], 'brace-style': [2, '1tbs'], 'key-spacing': [ 2, { mode: 'strict', beforeColon: false, afterColon: true, }, ], 'comma-spacing': [ 2, { before: false, after: true, }, ], 'implicit-arrow-linebreak': 0, 'import/extensions': [ 2, 'ignorePackages', { js: 'never', json: 'never', jsx: 'never', ts: 'never', tsx: 'never', }, ], 'import/prefer-default-export': 0, 'linebreak-style': 0, 'max-len': [ 2, { code: 220, ignoreStrings: true, ignoreTemplateLiterals: true, ignoreUrls: true, }, ], quotes: [2, 'single', { avoidEscape: true }], 'no-console': 0, 'no-debugger': 2, 'no-multiple-empty-lines': [ 2, { max: 1, maxBOF: 0, maxEOF: 0, }, ], 'react-hooks/exhaustive-deps': 1, 'react-hooks/rules-of-hooks': 1, 'react/jsx-props-no-spreading': 0, 
'react/jsx-filename-extension': [ 1, { extensions: ['.tsx', '.jsx'], }, ], 'react/prop-types': 0, 'react/no-array-index-key': 2, 'react/react-in-jsx-scope': 2, 'react/self-closing-comp': [ 2, { component: true, html: true, }, ], 'react/style-prop-object': [1], 'react/jsx-boolean-value': [1, 'never'], 'react/jsx-closing-bracket-location': 0, 'react/jsx-curly-spacing': [ 2, 'never', { allowMultiline: true, }, ], 'react/jsx-first-prop-new-line': [2, 'multiline-multiprop'], 'react/jsx-fragments': [2, 'syntax'], 'react/jsx-handler-names': 0, 'react/jsx-indent': [ 2, 2, { checkAttributes: false, indentLogicalExpressions: true, }, ], 'react/jsx-key': [ 2, { checkFragmentShorthand: true, }, ], 'react/jsx-max-depth': [1, { max: 8 }], 'react/jsx-max-props-per-line': [ 1, { maximum: 1, when: 'multiline', }, ], 'react/jsx-no-bind': [ 1, { ignoreDOMComponents: false, ignoreRefs: false, allowArrowFunctions: true, allowFunctions: false, allowBind: false, }, ], 'react/jsx-no-duplicate-props': [2, { ignoreCase: true }], 'react/jsx-no-literals': 0, 'react/jsx-no-script-url': 2, 'react/jsx-no-target-blank': [ 1, { allowReferrer: false, enforceDynamicLinks: 'always', }, ], 'react/jsx-no-useless-fragment': 2, 'react/jsx-one-expression-per-line': 0, 'react/jsx-pascal-case': [2, { allowAllCaps: false }], 'react/jsx-props-no-multi-spaces': 2, 'react/jsx-sort-props': [ 1, { callbacksLast: true, shorthandFirst: false, shorthandLast: true, ignoreCase: true, noSortAlphabetically: false, reservedFirst: true, }, ], 'react/jsx-tag-spacing': [ 2, { closingSlash: 'never', beforeSelfClosing: 'always', afterOpening: 'never', beforeClosing: 'never', }, ], 'react/jsx-uses-vars': 1, 'react/jsx-wrap-multilines': [ 2, { declaration: 'parens', assignment: 'parens', return: 'parens', arrow: 'parens-new-line', condition: 'parens-new-line', logical: 'parens-new-line', prop: 'ignore', }, ], 'no-bitwise': [2, { allow: ['>>=', '&'] }], 'sort-keys': 0, 'no-unused-vars': [ 2, { vars: 'all', args: 'after-used', 
ignoreRestSiblings: false, }, ], 'object-curly-newline': 0, 'lines-between-class-members': 'off', }, settings: { 'import/extensions': ['.ts', '.tsx', '.json'], 'import/parsers': { '@typescript-eslint/parser': ['.ts', '.tsx', '.json'], }, 'import/resolver': { node: { extensions: ['.js', '.jsx', '.ts', '.tsx'], }, }, react: { pragma: 'React', version: 'detect', }, }, }; module.exports = config;
/**
 * Return true when `word` appears as a whole, space-delimited word in `str`.
 * Splitting on a single space matches the original behavior exactly
 * (consecutive spaces yield empty-string "words", which never match a
 * non-empty `word`).
 *
 * Improvement: the manual index loop re-implemented Array.prototype.includes;
 * use the built-in instead.
 *
 * @param {string} str  - text to search
 * @param {string} word - exact word to look for
 * @returns {boolean}
 */
let word_in_str = (str, word) => str.split(' ').includes(word);
#!/bin/bash
# FIX: shebang changed from /bin/sh to /bin/bash. The script depends on
# bashisms throughout: `set -o pipefail`, `[[ ]]` conditionals and the
# extglob pattern `@(rhel|fedora|centos)` — none of which POSIX sh provides.
set -e -o pipefail

# Usage:
# ./install.sh

# Latest published microshift release tag, scraped from the GitHub API.
# FIX: the releases endpoint lists every release, so `grep tag_name` can
# return many lines; take only the first (newest) one.
VERSION=$(curl -s https://api.github.com/repos/redhat-et/microshift/releases | grep tag_name | head -n 1 | cut -d '"' -f 4)

# Function to get Linux distribution (only rhel/fedora/centos are supported).
get_distro() {
    # FIX: `egrep` is deprecated; use `grep -E`.
    DISTRO=$(grep -E '^(ID)=' /etc/os-release | sed 's/"//g' | cut -f2 -d"=")
    if [[ $DISTRO != @(rhel|fedora|centos) ]]
    then
        echo "This Linux distro is not supported by the install script"
        exit 1
    fi
}

# Function to get system architecture.
get_arch() {
    ARCH=$(uname -m)
}

# If RHEL, use subscription-manager to register and enable the OCP repo.
register_subs() {
    REPO="rhocp-4.7-for-rhel-8-x86_64-rpms"
    # Check subscription status and register if not.
    STATUS=$(sudo subscription-manager status | awk '/Overall Status/ { print $3 }')
    if [[ $STATUS != "Current" ]]
    then
        sudo subscription-manager register --auto-attach
    fi
    # Check if already subscribed to the proper repository.
    if ! sudo subscription-manager repos --list-enabled | grep -q "${REPO}"
    then
        sudo subscription-manager repos --enable="${REPO}"
    fi
}

# Apply SELinux policies.
# NOTE(review): this currently disables SELinux outright instead of applying
# targeted policies (earlier semanage/chcon attempts left commented below).
apply_selinux_policy() {
    # sudo semanage fcontext -a -t container_runtime_exec_t /usr/local/bin/microshift ||
    # sudo semanage fcontext -m -t container_runtime_exec_t /usr/local/bin/microshift
    # sudo mkdir -p /var/lib/kubelet/
    # sudo chcon -R -t container_file_t /var/lib/kubelet/
    # sudo chcon -R system_u:object_r:bin_t:s0 /usr/local/bin/microshift
    sudo setenforce 0
    sudo sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
}

# Install dependencies.
install_dependencies() {
    sudo dnf install -y \
        policycoreutils-python-utils \
        conntrack \
        firewalld
}

# Establish firewall rules required by microshift/kubernetes.
establish_firewall () {
    sudo systemctl enable firewalld --now
    sudo firewall-cmd --zone=public --permanent --add-port=6443/tcp
    sudo firewall-cmd --zone=public --permanent --add-port=30000-32767/tcp
    sudo firewall-cmd --zone=public --permanent --add-port=2379-2380/tcp
    sudo firewall-cmd --zone=public --add-masquerade --permanent
    sudo firewall-cmd --zone=public --add-port=10250/tcp --permanent
    sudo firewall-cmd --zone=public --add-port=10251/tcp --permanent
    sudo firewall-cmd --reload
}

# Install CRI-O depending on the distro.
install_crio() {
    case $DISTRO in
        "fedora")
            sudo dnf module -y enable cri-o:1.20
            sudo dnf install -y cri-o cri-tools
        ;;
        "rhel")
            sudo dnf install cri-o cri-tools -y
        ;;
        "centos")
            CRIOVERSION=1.20
            OS=CentOS_8_Stream
            sudo curl -L -o /etc/yum.repos.d/devel:kubic:libcontainers:stable.repo https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/$OS/devel:kubic:libcontainers:stable.repo
            sudo curl -L -o /etc/yum.repos.d/devel:kubic:libcontainers:stable:cri-o:$CRIOVERSION.repo https://download.opensuse.org/repositories/devel:kubic:libcontainers:stable:cri-o:$CRIOVERSION/$OS/devel:kubic:libcontainers:stable:cri-o:$CRIOVERSION.repo
            sudo dnf install -y cri-o cri-tools
        ;;
    esac
}

# CRI-O config to match microshift networking values (pod CIDR, CNI version).
crio_conf() {
    sudo sed -i 's/10.85.0.0\/16/10.42.0.0\/24/' /etc/cni/net.d/100-crio-bridge.conf
    sudo sed -i 's/0.3.1/0.4.0/' /etc/cni/net.d/100-crio-bridge.conf
    if [ "$DISTRO" == "rhel" ]; then
        sudo sed -i 's|/usr/libexec/crio/conmon|/usr/bin/conmon|' /etc/crio/crio.conf
    fi
}

# Start CRI-O.
verify_crio() {
    sudo systemctl enable crio
    sudo systemctl restart crio
}

# Download and install kubectl (latest stable release).
get_kubectl() {
    curl -LO "https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl"
    sudo chmod +x ./kubectl
    sudo mv ./kubectl /usr/local/bin/kubectl
}

# Download, checksum-verify and install microshift, then start it as a
# systemd service.
get_microshift() {
    if [ "$ARCH" = "x86_64" ]; then
        curl -L https://github.com/redhat-et/microshift/releases/download/$VERSION/microshift-linux-amd64 -o microshift
        curl -L https://github.com/redhat-et/microshift/releases/download/$VERSION/release.sha256 -o release.sha256
    fi

    SHA=$(sha256sum microshift | awk '{print $1}')
    if [[ $SHA != $(cat release.sha256 | awk '{print $1}') ]]; then
        echo "SHA256 checksum failed" && exit 1
    fi

    sudo chmod +x microshift
    sudo mv microshift /usr/local/bin/

    apply_selinux_policy

    cat << EOF | sudo tee /usr/lib/systemd/system/microshift.service
[Unit]
Description=Microshift

[Service]
WorkingDirectory=/usr/local/bin/
ExecStart=microshift run
Restart=always
User=root

[Install]
WantedBy=multi-user.target
EOF

    sudo systemctl enable microshift.service --now
}

# Locate kubeadmin configuration to default kubeconfig location.
prepare_kubeconfig() {
    mkdir -p "$HOME/.kube"
    if [ -f "$HOME/.kube/config" ]; then
        mv "$HOME/.kube/config" "$HOME/.kube/config.orig"
    fi
    sudo KUBECONFIG=/var/lib/microshift/resources/kubeadmin/kubeconfig:$HOME/.kube/config.orig /usr/local/bin/kubectl config view --flatten > "$HOME/.kube/config"
}

# Script execution
get_distro
get_arch
if [ "$DISTRO" = "rhel" ]; then
    register_subs
fi
install_dependencies
establish_firewall
install_crio
crio_conf
verify_crio
get_kubectl
get_microshift

# Wait for microshift to write the kubeadmin kubeconfig before exporting it.
until sudo test -f /var/lib/microshift/resources/kubeadmin/kubeconfig
do
    sleep 2
done
prepare_kubeconfig
<filename>web.js const http = require('http'); const axios = require('axios').default; const url = require('url'); const fs = require('fs'); const qs = require('querystring'); const path = require('path'); module.exports = { start: (client, ops) => { const server = http.createServer((req, res) => { let parsed = url.parse(req.url, true); if (parsed.pathname.startsWith('/static/')) { if (parsed.pathname.startsWith('/static/html/')) { fs.readFile(`./assets/html/${path.parse(parsed.pathname).base}`, 'utf8', (err, data) => { if (err) { res.writeHead(404, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end('404 Not Found'); return; } res.writeHead(200, { 'Content-Type': "text/html; charset=UTF-8", 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end(data); }); } else if (parsed.pathname.startsWith('/static/css/')) { fs.readFile(`./assets/css/${path.parse(parsed.pathname).base}`, 'utf8', (err, data) => { if (err) { res.writeHead(404, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end('404 Not Found'); return; } res.writeHead(200, { 'Content-Type': "text/css; charset=UTF-8", 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end(data); }); } else if (parsed.pathname.startsWith('/static/js/')) { fs.readFile(`./assets/js/${path.parse(parsed.pathname).base}`, 'utf8', (err, data) => { if (err) { res.writeHead(404, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end('404 Not Found'); return; } res.writeHead(200, { 'Content-Type': "text/javascript; charset=UTF-8", 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end(data); }); } else if (parsed.pathname.startsWith('/static/image/')) { fs.readFile(`./assets/image/${path.parse(parsed.pathname).base}`, (err, data) => { if (err) { res.writeHead(404, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); 
res.end('404 Not Found'); return; } res.writeHead(200, { 'Content-Type': "image/png", 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end(data); }); } else if (parsed.pathname.startsWith('/static/json/')) { fs.readFile(`./assets/json/${path.parse(parsed.pathname).base}`, (err, data) => { if (err) { res.writeHead(404, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end('404 Not Found'); return; } res.writeHead(200, { 'Content-Type': "application/json; charset=UTF-8", 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end(data); }); } else { res.writeHead(404, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end('404 Not Found'); } } else if (parsed.pathname == '/manifest.json') { res.writeHead(200, { 'content-type': 'application/json; charset=UTF-8', 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); fs.readFile('./assets/json/manifest.json', 'utf8', (err, data) => { res.end(data); }); } else if (parsed.pathname == '/serviceWorker.js') { res.writeHead(200, { 'content-type': 'text/javascript; charset=UTF-8', 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); fs.readFile('./assets/js/serviceWorker.js', 'utf8', (err, data) => { res.end(data); }); } else { if (req.headers['user-agent'] && (req.headers['user-agent'].includes('MSIE') || req.headers['user-agent'].includes('rv:11.0'))) { res.writeHead(200, { 'Content-Type': "text/html; charset=UTF-8", 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); fs.readFile('./assets/html/ie.html', 'utf8', (err, data) => { res.end(data); }); return; } if (client.paths.get(parsed.pathname)) { if (client.paths.get(parsed.pathname).method == req.method) { client.paths.get(parsed.pathname).run(client, req, res, parsed, ops); } else { res.writeHead(405, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); 
res.end('405 Method Not Allowed') } } else { res.writeHead(404, { 'strict-transport-security': 'max-age=86400; includeSubDomains; preload' }); res.end('404 Not Found'); } } }); server.listen(process.env.PORT || 3000); const io = require('socket.io')(server); io.on('connection', socket => { socket.on('notifySubscription', data => { let dbFile = require('/home/data/notifications.json'); dbFile.subscriptions.push(data); fs.writeFile('/home/data/notifications.json', JSON.stringify(dbFile), () => {}); }); }); } }
# Deploy to Firebase with the CI token (used for the master branch; see wiki for details).
# FIX: the token is now quoted so deployment does not break (or leak arguments)
# if the token is unset or ever contains shell metacharacters.
firebase deploy --token "$FIREBASE_TOKEN" --non-interactive
/* Imports */ import path from 'path'; import { promises as fs, Dirent } from 'fs'; import { ITreeItem } from '../types/index.js'; import { hasOwnDir, listContents, reduceTree, confirmProceed } from './index.js'; /* File utils */ const formatAsTreeItems = (root: string, dirItems: Array<Dirent>): Array<ITreeItem> => { return dirItems.map(dirItem => { return { path: path.join(root, dirItem.name), type: dirItem.isDirectory() ? 'folder' : 'file' }; }); }; const readTree = async (root: string, treeItems: Array<ITreeItem> = []): Promise<ITreeItem[]> => { const dirItems = await fs.readdir(root, { withFileTypes: true }); treeItems = formatAsTreeItems(root, dirItems); for (let treeItem of treeItems) { if (treeItem.type === 'folder') { treeItem.dir = await readTree(treeItem.path); }; }; return treeItems; }; const loadJSON = async <T>(JSONFilePath: string): Promise<T> => { let content; try { const buffer = await fs.readFile(JSONFilePath); content = JSON.parse(buffer.toString()); } catch (err) { console.log(`! NOTE: valid JSON file not found at path: ${JSONFilePath}.`); content = {}; }; return content; }; const importFile = async (targetFilePath: string): Promise<any> => { return await import(targetFilePath); }; const mkdir = async (targetFolderPath: string, treeItem: ITreeItem): Promise<boolean> => { try { await fs.mkdir(targetFolderPath); return true; } catch(err) { const msgs = { error: `ERROR creating folder at path: ${targetFolderPath}.`, exist: 'Folder already exists.' }; if (err.message.slice(0, 6) !== 'EEXIST') { console.log(`✕ ${msgs.error} ${err} Exiting.`); process.exit(); }; if (!hasOwnDir(treeItem)) { console.log( `! ${msgs.error} ${msgs.exist} Note: new folder has no contents. ` + 'Result: existing folder retained, contents unmodified.' ); return false; }; const targetDirItems: Array<Dirent> = await fs.readdir(targetFolderPath, { withFileTypes: true}); const targetTreeItems = formatAsTreeItems(targetFolderPath, targetDirItems); const question = `? 
${msgs.error} ${msgs.exist} ` + `${listContents('Existing folder', targetTreeItems)} ` + `${listContents('New folder', treeItem.dir as Array<ITreeItem>)} ` + `Continue using the existing folder? ` + 'A new file will overwrite an existing file with the same name. ' + 'NOTE: a filename with a thru infix changes before creation. ' + '(Enter y to continue or any other key to exit the generation process.) ' if (await confirmProceed(question)) { console.log('Continuing...'); return false; }; console.log('Exiting...'); process.exit(); }; }; const writeFile = async (targetFilePath: string, content: string): Promise<boolean> => { await fs.writeFile(targetFilePath, content); return true; }; const copyFile = async (extantFilePath: string, targetFilePath: string): Promise<boolean> => { await fs.copyFile(extantFilePath, targetFilePath); return true; }; /* Exports */ export { readTree, loadJSON, importFile, mkdir, writeFile, copyFile };
#!/usr/bin/env bash set -ex sbt +publishSigned sbt sonatypeReleaseAll echo "Released"
export * from './hi-there.component'; export * from './hi-there.route'; export * from './hi-there.module';
require "./array/my_max.rb"

# Spec for the Array#my_max monkey-patch.
describe Array do
  describe "#my_max" do
    # FIX: corrected typo in the example description ("maximium" -> "maximum").
    it "finds the maximum value in array" do
      expect([2, 5, 2, 6, 3, 9].my_max).to eq(9)
    end
  end
end
<reponame>agneym/react-loading import * as React from 'react'; const Docs = () => { return ( <> <h1>Installation</h1> <code> npm install @agney/react-loading </code> <p>For more detailed docs, visit <a href="https://github.com/agneym/react-loading">Github page</a></p> </> ); } export default Docs;
import classnames from 'classnames';
import React, { useState } from 'react';
import { SupportedLocale } from '../../features/i18n/types';
import styles from './LanguageSwitcher.module.scss';

/** One selectable locale: machine code plus human-readable title. */
export interface ILocaleProps {
  code: SupportedLocale;
  title: string;
}

/** Props for LanguageSwitcher. */
export interface ILanguageSwitcherParams {
  // Locale currently in effect (always shown in the header area).
  currentLanguage: ILocaleProps;
  // All locales offered in the dropdown; an empty array hides the menu.
  languageOptions: Array<ILocaleProps>;
  // Invoked with the chosen locale code when the user picks an option.
  onLanguageSwitch?: (locale: SupportedLocale) => void;
}

/**
 * Dropdown language selector. Clicking anywhere on the component toggles the
 * dropdown; clicking an option reports the chosen locale to the parent.
 * Short codes are shown on mobile, full titles on desktop (CSS-driven).
 */
const LanguageSwitcher = (props: ILanguageSwitcherParams) => {
  // Whether the options menu is currently expanded.
  const [isDropdownVisible, setIsDropdownVisible] = useState(false);
  const { currentLanguage, languageOptions } = props;
  return (
    <div
      className={styles.languageSwitcherContainer}
      onClick={() => setIsDropdownVisible(!isDropdownVisible)}
    >
      <div className={styles.currentLanguage}>
        <span className={styles.isMobileOnly}>{currentLanguage.code}</span>
        <span className={styles.isDektopOnly}>{currentLanguage.title}</span>
      </div>
      <div className={styles.languageSelector}>
        {/* Chevron that flips while the dropdown is open. */}
        <span
          className={classnames([
            styles.arrow,
            isDropdownVisible ? styles.isDropdownVisible : null,
          ])}
        />
      </div>
      {languageOptions.length > 0 && (
        <ul
          className={classnames([
            styles.languagesMenu,
            isDropdownVisible ? styles.isDropdownVisible : null,
          ])}
        >
          {languageOptions.map((lang) => (
            <li
              key={lang.code}
              onClick={() => {
                // Optional chaining: callback may be omitted by the parent.
                props.onLanguageSwitch?.(lang.code);
              }}
            >
              <span className={styles.isMobileOnly}>{lang.code}</span>
              <span className={styles.isDektopOnly}>{lang.title}</span>
            </li>
          ))}
        </ul>
      )}
    </div>
  );
};

export default LanguageSwitcher;
#!/usr/bin/env bash PUSH=$1 DATE="$(date "+%Y%m%d%H%M")" REPOSITORY_NAME="latonaio" IMAGE_NAME="aion-statuskanban" DOCKERFILE_DIR="./cmd/kanban-server" DOCKERFILE_NAME="Dockerfile-kanban-server" # build servicebroker DOCKER_BUILDKIT=1 docker build -f ${DOCKERFILE_DIR}/${DOCKERFILE_NAME} -t ${REPOSITORY_NAME}/${IMAGE_NAME}:"${DATE}" . docker tag ${REPOSITORY_NAME}/${IMAGE_NAME}:"${DATE}" ${REPOSITORY_NAME}/${IMAGE_NAME}:latest if [[ $PUSH == "push" ]]; then docker push ${REPOSITORY_NAME}/${IMAGE_NAME}:"${DATE}" docker push ${REPOSITORY_NAME}/${IMAGE_NAME}:latest fi
/* Copyright 2017 <NAME>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "test_common.hpp"

using testing::_;
using testing::Return;

// Test fixture for the layer-state API. Most cases below are intentionally
// disabled (commented out); only LayerStateCmp is currently active.
class ActionLayer : public TestFixture {};

// TEST_F(ActionLayer, LayerStateDBG) {
//     layer_state_set(0);
// }

// TEST_F(ActionLayer, LayerStateSet) {
//     layer_state_set(0);
//     EXPECT_EQ(layer_state, 0);
//     layer_state_set(0b001100);
//     EXPECT_EQ(layer_state, 0b001100);
// }

// TEST_F(ActionLayer, LayerStateIs) {
//     layer_state_set(0);
//     EXPECT_EQ(layer_state_is(0), true);
//     EXPECT_EQ(layer_state_is(1), true);
//     layer_state_set(1);
//     EXPECT_EQ(layer_state_is(0), true);
//     EXPECT_EQ(layer_state_is(1), true);
//     layer_state_set(2);
//     EXPECT_EQ(layer_state_is(0), false);
//     EXPECT_EQ(layer_state_is(1), false);
//     EXPECT_EQ(layer_state_is(2), true);
// }

// Exercises layer_state_cmp(state, layer) against bitmask states 0, 1 and 2.
// NOTE(review): the expectations encode that state 0 compares true for layer 0
// — presumably the "default layer active" convention; confirm against the
// layer_state_cmp implementation.
TEST_F(ActionLayer, LayerStateCmp) {
    uint32_t prev_layer;
    prev_layer = 0;
    EXPECT_EQ(layer_state_cmp(prev_layer, 0), true);
    EXPECT_EQ(layer_state_cmp(prev_layer, 1), false);

    prev_layer = 1;
    EXPECT_EQ(layer_state_cmp(prev_layer, 0), true);
    EXPECT_EQ(layer_state_cmp(prev_layer, 1), false);

    prev_layer = 2;
    EXPECT_EQ(layer_state_cmp(prev_layer, 0), false);
    EXPECT_EQ(layer_state_cmp(prev_layer, 1), true);
    EXPECT_EQ(layer_state_cmp(prev_layer, 2), false);
}

// TEST_F(ActionLayer, LayerClear) {
//     layer_clear();
//     EXPECT_EQ(layer_state, 0);
// }

// TEST_F(ActionLayer, LayerMove) {
//     layer_move(0);
//     EXPECT_EQ(layer_state, 1);
//     layer_move(3);
//     EXPECT_EQ(layer_state, 0b1000);
// }

// TEST_F(ActionLayer, LayerOn) {
//     layer_clear();
//     layer_on(1);
//     layer_on(3);
//     layer_on(3);
//     EXPECT_EQ(layer_state, 0b1010);
// }

// TEST_F(ActionLayer, LayerOff) {
//     layer_clear();
//     layer_on(1);
//     layer_on(3);
//     layer_off(3);
//     layer_off(2);
//     EXPECT_EQ(layer_state, 0b1000);
// }
<gh_stars>1-10 /* * MIT License * * Copyright (c) 2021 Imanity Software * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 */
package org.imanity.addon.chunkanalyzer.data;

import lombok.Getter;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.entity.EntityType;
import org.imanity.imanityspigot.ImanitySpigot;
import org.imanity.imanityspigot.chunk.ChunkAnalyse;
import org.imanity.imanityspigot.config.ImanitySpigotWorldConfig;

/**
 * Snapshot of a chunk-analysis run for one world: captures the export from the
 * server's ChunkAnalyse service together with the sort settings, the capture
 * time and the world's config.
 */
@Getter
public class ChunkAnalyzeResult {

    // What the export was sorted by (target metric) and how (method).
    private final ChunkAnalyse.SortTarget sortTarget;
    private final ChunkAnalyse.SortMethod sortMethod;
    // The analysis data fetched from the server at construction time.
    private final ChunkAnalyse.WorldAnalysesExport export;
    // Capture timestamp (epoch millis).
    private final long time;
    private final ImanitySpigotWorldConfig worldConfig;

    // NOT USEFUL TO USE MULTIPLE CONSTRUCTOR SINCE WE WANT TO HANDLE IN IF THINGS ARE NULL OR NOT
    /**
     * Fetches the analysis export for {@code world}, filtered by entity type
     * when {@code entityType} is non-null, by tile-entity type when
     * {@code tileEntityType} is non-null, otherwise unfiltered.
     * Note: {@code entityType} takes precedence when both are supplied.
     */
    public ChunkAnalyzeResult(ChunkAnalyse.SortTarget sortTarget, ChunkAnalyse.SortMethod sortMethod, EntityType entityType, ChunkAnalyse.TileEntityType tileEntityType, World world) {
        this.sortTarget = sortTarget;
        this.sortMethod = sortMethod;
        if (entityType != null) {
            this.export = Bukkit.imanity().getChunkAnalyse().getAnalyseExport(world, sortTarget, sortMethod, entityType);
        } else if (tileEntityType != null) {
            this.export = Bukkit.imanity().getChunkAnalyse().getAnalyseExport(world, sortTarget, sortMethod, tileEntityType);
        } else {
            this.export = Bukkit.imanity().getChunkAnalyse().getAnalyseExport(world, sortTarget, sortMethod);
        }
        this.time = System.currentTimeMillis();
        this.worldConfig = ImanitySpigot.INSTANCE.getWorldConfig(world.getName());
    }

    /**
     * Classifies a chunk by its average tick time against a lag threshold:
     * HIGH at/above the threshold, MEDIUM at/above 75% of it, LOW otherwise.
     */
    public WarningType getWarningType(ChunkAnalyse.ChunkAnalysesExport chunk) {
        double potentialLagChunkTickTime = 2.0; // TODO this.worldConfig.getPotentialLagChunkTickTime();
        double average = chunk.getTotal().getAvg();
        return average >= potentialLagChunkTickTime ? WarningType.HIGH : average >= potentialLagChunkTickTime * 0.75 ? WarningType.MEDIUM : WarningType.LOW;
    }

    /** Severity bucket with the chat color and GUI icon used to render it. */
    @Getter
    public enum WarningType {

        HIGH("&c", Material.REDSTONE_BLOCK),
        MEDIUM("&e", Material.GOLD_BLOCK),
        LOW("&2", Material.EMERALD_BLOCK);

        private final String color;
        private final Material icon;

        WarningType(String color, Material icon) {
            this.color = color;
            this.icon = icon;
        }
    }
}
import resolve from '@rollup/plugin-node-resolve';
import commonjs from '@rollup/plugin-commonjs';
import typescript from '@rollup/plugin-typescript';
import svelte from 'rollup-plugin-svelte';
import { terser } from 'rollup-plugin-terser';

import pkg from './package.json';

const { preprocess } = require('./svelte.config');

// True when running `rollup -w` (dev server); controls minification below.
const dev = process.env.ROLLUP_WATCH;

// Bundle the runtime entry to both ES module and UMD outputs declared in
// package.json ("module" and "main" respectively).
export default {
  input: 'runtime/index.ts',
  inlineDynamicImports: true,
  output: [
    { file: pkg.module, format: 'es', sourcemap: true },
    { file: pkg.main, format: 'umd', name: 'multi', sourcemap: true },
  ],
  plugins: [
    svelte({
      // enable run-time checks when not in production
      compilerOptions: {
        // Without this, dynamic components don't work.
        dev: true,
      },
      preprocess,
    }),
    // If you have external dependencies installed from
    // npm, you'll most likely need these plugins. In
    // some cases you'll need additional configuration —
    // consult the documentation for details:
    // https://github.com/rollup/rollup-plugin-commonjs
    resolve({
      browser: true,
      dedupe: ['svelte'],
    }),
    commonjs(),
    typescript({
      tsconfig: './tsconfig.json',
    }),
    // If we're building for production (npm run build
    // instead of npm run dev), minify
    !dev && terser(),
  ],
};
<gh_stars>0 var https = require('https') var aws4 = require('aws4') require('dotenv').config() const Post = require('../models/post'); const list = async (req, res) => { if (!req.query) { return res.json({}); } search = req.query.text; var index = 'posts' var opts = { host: 'search-post-es-hgy7q3zf65w7yfxf5yjocuubq4.us-east-2.es.amazonaws.com', path: ('/' + index + '/' + '_search?q=' + search), service: 'es', region: 'us-east-2', } aws4.sign(opts, { accessKeyId: process.env.ES_ACCESS_KEY_ID, secretAccessKey: process.env.ES_SECRET_KEY}) https.request(opts, resp => { let data = ''; resp.on('data', (chunk) => { data += chunk; }); resp.on('end', async () => { results = JSON.parse(data); if (!results || !results.hits || !results.hits.hits) { res.status(500).json([]); } else { const postIds = results.hits.hits.map(x => x._id); let foundPosts = await Promise.all(postIds.map(async id => { return await Post.get( {"id" : id } ); })); res.json(foundPosts.filter(x => x)); } }); }) .end(opts.body || '') }; module.exports = { list };
-- Count how many course enrollments each student has in student_courses.
SELECT student_name, COUNT(*) AS "Number of Courses"
FROM student_courses
GROUP BY student_name;
#!/bin/bash # Copyright 2013 Daniel Povey # 2014 David Snyder # Apache 2.0. # This script extracts iVectors for a set of utterances, given # features and a trained iVector extractor. # Begin configuration section. nj=30 num_threads=1 # Number of threads used by ivector-extract. It is usually not # helpful to set this to > 1. It is only useful if you have # fewer speakers than the number of jobs you want to run. cmd="run.pl" stage=0 num_gselect=20 # Gaussian-selection using diagonal model: number of Gaussians to select min_post=0.025 # Minimum posterior to use (posteriors below this are pruned out) posterior_scale=1.0 # This scale helps to control for successve features being highly # correlated. E.g. try 0.1 or 0.3. apply_cmn=true # If true, apply sliding window cepstral mean normalization # End configuration section. echo "$0 $@" # Print the command line for logging if [ -f path.sh ]; then . ./path.sh; fi . parse_options.sh || exit 1; if [ $# != 3 ]; then echo "Usage: $0 <extractor-dir> <data> <ivector-dir>" echo " e.g.: $0 exp/extractor_2048_male data/train_male exp/ivectors_male" echo "main options (for others, see top of script file)" echo " --config <config-file> # config containing options" echo " --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs." echo " --nj <n|10> # Number of jobs (also see num-threads)" echo " --num-threads <n|1> # Number of threads for each job" echo " --stage <stage|0> # To control partial reruns" echo " --num-gselect <n|20> # Number of Gaussians to select using" echo " # diagonal model." echo " --min-post <min-post|0.025> # Pruning threshold for posteriors" echo " --apply-cmn <true,false|true> # if true, apply sliding window cepstral mean" echo " # normalization to features" exit 1; fi srcdir=$1 data=$2 dir=$3 for f in $srcdir/final.ie $srcdir/final.ubm $data/feats.scp ; do [ ! -f $f ] && echo "No such file $f" && exit 1; done # Set various variables. 
mkdir -p $dir/log sdata=$data/split$nj; utils/split_data.sh $data $nj || exit 1; delta_opts=`cat $srcdir/delta_opts 2>/dev/null` ## Set up features. if $apply_cmn; then feats="ark,s,cs:add-deltas $delta_opts scp:$sdata/JOB/feats.scp ark:- | apply-cmvn-sliding --norm-vars=false --center=true --cmn-window=300 ark:- ark:- | select-voiced-frames ark:- scp,s,cs:$sdata/JOB/vad.scp ark:- |" else feats="ark,s,cs:add-deltas $delta_opts scp:$sdata/JOB/feats.scp ark:- | select-voiced-frames ark:- scp,s,cs:$sdata/JOB/vad.scp ark:- |" fi if [ $stage -le 0 ]; then echo "$0: extracting iVectors" dubm="fgmm-global-to-gmm $srcdir/final.ubm -|" $cmd --num-threads $num_threads JOB=1:$nj $dir/log/extract_ivectors.JOB.log \ gmm-gselect --n=$num_gselect "$dubm" "$feats" ark:- \| \ fgmm-global-gselect-to-post --min-post=$min_post $srcdir/final.ubm "$feats" \ ark,s,cs:- ark:- \| scale-post ark:- $posterior_scale ark:- \| \ ivector-extract --verbose=2 --num-threads=$num_threads $srcdir/final.ie "$feats" \ ark,s,cs:- ark,scp,t:$dir/ivector.JOB.ark,$dir/ivector.JOB.scp || exit 1; fi if [ $stage -le 1 ]; then echo "$0: combining iVectors across jobs" for j in $(seq $nj); do cat $dir/ivector.$j.scp; done >$dir/vector.scp || exit 1; fi if [ $stage -le 2 ]; then # Be careful here: the speaker-level iVectors are now length-normalized, # even if they are otherwise the same as the utterance-level ones. echo "$0: computing mean of iVectors for each speaker and length-normalizing" $cmd $dir/log/speaker_mean.log \ ivector-normalize-length scp:$dir/vector.scp ark:- \| \ ivector-mean ark:$data/spk2utt ark:- ark:- ark,t:$dir/num_utts.ark \| \ ivector-normalize-length ark:- ark,scp:$dir/spk_vector.ark,$dir/spk_vector.scp || exit 1; fi
package com.sereno.view;

import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Path;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;

import com.sereno.color.HSVColor;
import com.sereno.vfv.R;

/**
 * Custom view implementing an HSV color picker: a saturation/value square on
 * top, a hue slider strip below it, and a triangular handle under the strip.
 * The picked color lives in a ColorPickerData model; this view redraws on
 * model changes and writes back on touch.
 */
public class ColorPickerView extends View implements ColorPickerData.IColorPickerDataListener
{
    /** Which control a touch gesture is currently dragging. */
    private enum ColorPickerTarget
    {
        SLIDER,
        PICKER
    }

    /** Default hue slider picker height*/
    private static final int PICKER_HEIGHT = 50;

    /** Default space between the picker SV and slider*/
    private static final int PICKER_SPACE = 10;

    /** Default circle radius targeting the current color*/
    private static final int PICKER_CIRCLE_RADIUS = 15;

    /** Default slider handler height*/
    private static final int SLIDER_HEIGHT = 10;

    /** The model bound to this view*/
    private ColorPickerData m_model = new ColorPickerData();

    /** The paint object used to draw on the canvas*/
    private Paint m_paint = new Paint();

    /** circle radius targeting the current color*/
    private int m_circleRadius;

    /** hue slider picker height*/
    private int m_hueHeight;

    /** space between the picker SV and slider*/
    private int m_pickerSpace;

    /** Default slider handler height*/
    private int m_sliderHeight;

    /** What are we currently moving ?*/
    private ColorPickerTarget m_currentTargetSelection = ColorPickerTarget.PICKER;

    public ColorPickerView(Context context)
    {
        super(context);
        init(null);
    }

    public ColorPickerView(Context context, @Nullable AttributeSet attrs)
    {
        super(context, attrs);
        init(attrs);
    }

    public ColorPickerView(Context context, @Nullable AttributeSet attrs, int defStyleAttr)
    {
        super(context, attrs, defStyleAttr);
        init(attrs);
    }

    public ColorPickerView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes)
    {
        super(context, attrs, defStyleAttr, defStyleRes);
        init(attrs);
    }

    /** Initialize the internal state of the color picker view
     * @param attrs the parameters key/value attributes*/
    private void init(AttributeSet attrs)
    {
        // Read styled dimensions, falling back to the compile-time defaults.
        TypedArray ta = getContext().obtainStyledAttributes(attrs, R.styleable.ColorPickerView);
        m_circleRadius = ta.getDimensionPixelSize(R.styleable.ColorPickerView_circleRadius, PICKER_CIRCLE_RADIUS);
        m_hueHeight = ta.getDimensionPixelSize(R.styleable.ColorPickerView_pickerHueHeight, PICKER_HEIGHT);
        m_pickerSpace = ta.getDimensionPixelSize(R.styleable.ColorPickerView_pickerSpace, PICKER_SPACE);
        m_sliderHeight = ta.getDimensionPixelSize(R.styleable.ColorPickerView_sliderHeight, SLIDER_HEIGHT);
        ta.recycle();

        m_model.addListener(this);
        m_paint.setStrokeWidth(3.0f);
    }

    @Override
    public void onDraw(Canvas canvas)
    {
        HSVColor hsvRGB = m_model.getColor();

        //Draw picker color palette
        // Handle width derived from the slider height and a 30-degree triangle.
        int handlerWidth = (int)(2.0f*m_sliderHeight*Math.tan(30.0/180.0*Math.PI));
        int pickerWidth = getWidth() - handlerWidth;
        int pickerHeight = (int)Math.max(0.0, getHeight() - m_hueHeight - m_pickerSpace - m_sliderHeight);
        HSVColor hsv = new HSVColor(0, 0, 0, 1.0f);
        m_paint.setStyle(Paint.Style.FILL);
        // Rasterize the SV square in 3x3 pixel tiles for speed; hue is fixed
        // at the current color's hue.
        for(int i = 0; i < pickerWidth; i+=3)
        {
            for(int j = 0; j < pickerHeight; j+=3)
            {
                hsv.v = ((float)j)/(pickerHeight-1);
                hsv.s = ((float)i)/(pickerWidth-1);
                hsv.h = hsvRGB.h;

                int intColor = hsv.toRGB().toARGB8888();
                m_paint.setColor(intColor);
                canvas.drawRect(i+handlerWidth/2.0f, j, i+handlerWidth/2.0f+Math.min(3.0f, pickerWidth-i-1), j+Math.min(3.0f, pickerHeight-j-1), m_paint);
            }
        }

        HSVColor hsvRGBClone = new HSVColor(hsvRGB.h, 1.0f, 1.0f, 1.0f);
        int rgbHandler = hsvRGBClone.toRGB().toARGB8888();

        //Draw the value slider
        // NOTE(review): this loop actually sweeps the HUE (0..360) across the
        // strip; the "value slider" comment appears to be a misnomer.
        for(int i = pickerWidth-1; i >= 0; i--)
        {
            hsvRGBClone.h = 360.0f*((float)i)/(pickerWidth-1);
            m_paint.setColor(hsvRGBClone.toRGB().toARGB8888());
            canvas.drawLine(i+handlerWidth/2.0f, pickerHeight+m_pickerSpace, i+handlerWidth/2.0f, pickerHeight+m_pickerSpace+m_hueHeight, m_paint);
        }

        //Draw the handler
        // Triangle under the hue strip positioned at the current hue.
        Path path = new Path();
        path.moveTo(pickerWidth * hsvRGB.h/360.0f+handlerWidth/2.0f, getHeight()-m_sliderHeight);
        path.lineTo(pickerWidth * hsvRGB.h/360.0f, getHeight());
        path.lineTo(pickerWidth * hsvRGB.h/360.0f+handlerWidth, getHeight());
        path.close();
        m_paint.setColor(rgbHandler);
        canvas.drawPath(path, m_paint);

        //Draw the circle where the actual picker is
        int x = (int)(hsvRGB.s*pickerWidth);
        int y = (int)(hsvRGB.v*pickerHeight);
        m_paint.setStyle(Paint.Style.STROKE);
        m_paint.setColor(android.graphics.Color.BLACK);
        canvas.drawCircle(x, y, m_circleRadius, m_paint);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event)
    {
        super.onTouchEvent(event);

        HSVColor hsvRGB = null;
        int handlerWidth = (int)(2.0f*m_sliderHeight*Math.tan(30.0/180.0*Math.PI));
        int pickerWidth = getWidth()-handlerWidth;
        // Clamp the touch X into the drawable picker width.
        float x = Math.max(0.0f, Math.min(event.getX() - handlerWidth/2.0f, pickerWidth));

        //Determine what we are moving
        // Target is latched on ACTION_DOWN and kept for the whole drag.
        if(event.getAction() == MotionEvent.ACTION_DOWN)
        {
            if(event.getY() > getHeight() - m_hueHeight)
                m_currentTargetSelection = ColorPickerTarget.SLIDER;
            else if(event.getY() < getHeight() - m_hueHeight)
                m_currentTargetSelection = ColorPickerTarget.PICKER;
        }

        //Move the target
        if(event.getAction() == MotionEvent.ACTION_DOWN ||
           event.getAction() == MotionEvent.ACTION_MOVE)
        {
            //Check the slider area
            if(m_currentTargetSelection == ColorPickerTarget.SLIDER)
            {
                hsvRGB = (HSVColor)m_model.getColor().clone();
                hsvRGB.h = 360.0f*x/pickerWidth;
            }

            //Check the picker area
            else if(m_currentTargetSelection == ColorPickerTarget.PICKER)
            {
                // NOTE(review): this clamp uses (height - hueHeight - pickerSpace)
                // while onDraw's pickerHeight also subtracts m_sliderHeight —
                // the denominators differ; confirm whether that is intended.
                float y = Math.max(0.0f, Math.min(event.getY(), getHeight() - m_hueHeight - m_pickerSpace));
                hsvRGB = (HSVColor)m_model.getColor().clone();
                hsvRGB.v = y / (getHeight() - m_hueHeight - m_pickerSpace);
                hsvRGB.s = x / pickerWidth;
            }
        }

        //Update the color if necessary
        if(hsvRGB != null)
        {
            m_model.setColor(hsvRGB);
            return true;
        }
        return false;
    }

    /** Get the model bound to this color picker view
     * @return the model*/
    public ColorPickerData getModel()
    {
        return m_model;
    }

    /** Set the model bound to this color picker view
     * @param model the new model. Must be different from NULL*/
    public void setModel(ColorPickerData model)
    {
        m_model.removeListener(this);
        m_model = model;
        m_model.addListener(this);
    }

    @Override
    public void onSetColor(ColorPickerData data, int color)
    {
        // Model changed: request a redraw so the view reflects the new color.
        invalidate();
    }
}
<gh_stars>0 /* * -* *- *- *- *- *- *- * * ** -* -* -* - *- *- *-* - ** - *- - * *- */ /* * _ _ +\ */ /* - | |_ ___ ___ ___ ___ ___ ___ ___ _| |___ ___ ___ ___ + */ /* + | _| _| .'| |_ -| _| -_| | . | -_| | _| -_| /* */ /* * |_| |_| |__,|_|_|___|___|___|_|_|___|___|_|_|___|___| + */ /* - ~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~--~ * */ /* * <NAME> | okruitho | Alpha_1337k *- */ /* -* <NAME> | rvan-hou | robijnvh -+ */ /* * / <NAME> | jbennink | JonasDBB /- */ /* / <NAME> | tvan-cit | Tjobo-Hero * */ /* + <NAME> | rbraaksm | rbraaksm - */ /* *. ._ */ /* *. main.ts | Created: 2021-10-06 17:48:04 ._ */ /* - Edited on 2021-10-06 17:48:04 by alpha .- */ /* -* *- *- * -* -* -* ** - *-* -* * / -* -*- * /- - -* --*-*++ * -* * */ import { NestFactory } from '@nestjs/core'; import { AppModule } from './app.module'; import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger'; import { join } from 'path'; import { NestExpressApplication } from '@nestjs/platform-express'; async function bootstrap() { const app = await NestFactory.create<NestExpressApplication>(AppModule); const config = new DocumentBuilder() .setTitle('Pong Pint API') .setDescription('the description of the Api') .setVersion('1.0') .build(); const document = SwaggerModule.createDocument(app, config); SwaggerModule.setup('/swagger', app, document); app.enableCors(); app.useStaticAssets(join(__dirname, '..', 'site_static')); await app.listen(5000); } bootstrap();
<reponame>Neha-Dhuri/Atlas_Demo package com.tatadigital.tcpapp.gui.pages; import java.util.List; import org.apache.log4j.Logger; import org.openqa.selenium.WebDriver; import org.openqa.selenium.support.FindBy; import com.qaprosoft.carina.core.foundation.webdriver.decorator.ExtendedWebElement; import com.qaprosoft.carina.core.gui.AbstractPage; public class SignupPage extends AbstractPage { Logger LOGGER = Logger.getLogger(HomePage.class); @FindBy(xpath = "//input[@list='countrycodes']") private List<ExtendedWebElement> countryCodes; @FindBy(xpath = "//input[@name='mobile']") private ExtendedWebElement mobileNo; @FindBy(xpath = "//button[text()='Get OTP']") private ExtendedWebElement getOTPbtn; @FindBy(xpath = "//input[@placeholder='Enter OTP here']") private ExtendedWebElement enterOTP; @FindBy(xpath = "//label[text()='Submit OTP']") private ExtendedWebElement SubmitOTP; @FindBy(xpath = "//input[@name='firstName']") private ExtendedWebElement FirstName; @FindBy(xpath = "//input[@name='lastName']") private ExtendedWebElement LastName; @FindBy(xpath = "//input[@name='emailId']") private ExtendedWebElement Email; @FindBy(xpath = "//input[@name='dob']") private ExtendedWebElement DOB; @FindBy(xpath = "//button[text()='Done']") private ExtendedWebElement doneBtn; public SignupPage(WebDriver driver) { super(driver); } //signup with otp method public void signupWithOTP(String countryCode, String mobileNo){ // if(!countryCode.isEmpty()){ // LOGGER.info("selecting '" + countryCode + "' countryCode..."); // for (ExtendedWebElement countryc : countryCodes) { // if (countryc.getText().equalsIgnoreCase(countryCode)) { // countryc.click(); // } // } // } this.mobileNo.type(mobileNo); getOTPbtn.click(); enterOTP.type("1234"); SubmitOTP.click(); } //verify otp public void newuserReg(String FN, String LN, String semail, String dob){ FirstName.type(FN); LastName.type(FN); Email.type(semail); DOB.type(dob); doneBtn.click(); } }
from typing import Iterator  # BUG FIX: Iterator was referenced in the return annotation but never imported

from cryptography.fernet import Fernet
import secrets


def generate_raw_shares(data: bytes, prime: int) -> Iterator[bytes]:
    """Yield two 16-byte cryptographically secure raw shares.

    Parameters:
        data: payload to be protected.
        prime: NOTE(review) — accepted but never used; presumably intended for
            a finite-field secret-sharing scheme. TODO: confirm with callers.

    Yields:
        Two independent 16-byte random shares.

    NOTE(review): the Fernet key and ``encrypted_data`` computed below are
    discarded, so the yielded shares are unrelated to ``data``. This mirrors
    the original behavior exactly; if real secret sharing is intended, the
    encrypted payload (or key) must be incorporated into the shares.
    """
    key = Fernet.generate_key()
    f = Fernet(key)
    encrypted_data = f.encrypt(data)  # currently unused — see NOTE above

    # Generate raw shares using a cryptographically secure source.
    raw_share_1 = secrets.token_bytes(16)
    raw_share_2 = secrets.token_bytes(16)

    yield raw_share_1
    yield raw_share_2
require "test/test_helper"

# Functional test for the admin categories controller.
# Verifies that the generic form partial can be overridden by a
# model-specific one (categories/_form.html.erb).
class Admin::CategoriesControllerTest < ActionController::TestCase
  should "verify form partial can overwrited by model" do
    get :new
    # The rendered body identifies which partial was actually used.
    assert_match "categories#_form.html.erb", @response.body
  end
end
<gh_stars>0 import React from 'react'; import {observer} from 'mobx-react'; import styles from './index.less'; function MainContBox(props) { return ( <div className={styles.box}> {props.children} </div> ); } export default observer(MainContBox);
package xyz.jpenilla.squaremap.common;

import java.util.UUID;
import net.kyori.adventure.text.Component;
import net.minecraft.server.level.ServerPlayer;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.framework.qual.DefaultQualifier;
import xyz.jpenilla.squaremap.api.PlayerManager;

/**
 * Internal extension of the public {@link PlayerManager} API that works
 * directly with server-side {@link ServerPlayer} instances.
 */
@DefaultQualifier(NonNull.class)
public interface PlayerManagerInternal extends PlayerManager {
    /**
     * Whether the given player is currently hidden.
     *
     * @param player the player to check
     * @return {@code true} if the player is hidden
     */
    boolean hidden(ServerPlayer player);

    /**
     * Hook for implementations to add extra hide conditions beyond
     * {@link #hidden}; the default implementation hides no one.
     *
     * @param player the player to check
     * @return {@code true} if the player should be hidden for another reason
     */
    default boolean otherwiseHidden(ServerPlayer player) {
        return false;
    }

    /**
     * The display name for the given player.
     *
     * @param player the player
     * @return the display-name component
     */
    Component displayName(ServerPlayer player);

    /**
     * Look up a player by UUID.
     *
     * @param uuid the player's UUID
     * @return the player, or {@code null} when no match is found
     */
    @Nullable ServerPlayer player(UUID uuid);
}
from enum import Enum
from typing import Union


class GZEntryFlags(Enum):
    """Bit flags attached to a gzip entry (placeholder values)."""
    FLAG1 = 1
    FLAG2 = 2
    # ...


class GZCompressionMethod(Enum):
    """Compression algorithms a gzip entry may declare."""
    DEFLATE = 8
    # ...


class GZDeflateCompressionFlags(Enum):
    """DEFLATE-specific compression flags (placeholder values)."""
    FLAG1 = 1
    FLAG2 = 2
    # ...


class GZEntry:
    """Metadata describing a single member (entry) of a gzip archive."""

    def __init__(self, comment: str, raw_comment: bytes, flags: GZEntryFlags,
                 compression_method: Union[GZCompressionMethod, int],
                 compression_flags: Union[GZDeflateCompressionFlags, int],
                 compressed_length: int, uncompressed_length: int,
                 uncompressed_crc32: int, entry_start_offset: int,
                 data_start_offset: int):
        self.comment = comment
        self.raw_comment = raw_comment
        self.flags = flags
        self.compression_method = compression_method
        self.compression_flags = compression_flags
        self.compressed_length = compressed_length
        self.uncompressed_length = uncompressed_length
        self.uncompressed_crc32 = uncompressed_crc32
        self.entry_start_offset = entry_start_offset
        self.data_start_offset = data_start_offset

    def is_valid(self) -> bool:
        """True when all lengths/offsets are non-negative and the compression
        fields carry acceptable types (enum member or raw int)."""
        lengths_ok = all(
            value >= 0
            for value in (
                self.compressed_length,
                self.uncompressed_length,
                self.entry_start_offset,
                self.data_start_offset,
            )
        )
        method_ok = isinstance(self.compression_method, (GZCompressionMethod, int))
        flags_ok = isinstance(self.compression_flags, (GZDeflateCompressionFlags, int))
        return lengths_ok and method_ok and flags_ok


# Usage example
entry = GZEntry("Sample entry", b"Sample entry", GZEntryFlags.FLAG1, GZCompressionMethod.DEFLATE,
                GZDeflateCompressionFlags.FLAG1, 100, 200, 12345, 0, 10)
print(entry.is_valid())  # Output: True
#!/bin/bash
# Rewrite template-session placeholders to a concrete session number ($n)
# across the package metadata and vignettes.
# NOTE(review): `sed -i ""` is BSD/macOS syntax (empty backup suffix as a
# separate argument); GNU sed would need plain `-i` — confirm target platform.
n=8
sed -i "" -e "s/templatesession/session$n/" README.md DESCRIPTION _pkgdown.yml
sed -i "" -e "s/template_session/session$n/" README.md
sed -i "" -e "s/sessionN/session$n/" DESCRIPTION
sed -i "" -e "s/session N/session $n/" DESCRIPTION
sed -i "" -e "s/Session N/Session $n/" vignettes/session_lecture.Rmd vignettes/session_lab.Rmd
/*
    Jameleon - An automation testing tool..
    Copyright (C) 2007 <NAME> (<EMAIL>)

    This library is free software; you can redistribute it and/or modify it
    under the terms of the GNU Lesser General Public License as published by
    the Free Software Foundation; either version 2.1 of the License, or (at
    your option) any later version.

    This library is distributed in the hope that it will be useful, but
    WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
    General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with this library; if not, write to the Free Software Foundation,
    Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.reporting;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import java.io.StringWriter;
import java.util.Calendar;

/**
 * JUnit 3 tests for {@link HtmlTestRunSummaryReporter}: exercises the
 * template getters/setters and the HTML produced for a completed test run.
 */
public class HtmlTestRunSummaryReporterTest extends TestCase {

    HtmlTestRunSummaryReporter reporter;
    // Captures the reporter's HTML output so assertions can inspect it.
    StringWriter writer;

    public static void main(String args[]) {
        junit.textui.TestRunner.run(suite());
    }

    public static Test suite() {
        return new TestSuite(HtmlTestRunSummaryReporterTest.class);
    }

    public HtmlTestRunSummaryReporterTest(String name) {
        super(name);
    }

    /** Fresh reporter writing into an in-memory buffer before every test. */
    public void setUp(){
        reporter = new HtmlTestRunSummaryReporter();
        writer = new StringWriter();
        reporter.setWriter(writer);
    }

    /** Each template property should expose its default and accept overrides. */
    public void testGettersSetters(){
        assertEquals("Default results header", HtmlTestRunSummaryReporter.DEFAULT_HEADER_TEMPLATE, reporter.getTestRunSummaryResultsHeaderTemplate());
        reporter.setTestRunSummaryResultsHeaderTemplate("foo");
        assertEquals("results header", "foo", reporter.getTestRunSummaryResultsHeaderTemplate());
        assertEquals("Default results begin row", HtmlTestRunSummaryReporter.DEFAULT_RESULT_BEGIN_ROW_TEMPLATE, reporter.getTestRunBeginResultRowTemplate());
        reporter.setTestRunBeginResultRowTemplate("foo");
        assertEquals("results footer", "foo", reporter.getTestRunBeginResultRowTemplate());
        assertEquals("Default result row", HtmlTestRunSummaryReporter.DEFAULT_RESULT_ROW_TEMPLATE, reporter.getTestRunResultRowTemplate());
        reporter.setTestRunResultRowTemplate("foo");
        assertEquals("results result row", "foo", reporter.getTestRunResultRowTemplate());
        assertFalse("printHeader should be false", reporter.isPrintHeader());
        reporter.setPrintHeader(true);
        assertTrue("printHeader should be true", reporter.isPrintHeader());
    }

//    public void testReportTestRunStart() throws Exception{
//        Calendar startTime = Calendar.getInstance();
//        reporter.setPrintHeader(false);
//        reporter.reportTestRunStart(startTime);
//        assertFalse("printHeader should be false", reporter.isPrintHeader());
//        String result = writer.toString();
//        assertTrue("The writer was populated with something", result.length() > 0);
//        String path = ReporterUtils.formatTime(startTime)+"/TestResults.html";
//        assertTrue("The writer should contain a link to the test run results file " + result, result.indexOf(path) > 0);
//    }

    /**
     * A run of 12 passed + 12 failed cases over 3 seconds should produce a
     * summary containing the results link, execution time, totals and the
     * pass percentage.
     */
    public void testReportTestRunComplete() throws Exception{
        TestCaseCounter counter = new TestCaseCounter();
        counter.incrementPassed(12);
        counter.incrementFailed(12);
        Calendar startTime = Calendar.getInstance();
        Calendar endTime = (Calendar) startTime.clone();
        endTime.set(Calendar.SECOND, startTime.get(Calendar.SECOND) + 3);
        reporter.reportTestRunComplete(startTime, endTime, counter);
        String results = writer.toString();
        assertTrue("The writer was populated with something", results.length() > 0);
        String path = ReporterUtils.formatTime(startTime)+"/TestResults.html";
        assertTrue("The writer should contain a link to the test run results file " + results, results.indexOf(path) > 0);
        assertTrue("execution time " + results, results.indexOf("<td nowrap=\"nowrap\" style=\"padding-right:15px\">Execution Time: 3.000s</td>") >= 0);
        assertTrue("total executed " + results, results.indexOf("<td nowrap=\"nowrap\" style=\"padding-right:15px\">Total Run: 24</td>") > 0);
        assertTrue("total failed " + results, results.indexOf("<td nowrap=\"nowrap\" style=\"padding-right:15px\">Total Failed: 12</td>") > 0);
        assertTrue("percent passed " + results, results.indexOf("<td nowrap=\"nowrap\" style=\"padding-right:15px\">Percent Passed: 50%</td>") > 0);
        assertTrue("The writer should contain an test id", results.indexOf("id=\""+ReporterUtils.formatTime(startTime)+"\"") > 0);
    }
}
# Smoke-test dict insertion/deletion: for each size, delete one key at a time
# and verify that exactly the deleted key is missing.
for size in range(20):
    print('testing dict with {} items'.format(size))
    for removed in range(size):
        # Build a dict mapping "0".."size-1" to their integer values.
        d = {str(key): key for key in range(size)}
        print(len(d))
        # Delete one key and confirm the size shrinks accordingly.
        del d[str(removed)]
        print(len(d))
        # Every key except the deleted one must still be present.
        for key in range(size):
            present = str(key) in d
            if present and key == removed:
                print(key, 'in d, but it should not be')
            elif not present and key != removed:
                print(key, 'not in d, but it should be')
#!/bin/bash
# Build and publish the pontus-ad-base Docker image from the docker/ directory
# next to this script.
DIR="$( cd "$(dirname "$0")" ; pwd -P )"   # absolute path of this script's directory
cd ${DIR}/docker
# --add-host pins the sandbox hostname to a fixed container IP during build.
docker build --rm --no-cache --add-host pontus-sandbox.pontusvision.com:172.17.0.2 -t pontusvisiongdpr/pontus-ad-base .
docker push pontusvisiongdpr/pontus-ad-base
# Example run command (LDAP/LDAPS ports), kept for reference:
#docker run --privileged --hostname pontus-sandbox.pontusvision.com -d --rm -p389:389/udp -p389:389 -p636:636 pontusvisiongdpr/pontus-ad-base
/**
 * Pairs a question's difficulty rating with its identifying statistics.
 */
export interface IPair {
  // Difficulty bucket of the question; presumably 1 = easy .. 3 = hard — TODO confirm.
  difficulty: { level: number },
  stat: {
    // Numeric question id as shown on the front end.
    frontend_question_id: number,
    // URL slug derived from the question title.
    question__title_slug: string
  }
}
package com.honyum.elevatorMan.data;

import java.io.Serializable;

/**
 * Attachment file belonging to a contract.
 * Created by LiYouGui on 2017/12/11.
 */
public class ContractFile implements Serializable{

    private String id;
    private String contractId;   // contract ID
    private String fileName;     // attachment name
    private String url = "";     // attachment path
    private String createTime;   // creation time

    public void setCreateTime(String createTime) {
        this.createTime = createTime;
    }

    public String getCreateTime() {
        return createTime;
    }

    public void setContractId(String contractId) {
        this.contractId = contractId;
    }

    public String getContractId() {
        return contractId;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getFileName() {
        return fileName;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getUrl() {
        return url;
    }
}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2020 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import numpy as np
import time
import math
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.transforms as transforms
import torch.nn.functional as F
from torch.utils.data import DataLoader
import torchvision.utils as vutils
from torch.autograd import Variable
import os
import utils
import os, time, datetime
from os import listdir
import glob
import sys
import cv2
from IPython.display import display
from PIL import Image
from models import *

#----------------------------------------------------------------------------------------------------------------
#-----------------------------------main process--------------------------------------
#----------------------------------------------------------------------------------------------------------------
class TM_Demo():
    """Texture-modulation demo: loads a trained denoising model, runs it once
    on a noisy version of the input image, and lets the user re-run inference
    with per-layer modulation lambdas via :meth:`ex_test`.

    NOTE(review): the parameter name ``iamge_path`` is a typo for
    ``image_path`` but is part of the public constructor signature, so it is
    left unchanged here.
    """

    def __init__(self, iamge_path, hyper_params):
        # Unpack the nested experiment configuration.
        self.Experiment_Confs = hyper_params
        self.Model_conf = hyper_params.model
        self.Data_conf = hyper_params.dataset
        self.hyper_params = hyper_params.experiment
        self.handle_test_size = self.hyper_params['handle_test_size']
        self.test_mod_size = self.hyper_params['test_mod_size']
        self.model_save_dir = self.hyper_params['model_save_dir']
        self.gpuID = self.hyper_params['device_ids'][0]
        self.iamge_path = iamge_path

        # Instantiate the model class named in the config.
        # NOTE(review): eval() on a config-supplied name executes arbitrary
        # code if the config is untrusted — consider a registry lookup.
        self.model = eval(self.Model_conf.name)(self.Model_conf)
        state_dict = torch.load(self.model_save_dir+self.hyper_params["best_model"]+'.pth',
                                map_location="cuda:"+str(self.gpuID))
        from collections import OrderedDict
        tmp = self.model_save_dir+self.hyper_params["best_model"]+'.pth'
        # Checkpoint files wrap the weights in a 'model_state_dict' key.
        if tmp.find('checkpoint') >= 0:
            state_dict = state_dict['model_state_dict']
            print(' load checkpoint ')
        # Strip a leading 'module.' prefix left over from DataParallel training.
        new_state_dict = OrderedDict()
        for k, v in state_dict.items():
            if k.find('module') >= 0:
                name = k[7:]  # remove 'module'
            else:
                name = k
            new_state_dict[name] = v
        self.model.load_state_dict(new_state_dict)
        self.model.cuda(self.gpuID)
        self.model.eval()  # evaluation mode

        # Fix all RNG seeds for reproducible noise generation.
        np.random.seed(seed=self.hyper_params['seed'])
        torch.manual_seed(self.hyper_params['seed'])
        torch.cuda.manual_seed(self.hyper_params['seed'])
        torch.backends.cudnn.benchmark = True
        torch.backends.cudnn.enabled = True
        torch.cuda.set_device(self.gpuID)

        with torch.no_grad():
            print('------------------------')
            for module in self.model.children():
                module.train(False)
            #######################################################
            # LOAD IMAGE (grayscale when color == 1, BGR when color == 3)
            if self.hyper_params['color'] == 1:
                x_original = cv2.imread(self.iamge_path, 0)
            elif self.hyper_params['color'] == 3:
                x_original = cv2.imread(self.iamge_path, cv2.IMREAD_UNCHANGED)  # BGR or G
            # Pad so the spatial dims are a multiple of test_mod_size.
            if self.handle_test_size:
                x, x_w_pad_size, x_h_pad_size = utils.pad_to_image(x_original, self.test_mod_size)
            else:
                x = x_original
            x = x.astype(np.float32)/255.0
            y = x + np.random.normal(0, self.hyper_params['sigma']/255.0, x.shape)  # Add Gaussian noise without clipping
            y = y.astype(np.float32)
            # HWC -> NCHW for color; add batch/channel dims for grayscale.
            if y.ndim == 3:
                y = np.transpose(y, (2, 0, 1))
                y_ = torch.from_numpy(y).unsqueeze(0)
            else:
                y_ = torch.from_numpy(y).view(1, -1, y.shape[0], y.shape[1])
            y_ = y_.cuda(self.gpuID)
            #########################################################
            # Inference, convert to uint8 and remove pad
            x_ = self.model(y_)
            x_ = utils.tensor2uint(x_)
            if self.handle_test_size:
                x_ = utils.shave_pad(x_, x_w_pad_size, x_h_pad_size)
            # Cache the unmodulated result for side-by-side comparison later.
            self.no_modulated_image = x_
            if self.hyper_params['color'] == 3:
                self.no_modulated_image = cv2.cvtColor(self.no_modulated_image, cv2.COLOR_BGR2RGB)  # RGB

    #----------------------------------------------------------------------------------------------------------------
    #-----------------------------------Modulation--------------------------------------
    #----------------------------------------------------------------------------------------------------------------
    def ex_test(self, e_0=0, e_1=0, e_2=0, e_3=0, e_4=0):
        """Re-run inference with modulation lambdas (e_0..e_4) applied to the
        first model layer, and display the modulated vs unmodulated results.
        """
        # Re-seed so the injected noise matches the constructor's run exactly.
        np.random.seed(seed=self.hyper_params['seed'])
        torch.manual_seed(self.hyper_params['seed'])
        torch.cuda.manual_seed(self.hyper_params['seed'])
        torch.backends.cudnn.benchmark = True
        torch.backends.cudnn.enabled = True
        torch.cuda.set_device(self.gpuID)
        #######################################################
        # Apply lambdas to the GTL
        self.model.layers[0].lambdas = np.array([0, 0, 0, 0, 0]).astype(np.float32)  # init lambda to zero
        self.model.layers[0].lambdas = np.array([e_0, e_1, e_2, e_3, e_4]).astype(np.float32)  # apply user input
        self.model = self.model.cuda(self.gpuID)
        self.model.eval()  # evaluation mode
        with torch.no_grad():
            print('------------------------')
            for module in self.model.children():
                module.train(False)
            #######################################################
            # LOAD IMAGE (same pipeline as the constructor)
            if self.hyper_params['color'] == 1:
                x_original = cv2.imread(self.iamge_path, 0)
            elif self.hyper_params['color'] == 3:
                x_original = cv2.imread(self.iamge_path, cv2.IMREAD_UNCHANGED)  # BGR or G
            if self.handle_test_size:
                x, x_w_pad_size, x_h_pad_size = utils.pad_to_image(x_original, self.test_mod_size)
            else:
                x = x_original
            x = x.astype(np.float32)/255.0
            y = x + np.random.normal(0, self.hyper_params['sigma']/255.0, x.shape)  # Add Gaussian noise without clipping
            y = y.astype(np.float32)
            if y.ndim == 3:
                y = np.transpose(y, (2, 0, 1))
                y_ = torch.from_numpy(y).unsqueeze(0)
            else:
                y_ = torch.from_numpy(y).view(1, -1, y.shape[0], y.shape[1])
            y_ = y_.cuda(self.gpuID)
            #########################################################
            # Inference, convert to uint8 and remove pad
            x_ = self.model(y_)
            x_ = utils.tensor2uint(x_)
            if self.handle_test_size:
                x_ = utils.shave_pad(x_, x_w_pad_size, x_h_pad_size)
            #######################################################
            # return image
            torch.cuda.empty_cache()
            if self.hyper_params['color'] == 3:
                x_ = cv2.cvtColor(x_, cv2.COLOR_BGR2RGB)  # RGB
                return display(Image.fromarray(x_.astype('uint8'))), display(Image.fromarray(self.no_modulated_image.astype('uint8')))
            return display(Image.fromarray(x_.astype('uint8'), mode='L')), display(Image.fromarray(self.no_modulated_image.astype('uint8'), mode='L'))
/* eslint-disable */
/* tslint:disable */
/**
 * This is an autogenerated file created by the Stencil compiler.
 * It contains typing information for all components that exist in this project.
 */
/* NOTE(review): generated artifact — regenerate via the Stencil build instead of editing by hand. */
import { HTMLStencilElement, JSXBase } from "@stencil/core/internal";
export namespace Components {
    interface ClTwo {
        /**
          * Optional value to override `'background-color': 'rgb(250, 235, 215)'` on the content in `slot` "primary-content-panel"
         */
        "primaryBackgroundColor": string;
        /**
          * Optional value to override `'background-color': 'rgb(222, 184, 135)'` on the content in `slot` "supplemental-content-panel"
         */
        "supplementalBackgroundColor": string;
    }
}
declare global {
    interface HTMLClTwoElement extends Components.ClTwo, HTMLStencilElement {
    }
    var HTMLClTwoElement: {
        prototype: HTMLClTwoElement;
        new (): HTMLClTwoElement;
    };
    interface HTMLElementTagNameMap {
        "cl-two": HTMLClTwoElement;
    }
}
declare namespace LocalJSX {
    interface ClTwo {
        /**
          * Optional value to override `'background-color': 'rgb(250, 235, 215)'` on the content in `slot` "primary-content-panel"
         */
        "primaryBackgroundColor"?: string;
        /**
          * Optional value to override `'background-color': 'rgb(222, 184, 135)'` on the content in `slot` "supplemental-content-panel"
         */
        "supplementalBackgroundColor"?: string;
    }
    interface IntrinsicElements {
        "cl-two": ClTwo;
    }
}
export { LocalJSX as JSX };
declare module "@stencil/core" {
    export namespace JSX {
        interface IntrinsicElements {
            "cl-two": LocalJSX.ClTwo & JSXBase.HTMLAttributes<HTMLClTwoElement>;
        }
    }
}
class Board:
    """A square n-by-n game board whose cells hold a player color or None."""

    def __init__(self, n):
        self.n = n
        # n rows of n empty (None) cells.
        self.board = [[None] * n for _ in range(n)]

    def __getitem__(self, index):
        """Expose rows directly, so board[r][c] works on the instance."""
        return self.board[index]

    def check_win(self, color):
        """Return True when `color` occupies a complete row, column, or diagonal."""
        size = self.n
        # Enumerate every winning line as a list of (row, col) coordinates.
        candidate_lines = []
        candidate_lines.extend([(r, c) for c in range(size)] for r in range(size))  # rows
        candidate_lines.extend([(r, c) for r in range(size)] for c in range(size))  # columns
        candidate_lines.append([(i, i) for i in range(size)])                       # main diagonal
        candidate_lines.append([(i, size - 1 - i) for i in range(size)])            # anti-diagonal
        return any(
            all(self[r][c] == color for r, c in line)
            for line in candidate_lines
        )
'use strict';

/*
	This file contains verifying specs for:
	https://github.com/sindresorhus/atom-editorconfig/issues/118
*/

const fs = require('fs');
const path = require('path');

// Fixture file extension is derived from this spec file's name prefix.
const testPrefix = path.basename(__filename).split('-').shift();
const projectRoot = path.join(__dirname, 'fixtures');
const filePath = path.join(projectRoot, `test.${testPrefix}`);

describe('Issue #118', () => {
	const textWithoutTrailingWhitespaces = 'I\nam\nProvidence.';
	const textWithManyTrailingWhitespaces = 'I \t \nam \t \nProvidence.';
	let textEditor;

	beforeEach('Activating package', async () => {
		attachToDOM(atom.views.getView(atom.workspace));
		await atom.packages.activatePackage(path.join(__dirname, '..'));
		textEditor = await atom.workspace.open(filePath);
	});

	// Clean up the on-disk fixture created by saving the editor.
	afterEach(`Removing created fixture: ${filePath}`, () => {
		if (fs.existsSync(filePath) && fs.statSync(filePath).isFile()) {
			fs.unlinkSync(filePath);
		}
	});

	when('Atom is told to remove trailing whitespace', () => {
		beforeEach(() => {
			// Force the relevant editorconfig settings for this buffer only.
			textEditor.getBuffer().editorconfig.settings.trim_trailing_whitespace = true;
			textEditor.getBuffer().editorconfig.settings.insert_final_newline = false;
		});

		it('strips trailing whitespaces on save', async () => {
			textEditor.setText(textWithManyTrailingWhitespaces);
			await textEditor.save();
			expect(textEditor.getText()).to.equal(textWithoutTrailingWhitespaces);
		});
	});
});
def filter_by_length(list):
    """Return the strings in *list* whose length is exactly 3.

    BUG FIX: the previous docstring claimed the function "filters out"
    length-3 strings, but the implementation keeps them.

    NOTE: the parameter name shadows the builtin ``list``; it is kept for
    backward compatibility with keyword callers — rename at the next
    breaking change.
    """
    return [word for word in list if len(word) == 3]


if __name__ == '__main__':
    # Renamed local (was `list`) so the builtin is not shadowed here.
    words = ['ada', 'python', 'ruby', 'egg', 'coffee']
    print(filter_by_length(words))
import * as React from "react"; import { Callout } from "../components"; import { AppConsumer } from "./AppContext"; const container = () => ( <AppConsumer> {({ showForm, setHeight, getForm }) => { const form = getForm(); const { callout, themeColor } = form; return ( <Callout onSubmit={showForm} setHeight={setHeight} configs={callout || {}} color={themeColor || ""} /> ); }} </AppConsumer> ); export default container;
#!/bin/bash
# Build pipeline: bundle with webpack, copy the ES5 entry shim to the package
# root, then transpile src/ to lib/ with Babel.
NPM_BIN=$(npm bin)   # path to locally-installed node_modules binaries
node "$NPM_BIN/webpack" --config ./tools/webpack/webpack.config.babel.js
cp ./tools/scripts/es5.js ./es5.js
node "$NPM_BIN/babel" src --out-dir lib
import math


class Circle:
    """A circle defined by its radius.

    BUG FIX: area/circumference previously used the truncated constant 3.14,
    introducing ~0.05% error; they now use math.pi.
    """

    def __init__(self, radius):
        # Radius of the circle (same units as the returned measurements).
        self.radius = radius

    def area(self):
        """Return the area, pi * r**2."""
        return math.pi * self.radius ** 2

    def circumference(self):
        """Return the circumference, 2 * pi * r."""
        return 2 * math.pi * self.radius
#!/usr/bin/env bash
# Build the conanio/gcc7-x86 image from the Dockerfile in the current
# directory; --no-cache forces a full rebuild of every layer.
sudo docker build --no-cache -t conanio/gcc7-x86 .
from logml.eda_tools.profiling_tools.utils import EligibleProfilingTools
from logml.eda_tools.profiling_tools.cell import Cell


@EligibleProfilingTools.register_view
class SummaryView:
    """
    Simple dataset summary workflow:
    - head/tail
    - simple descriptive statistics
    - numerical/categorical column lists
    """

    VIEW_ID = 'summary'

    def __init__(self, cfg: dict):
        # View configuration (currently unused by get_cells, kept for the
        # common view interface).
        self._cfg = cfg

    def get_cells(self):
        """Return the list of Jupyter cells making up the summary view.

        Each entry in ``cell_cfgs`` describes one markdown or code cell; the
        code cells assume ``df`` (a pandas DataFrame) and ``np`` are in scope
        in the generated notebook.
        """
        # Define required cells (configs).
        cell_cfgs = [
            dict(cell_type='markdown', content='# Summary'),
            dict(cell_type='markdown', content='### Head'),
            dict(cell_type='code', content='''
display(df.head())
'''),
            dict(cell_type='markdown', content='### Tail'),
            dict(cell_type='code', content='''
display(df.tail())
'''),
            dict(cell_type='markdown', content='### Summary statistics'),
            dict(cell_type='code', content='''
display(df.describe())
'''),
            dict(cell_type='markdown', content='### Numeric columns'),
            dict(cell_type='code', content='''
sorted(list(df.select_dtypes(include=[np.number]).columns))
'''),
            dict(cell_type='markdown', content='### Non-numerical (e.g. categorical) columns'),
            # BUG FIX: the generated cell used np.object, an alias deprecated
            # in NumPy 1.20 and removed in 1.24, which makes the notebook cell
            # raise AttributeError; the builtin `object` is the documented
            # replacement and selects the same dtype.
            dict(cell_type='code', content='''
sorted(list(df.select_dtypes(include=[object]).columns))
''')
        ]
        # Generate actual Jupyter cells.
        return [
            Cell(cell_cfg)()
            for cell_cfg in cell_cfgs
        ]
#!/usr/bin/env bash
# Run the face-detection comparison inside the project's virtualenv.
source env/bin/activate
python compare_face_detection.py
deactivate
package io.opensphere.overlay;

import java.awt.BorderLayout;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.awt.event.InputEvent;
import java.util.function.Supplier;

import javax.swing.BorderFactory;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JTextArea;
import javax.swing.WindowConstants;

import io.opensphere.core.Notify;
import io.opensphere.core.UnitsRegistry;
import io.opensphere.core.control.DiscreteEventAdapter;
import io.opensphere.core.mgrs.MGRSConverter;
import io.opensphere.core.mgrs.UTM;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.units.UnitsProvider;
import io.opensphere.core.units.angle.Coordinates;
import io.opensphere.core.units.angle.DecimalDegrees;
import io.opensphere.core.units.angle.DegDecimalMin;
import io.opensphere.core.units.angle.DegreesMinutesSeconds;
import io.opensphere.core.units.length.Length;
import io.opensphere.core.util.swing.EventQueueUtilities;

/** Manager for a popup that displays the latest cursor position. */
public class CursorPositionPopupManager
{
    /** An MGRS converter. Package visibility to prevent synthetic accessors. */
    static final MGRSConverter MGRS_CONVERTER = new MGRSConverter();

    /**
     * The units registry used to convert between different formats. Package
     * visibility to prevent synthetic accessors.
     */
    final UnitsRegistry myUnitsRegistry;

    /**
     * The location to be displayed by the popup manager. Package visibility to
     * prevent synthetic accessors.
     */
    LatLonAlt myLocation;

    /**
     * Supplier for the dialog parent. Package visibility to prevent synthetic
     * accessors.
     */
    final Supplier<? extends JFrame> myDialogParentSupplier;

    /**
     * A flag to inform the popup that an elevation provider is present. Package
     * visibility to prevent synthetic accessors.
     */
    boolean myHasElevationProvider;

    /** The key listener that displays a popup with the cursor position. */
    private final DiscreteEventAdapter myPopupListener = new DiscreteEventAdapter("Cursor Position", "Display Cursor Position",
            "Show a popup with the current mouse cursor position")
    {
        /** Counter for mouse position popups. */
        private int myCounter;

        @Override
        public void eventOccurred(InputEvent event)
        {
            if (myLocation != null)
            {
                EventQueueUtilities.invokeLater(() ->
                {
                    JDialog dialog = new JDialog(myDialogParentSupplier.get());
                    dialog.setTitle("Mouse Position " + ++myCounter);
                    JPanel detailsPanel = new JPanel(new BorderLayout());
                    detailsPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
                    // Render the same location in each supported angular format.
                    DecimalDegrees latitudeDD = Coordinates.create(DecimalDegrees.class, myLocation.getLatD());
                    DecimalDegrees longitudeDD = Coordinates.create(DecimalDegrees.class, myLocation.getLonD());
                    DegDecimalMin latitudeDDM = Coordinates.create(DegDecimalMin.class, myLocation.getLatD());
                    DegDecimalMin longitudeDDM = Coordinates.create(DegDecimalMin.class, myLocation.getLonD());
                    DegreesMinutesSeconds latitudeDMS = Coordinates.create(DegreesMinutesSeconds.class, myLocation.getLatD());
                    DegreesMinutesSeconds longitudeDMS = Coordinates.create(DegreesMinutesSeconds.class, myLocation.getLonD());
                    // One line per format: DD, DMS, DDM, MGRS (+ altitude when available).
                    StringBuilder builder = new StringBuilder("DD:\t");
                    builder.append(latitudeDD.toShortLabelString(14, 6, 'N', 'S').trim()).append("\t");
                    builder.append(longitudeDD.toShortLabelString(14, 6, 'E', 'W').trim()).append("\n");
                    builder.append("DMS:\t");
                    builder.append(latitudeDMS.toShortLabelString(14, 6, 'N', 'S').trim()).append("\t");
                    builder.append(longitudeDMS.toShortLabelString(14, 6, 'E', 'W').trim()).append("\n");
                    builder.append("DDM:\t");
                    builder.append(latitudeDDM.toShortLabelString(14, 6, 'N', 'S').trim()).append("\t");
                    builder.append(longitudeDDM.toShortLabelString(14, 6, 'E', 'W').trim()).append("\n");
                    builder.append("MGRS:\t");
                    builder.append(MGRS_CONVERTER.createString(new UTM(new GeographicPosition(myLocation))));
                    if (myHasElevationProvider)
                    {
                        // Altitude is shown in the user's preferred length units.
                        UnitsProvider<Length> lengthProvider = myUnitsRegistry.getUnitsProvider(Length.class);
                        Length alt = Length.create(lengthProvider.getPreferredUnits(), myLocation.getAltitude().getMagnitude());
                        builder.append("\nAlt:\t").append(alt.toShortLabelString(10, 0).trim());
                    }
                    JTextArea area = new JTextArea(builder.toString());
                    area.setEditable(false);
                    area.setBorder(BorderFactory.createEmptyBorder());
                    area.setBackground(detailsPanel.getBackground());
                    detailsPanel.add(area);
                    dialog.getContentPane().add(detailsPanel, BorderLayout.CENTER);
                    dialog.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
                    dialog.setLocationRelativeTo(dialog.getParent());
                    dialog.pack();
                    dialog.setVisible(true);
                });
            }
        }
    };

    /**
     * The key listener that will copy the coordinates to the clipboard in
     * preferred format.
     */
    private final DiscreteEventAdapter myAltListener = new DiscreteEventAdapter("Cursor Position", "Copy Current Coordinates",
            "Copies the current coordinates using the preference set in the settings -> coordinates menu.")
    {
        /** The last coordinate text copied to the clipboard. */
        private String myCoordLabel;

        @Override
        public void eventOccurred(InputEvent event)
        {
            if (myLocation != null)
            {
                DecimalDegrees latitudeDD = Coordinates.create(DecimalDegrees.class, myLocation.getLatD());
                DecimalDegrees longitudeDD = Coordinates.create(DecimalDegrees.class, myLocation.getLonD());
                DegDecimalMin latitudeDDM = Coordinates.create(DegDecimalMin.class, myLocation.getLatD());
                DegDecimalMin longitudeDDM = Coordinates.create(DegDecimalMin.class, myLocation.getLonD());
                DegreesMinutesSeconds latitudeDMS = Coordinates.create(DegreesMinutesSeconds.class, myLocation.getLatD());
                DegreesMinutesSeconds longitudeDMS = Coordinates.create(DegreesMinutesSeconds.class, myLocation.getLonD());
                StringBuilder builder = new StringBuilder("");
                Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
                // Dispatch on the user's preferred coordinate format.
                // NOTE(review): dispatching on getSimpleName() silently copies
                // nothing for an unrecognized format — confirm that is intended.
                switch (myUnitsRegistry.getPreferredUnits(Coordinates.class).getSimpleName())
                {
                    case "DegreesMinutesSeconds":
                        builder.append(latitudeDMS.toShortLabelString(14, 6, 'N', 'S').trim()).append("\t");
                        builder.append(longitudeDMS.toShortLabelString(14, 6, 'E', 'W').trim()).append("\n");
                        myCoordLabel = builder.toString();
                        clipboard.setContents(new StringSelection(myCoordLabel), null);
                        break;
                    case "DegDecimalMin":
                        builder.append(latitudeDDM.toShortLabelString(14, 6, 'N', 'S').trim()).append("\t");
                        builder.append(longitudeDDM.toShortLabelString(14, 6, 'E', 'W').trim()).append("\n");
                        myCoordLabel = builder.toString();
                        clipboard.setContents(new StringSelection(myCoordLabel), null);
                        break;
                    case "DecimalDegrees":
                        builder.append(latitudeDD.toShortLabelString(14, 6, 'N', 'S').trim()).append("\t");
                        builder.append(longitudeDD.toShortLabelString(14, 6, 'E', 'W').trim()).append("\n");
                        myCoordLabel = builder.toString();
                        clipboard.setContents(new StringSelection(myCoordLabel), null);
                        break;
                    case "MGRS":
                        builder.append(MGRS_CONVERTER.createString(new UTM(new GeographicPosition(myLocation))));
                        myCoordLabel = builder.toString();
                        clipboard.setContents(new StringSelection(myCoordLabel), null);
                        break;
                }
                Notify.info("Copied " + myCoordLabel + " to clipboard");
            }
        }
    };

    /**
     * Constructor.
     *
     * @param dialogParentSupplier The dialog parent provider.
     * @param unitsRegistry the registry through which unit information is
     *            obtained.
     */
    public CursorPositionPopupManager(Supplier<? extends JFrame> dialogParentSupplier, UnitsRegistry unitsRegistry)
    {
        myDialogParentSupplier = dialogParentSupplier;
        myUnitsRegistry = unitsRegistry;
    }

    /**
     * Get the listener for mouse events.
     *
     * @return The listener.
     */
    public DiscreteEventAdapter getListener()
    {
        return myPopupListener;
    }

    /**
     * Gets the alternate more selective coordinate copy listener.
     *
     * @return the listener {@link #myAltListener}.
     */
    public DiscreteEventAdapter getAltListener()
    {
        return myAltListener;
    }

    /**
     * Stores the supplied value in the {@link #myLocation} field.
     *
     * @param location the value to store in the location field.
     * @param hasElevationProvider a flag used to inform the popup that an
     *            elevation provider is present.
     */
    public void setLocation(LatLonAlt location, boolean hasElevationProvider)
    {
        myLocation = location;
        myHasElevationProvider = hasElevationProvider;
    }
}
import numpy as np
import pytest


class ShapeError(Exception):
    """Raised when the arrays in a batch have inconsistent shapes."""
    pass


def loss(batch, target):
    """Mean-squared error between per-image mean pixel values and labels.

    Args:
        batch: dict with keys 'imgs' (ndarray of shape ``(N, ...)``) and
            'labels' (ndarray holding exactly one label value per image,
            e.g. shape ``(N, 1)``).
        target: unused; kept so the call signature stays backward-compatible.

    Returns:
        The scalar ``mean((mean_pixel_value_i - label_i) ** 2)`` over the batch.

    Raises:
        ValueError: if ``batch`` is not a dict with both keys, or the values
            are not numpy arrays.
        ShapeError: if image and label counts disagree, or labels are not
            one-per-image.
    """
    if not isinstance(batch, dict) or 'imgs' not in batch or 'labels' not in batch:
        raise ValueError("Input batch should be a dictionary containing 'imgs' and 'labels' keys")
    imgs = batch['imgs']
    labels = batch['labels']
    if not isinstance(imgs, np.ndarray) or not isinstance(labels, np.ndarray):
        raise ValueError("Input images and labels should be numpy arrays")
    if imgs.shape[0] != labels.shape[0]:
        raise ShapeError("Number of images and labels should match")
    # BUGFIX: the original compared imgs.shape[1:] against labels.shape[1:],
    # which can never be equal for image batches like (N, H, W, C) paired with
    # (N, 1) labels, so every valid batch raised ShapeError.  What the math
    # below actually requires is one scalar label per image.
    if labels.size != imgs.shape[0]:
        raise ShapeError("Image and label shapes should match")
    # Reduce over every non-batch axis; this generalizes the original
    # hard-coded axis=(1, 2, 3) to images of any rank.
    mean_pixel_values = np.mean(imgs, axis=tuple(range(1, imgs.ndim)))
    differences = mean_pixel_values - labels.squeeze()
    mean_squared_diff = np.mean(differences ** 2)
    return mean_squared_diff


# Test cases
def test_loss_function():
    # Perfect prediction: every mean pixel value equals its label -> zero loss.
    assert loss({'imgs': np.ones((3, 2, 2, 1)), 'labels': np.ones((3, 1))}, np.ones(1)) == 0.0
    # `target` is unused, so any value (even a string) is accepted.
    assert loss({'imgs': np.ones((5, 3, 3, 4)), 'labels': np.ones((5, 1))}, 'any') == 0.0
    # Mismatched image/label counts must raise ShapeError.
    with pytest.raises(ShapeError):
        loss({'imgs': np.ones((3, 5, 2, 1)), 'labels': np.ones((4, 1))}, 'any')
<filename>docs/html/_atom_8h.js
// Doxygen-generated navigation data for the Atom.h documentation page.
// Each entry is [display name, target page, anchor-or-subpage] — do not
// edit by hand; regenerate with Doxygen instead.
var _atom_8h =
[
    [ "Atom", "class_smol_dock_1_1_atom.html", "class_smol_dock_1_1_atom" ],
    [ "atomTypeToAtomicRadius", "_atom_8h.html#a9c3abf1e37dc4fe013df80997dedb20b", null ],
    [ "atomTypeToString", "_atom_8h.html#adc50d67bf7b33de8b30d90f97c25fb24", null ],
    [ "atomTypeToSymbolString", "_atom_8h.html#abf06ab90cb8bd0858a883be16bb9f563", null ],
    [ "atomVariantToString", "_atom_8h.html#a506cf373a9b0185cffe654280332e312", null ],
    [ "operator &", "_atom_8h.html#a3e774ae0fc0908c0ed5dc7e9a1fb349a", null ],
    [ "operator|", "_atom_8h.html#a5b696009e136f369f88b0ebfc51175bc", null ],
    [ "stringToAtomType", "_atom_8h.html#ae559278f4b66ec9a09ddfaebb224f897", null ]
];
// Doxygen-generated navigation data for compatibilityTests.cpp — do not
// edit by hand; regenerate with Doxygen instead.
var _compatibility_tests_8cpp =
[
    [ "BOOST_AUTO_TEST_CASE", "_compatibility_tests_8cpp.xhtml#a8189edd0b64c34308ac5ae769f5baae3", null ]
];
<reponame>isandlaTech/cohorte-runtime<filename>java/deprecated/ui/org.psem2m.isolates.ui.admin/src/org/psem2m/isolates/ui/admin/impl/EFrameSize.java
/**
 * Copyright 2014 isandlaTech
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.psem2m.isolates.ui.admin.impl;

/**
 * The edges and size components of a frame; each constant carries the axis
 * (an {@link EDimension}) along which it measures.
 *
 * @author ogattaz
 */
public enum EFrameSize {

    BOTTOM(EDimension.VERTICAL),
    HEIGHT(EDimension.VERTICAL),
    LEFT(EDimension.HORIZONTAL),
    RIGHT(EDimension.HORIZONTAL),
    TOP(EDimension.VERTICAL),
    WIDTH(EDimension.HORIZONTAL);

    /** Axis along which this frame size measures. */
    private final EDimension pDimension;

    /**
     * Associates the constant with its measurement axis.
     *
     * @param aDimension
     *            the axis of this frame size
     */
    EFrameSize(final EDimension aDimension) {
        pDimension = aDimension;
    }

    /** @return true if this frame size is {@link #BOTTOM} */
    public boolean isBottom() {
        return BOTTOM == this;
    }

    /** @return true if this frame size is {@link #HEIGHT} */
    public boolean isHeight() {
        return HEIGHT == this;
    }

    /** @return true if this frame size measures along the horizontal axis */
    public boolean isHorizontally() {
        return pDimension.isHorizontal();
    }

    /** @return true if this frame size is {@link #LEFT} */
    public boolean isLeft() {
        return LEFT == this;
    }

    /** @return true if this frame size is {@link #RIGHT} */
    public boolean isRight() {
        return RIGHT == this;
    }

    /** @return true if this frame size is {@link #TOP} */
    public boolean isTop() {
        return TOP == this;
    }

    /** @return true if this frame size measures along the vertical axis */
    public boolean isVertically() {
        return pDimension.isVertical();
    }

    /** @return true if this frame size is {@link #WIDTH} */
    public boolean isWidth() {
        return WIDTH == this;
    }
}
<reponame>AlissonSteffens/OPKCalculator<gh_stars>0
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package br.univali.visao.panels;

import br.univali.model.interpolacao.EquationCalculator;
import br.univali.model.minimos_quadrados.MinimosQuadrados;
import br.univali.model.minimos_quadrados.Point;
import br.univali.visao.JGraph;
import br.univali.visao.UI;
import br.univali.visao.ViewAdapter;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Swing panel for least-squares ("minimos quadrados") curve fitting: the user
 * enters point coordinates and an approximation type, and the panel shows the
 * fitted function, the intermediate matrices, and optionally a graph.
 *
 * @author Alisson
 */
public class MinimosQuadradosPanel extends javax.swing.JPanel {

    /**
     * Creates new form MinimosQuadradosPanel
     */
    public MinimosQuadradosPanel() {
        initComponents();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jPanel27 = new javax.swing.JPanel();
        jPanel26 = new javax.swing.JPanel();
        jPanel23 = new javax.swing.JPanel();
        jLabel8 = new javax.swing.JLabel();
        tipoFunacaoAproximacao = new javax.swing.JComboBox();
        jPanel24 = new javax.swing.JPanel();
        grauLabel = new javax.swing.JLabel();
        grauPolinomio = new javax.swing.JTextField();
        jPanel25 = new javax.swing.JPanel();
        jButton6 = new javax.swing.JButton();
        jButton3 = new javax.swing.JButton();
        jButton2 = new javax.swing.JButton();
        jPanel22 = new javax.swing.JPanel();
        jPanel21 = new javax.swing.JPanel();
        jLabel5 = new javax.swing.JLabel();
        ynsMMQ = new javax.swing.JTextField();
        jPanel20 = new javax.swing.JPanel();
        jLabel4 = new javax.swing.JLabel();
        xzesMMQ = new javax.swing.JTextField();
        jPanel29 = new javax.swing.JPanel();
        jPanel28 = new javax.swing.JPanel();
        jLabel7 = new javax.swing.JLabel();
        mMQFuncaoAproximada = new javax.swing.JTextField();
        jScrollPane4 = new javax.swing.JScrollPane();
        areaMMQ = new javax.swing.JTextArea();

        setBorder(javax.swing.BorderFactory.createEmptyBorder(4, 4, 4, 4));
        setLayout(new java.awt.BorderLayout(4, 4));

        jPanel27.setLayout(new java.awt.BorderLayout(4, 4));
        jPanel26.setLayout(new java.awt.BorderLayout(4, 4));
        jPanel23.setLayout(new java.awt.BorderLayout(4, 4));

        jLabel8.setText("Tipo de Funçao de Aproximação ");
        jPanel23.add(jLabel8, java.awt.BorderLayout.WEST);

        tipoFunacaoAproximacao.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Polinomial", "Geométrica", "ae^bx" }));
        tipoFunacaoAproximacao.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                tipoFunacaoAproximacaoActionPerformed(evt);
            }
        });
        jPanel23.add(tipoFunacaoAproximacao, java.awt.BorderLayout.CENTER);

        jPanel26.add(jPanel23, java.awt.BorderLayout.WEST);

        jPanel24.setLayout(new java.awt.BorderLayout(4, 4));

        grauLabel.setText(" Grau do Polinômio");
        jPanel24.add(grauLabel, java.awt.BorderLayout.WEST);
        jPanel24.add(grauPolinomio, java.awt.BorderLayout.CENTER);

        jPanel26.add(jPanel24, java.awt.BorderLayout.CENTER);

        jPanel25.setLayout(new java.awt.BorderLayout(4, 4));

        jButton6.setText("Gráfico");
        jButton6.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton6ActionPerformed(evt);
            }
        });
        jPanel25.add(jButton6, java.awt.BorderLayout.WEST);

        jButton3.setText("Calcular");
        jButton3.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton3ActionPerformed(evt);
            }
        });
        jPanel25.add(jButton3, java.awt.BorderLayout.CENTER);

        jButton2.setIcon(new javax.swing.ImageIcon(getClass().getResource("/br/univali/resources/interrogation.png"))); // NOI18N
        jButton2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton2ActionPerformed(evt);
            }
        });
        jPanel25.add(jButton2, java.awt.BorderLayout.EAST);

        jPanel26.add(jPanel25, java.awt.BorderLayout.EAST);

        jPanel27.add(jPanel26, java.awt.BorderLayout.CENTER);

        jPanel22.setLayout(new java.awt.BorderLayout(4, 4));
        jPanel21.setLayout(new java.awt.BorderLayout(4, 4));

        jLabel5.setText("Y dos Pontos");
        jPanel21.add(jLabel5, java.awt.BorderLayout.WEST);
        jPanel21.add(ynsMMQ, java.awt.BorderLayout.CENTER);

        jPanel22.add(jPanel21, java.awt.BorderLayout.SOUTH);

        jPanel20.setLayout(new java.awt.BorderLayout(4, 4));

        jLabel4.setText("X dos Pontos");
        jPanel20.add(jLabel4, java.awt.BorderLayout.LINE_START);
        jPanel20.add(xzesMMQ, java.awt.BorderLayout.CENTER);

        jPanel22.add(jPanel20, java.awt.BorderLayout.NORTH);

        jPanel27.add(jPanel22, java.awt.BorderLayout.PAGE_START);

        add(jPanel27, java.awt.BorderLayout.NORTH);

        jPanel29.setLayout(new java.awt.BorderLayout(4, 4));
        jPanel28.setLayout(new java.awt.BorderLayout(4, 4));

        jLabel7.setText("Função Aproximada");
        jPanel28.add(jLabel7, java.awt.BorderLayout.WEST);
        jPanel28.add(mMQFuncaoAproximada, java.awt.BorderLayout.CENTER);

        jPanel29.add(jPanel28, java.awt.BorderLayout.SOUTH);

        areaMMQ.setEditable(false);
        areaMMQ.setColumns(20);
        areaMMQ.setRows(5);
        jScrollPane4.setViewportView(areaMMQ);

        jPanel29.add(jScrollPane4, java.awt.BorderLayout.CENTER);

        add(jPanel29, java.awt.BorderLayout.CENTER);
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Shows/hides the polynomial-degree input: it only applies when the
     * selected approximation type is "Polinomial".
     */
    private void tipoFunacaoAproximacaoActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_tipoFunacaoAproximacaoActionPerformed
        if(!tipoFunacaoAproximacao.getSelectedItem().toString().equals("Polinomial")) {
            grauLabel.setVisible(false);
            grauPolinomio.setVisible(false);
        } else {
            grauLabel.setVisible(true);
            grauPolinomio.setVisible(true);
        }
    }//GEN-LAST:event_tipoFunacaoAproximacaoActionPerformed

    /**
     * "Gráfico" button: fits the entered points and opens an HTML graph of
     * the points together with the fitted function.
     */
    private void jButton6ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton6ActionPerformed
        String stringXs = xzesMMQ.getText().toString();
        String stringYs = ynsMMQ.getText().toString();
        String grau;
        String tipoAproximacao = tipoFunacaoAproximacao.getSelectedItem().toString();
        // Degree defaults to 1 for non-polynomial fits (field hidden).
        if(grauPolinomio.isVisible()) {
            grau = grauPolinomio.getText().toString();
        } else {
            grau = "1";
        }
        List<Point> pontos = ViewAdapter.stringsToPoints(stringXs, stringYs);
        MinimosQuadrados minimosQuadrados = new MinimosQuadrados(pontos, Integer.parseInt(grau), tipoAproximacao);
        // Graph spans from the first X to one past the last X, step 1.0.
        JGraph graph = new JGraph(ViewAdapter.stringsToPoints(stringXs, stringYs), EquationCalculator.calcularFuncao(minimosQuadrados.getList(), pontos.get(0).getX(), pontos.get(pontos.size()-1).getX()+1, 1.0, tipoAproximacao));
        try {
            graph.showHTML();
        } catch (Exception ex) {
            Logger.getLogger(UI.class.getName()).log(Level.SEVERE, null, ex);
        }
    }//GEN-LAST:event_jButton6ActionPerformed

    /**
     * "Calcular" button: runs the least-squares fit and shows the solution
     * vector, both matrices, and the approximated function.
     */
    private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
        String stringXs = xzesMMQ.getText().toString();
        String stringYs = ynsMMQ.getText().toString();
        String grau;
        String tipoAproximacao = tipoFunacaoAproximacao.getSelectedItem().toString();
        // Degree defaults to 1 for non-polynomial fits (field hidden).
        if(grauPolinomio.isVisible()) {
            grau = grauPolinomio.getText().toString();
        } else {
            grau = "1";
        }
        MinimosQuadrados minimosQuadrados = new MinimosQuadrados(ViewAdapter.stringsToPoints(stringXs, stringYs), Integer.parseInt(grau), tipoAproximacao);
        String tesxtoResposta = "";
        tesxtoResposta += ViewAdapter.doubleVectorToString(minimosQuadrados.getList());
        tesxtoResposta += "\nMatriz Inicial: \n";
        tesxtoResposta += ViewAdapter.matrixToString(minimosQuadrados.getMatrizInicial());
        tesxtoResposta += "\nMatriz Escalonada: \n";
        tesxtoResposta += ViewAdapter.matrixToString(minimosQuadrados.getMatrizEscalonada());
        areaMMQ.setText(tesxtoResposta);
        mMQFuncaoAproximada.setText(ViewAdapter.doubleVectorEquation(minimosQuadrados.getList(), tipoAproximacao));
    }//GEN-LAST:event_jButton3ActionPerformed

    /**
     * Help ("?") button: fills the input fields with example data for the
     * currently selected approximation type.
     */
    private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
        String tipoAproximacao = tipoFunacaoAproximacao.getSelectedItem().toString();
        if(tipoAproximacao.equals("Polinomial")) {
            xzesMMQ.setText("-2 -1 0 1 2");
            ynsMMQ.setText("0 0 -1 0 7");
            grauPolinomio.setText("1");
        } else if(tipoAproximacao.equals("Geométrica")){
            xzesMMQ.setText("0,1 0,5 1 2 3");
            ynsMMQ.setText("0,005 0,5 4 30 110");
        } else if(tipoAproximacao.equals("ae^bx")){
            xzesMMQ.setText("2 5 8 11 14 17 27 31 41 44");
            ynsMMQ.setText("94,8 89,7 81,3 74,9 68,7 64,0 49,3 44,0 39,1 31,6");
        }
    }//GEN-LAST:event_jButton2ActionPerformed

    /**
     * Text dump of the panel state (inputs, type, degree, solution); decimal
     * points are converted to commas for the pt-BR locale.
     */
    @Override
    public String toString() {
        String out = "";
        out += "X: " + xzesMMQ.getText();
        out += "\nY: " + ynsMMQ.getText();
        out += "\nTipo: " + tipoFunacaoAproximacao.getSelectedItem().toString();
        out += "\nGrau: " + grauLabel.getText();
        out += "\nSolução: " + areaMMQ.getText();
        out += "\nFunção Aproximada: " + mMQFuncaoAproximada.getText();
        out = out.replace(".", ",");
        return out;
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JTextArea areaMMQ;
    private javax.swing.JLabel grauLabel;
    private javax.swing.JTextField grauPolinomio;
    private javax.swing.JButton jButton2;
    private javax.swing.JButton jButton3;
    private javax.swing.JButton jButton6;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JLabel jLabel8;
    private javax.swing.JPanel jPanel20;
    private javax.swing.JPanel jPanel21;
    private javax.swing.JPanel jPanel22;
    private javax.swing.JPanel jPanel23;
    private javax.swing.JPanel jPanel24;
    private javax.swing.JPanel jPanel25;
    private javax.swing.JPanel jPanel26;
    private javax.swing.JPanel jPanel27;
    private javax.swing.JPanel jPanel28;
    private javax.swing.JPanel jPanel29;
    private javax.swing.JScrollPane jScrollPane4;
    private javax.swing.JTextField mMQFuncaoAproximada;
    private javax.swing.JComboBox tipoFunacaoAproximacao;
    private javax.swing.JTextField xzesMMQ;
    private javax.swing.JTextField ynsMMQ;
    // End of variables declaration//GEN-END:variables
}
/**
 * Copyright 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS-IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import("sessions");
import("stringutils.randomHash");
import("funhtml.*");
import("etherpad.log");
import("etherpad.globals.*");
import("etherpad.pro.pro_utils");
import("etherpad.utils.*");
import("cache_utils.syncedWithCache");
jimport("java.lang.System.out.println");

// Cookie names: ET is the long-lived tracking cookie, ES the session cookie.
var _TRACKING_COOKIE_NAME = "ET";
var _SESSION_COOKIE_NAME = "ES";

// Records the first off-site referrer seen for this session, once only;
// same-host referrals (http or https) are ignored.  Also logs the referrer
// to the "referers" custom log.
function _updateInitialReferrer(data) {
  if (data.initialReferer) { return; }
  var ref = request.headers["Referer"];
  if (!ref) { return; }
  if (ref.indexOf('http://'+request.host) == 0) { return; }
  if (ref.indexOf('https://'+request.host) == 0) { return; }
  data.initialReferer = ref;
  log.custom("referers", {referer: ref});
}

// Builds the cookie-domain string (".domain" or ".sub.domain") so the cookie
// is shared across subdomains.  Returns undefined for dot-less hosts.
function _getScopedDomain(subDomain) {
  var d = request.domain;
  if (d.indexOf(".") == -1) {
    // special case for "localhost". For some reason, firefox does not like cookie domains
    // to be ".localhost".
    return undefined;
  }
  if (subDomain) {
    d = subDomain + "." + d;
  }
  return "." + d;
}

//--------------------------------------------------------------------------------
// pass in subDomain to get the session data for a particular subdomain --
// intended for debugging.
// Returns the session data for the current request (optionally scoped to a
// particular subdomain), recording the initial referrer as a side effect.
function getSession(subDomain) {
  var sessionData = sessions.getSession({
    cookieName: _SESSION_COOKIE_NAME,
    domain: _getScopedDomain(subDomain)
  });
  _updateInitialReferrer(sessionData);
  return sessionData;
}

// Session ID for the current request without creating a new session.
function getSessionId() {
  return sessions.getSessionId(_SESSION_COOKIE_NAME, false, _getScopedDomain());
}

// Raw session-cookie value, or null when unavailable.
function _getGlobalSessionId() {
  return (request.isDefined && request.cookies[_SESSION_COOKIE_NAME]) || null;
}

// Admin status is tracked in a synced cache keyed by session ID.
function isAnEtherpadAdmin() {
  var sessionId = _getGlobalSessionId();
  if (! sessionId) { return false; }
  return syncedWithCache("isAnEtherpadAdmin", function(c) {
    return !! c[sessionId];
  });
}

// Grants (v truthy) or revokes (v falsy) admin status for this session.
function setIsAnEtherpadAdmin(v) {
  var sessionId = _getGlobalSessionId();
  if (! sessionId) { return; }
  syncedWithCache("isAnEtherpadAdmin", function(c) {
    if (v) {
      c[sessionId] = true;
    } else {
      delete c[sessionId];
    }
  });
}

//--------------------------------------------------------------------------------

// Sets the long-lived tracking cookie if the request doesn't carry one yet.
function setTrackingCookie() {
  if (request.cookies[_TRACKING_COOKIE_NAME]) { return; }
  var trackingVal = randomHash(16);
  var expires = new Date(32503708800000); // year 3000
  response.setCookie({
    name: _TRACKING_COOKIE_NAME,
    value: trackingVal,
    path: "/",
    domain: _getScopedDomain(),
    expires: expires
  });
}

function getTrackingId() {
  // returns '-' if no tracking ID (caller can assume)
  return (request.cookies[_TRACKING_COOKIE_NAME] ||
          response.getCookie(_TRACKING_COOKIE_NAME) ||
          '-');
}

//--------------------------------------------------------------------------------

// Ensures both cookies exist before the request proper runs.  Cookie-less
// subdomain requests bounce through the superdomain (which may set cookies)
// and back; may redirect or stop the response entirely.
function preRequestCookieCheck() {
  if (isStaticRequest()) { return; }
  // If this function completes without redirecting, then it means
  // there is a valid session cookie and tracking cookie.
  if (request.cookies[_SESSION_COOKIE_NAME] && request.cookies[_TRACKING_COOKIE_NAME]) {
    if (request.params.cookieShouldBeSet) {
      // Cookies are present; strip the marker param from the URL.
      response.redirect(qpath({cookieShouldBeSet: null}));
    }
    return;
  }

  // Only superdomains can set cookies.
  var isSuperdomain = SUPERDOMAINS[request.domain];
  if (isSuperdomain) {
    // superdomain without cookies
    getSession();
    setTrackingCookie();
    // check if we need to redirect back to a subdomain.
    if ((request.path == "/") && (request.params.setCookie) && (request.params.contUrl)) {
      var contUrl = request.params.contUrl;
      if (contUrl.indexOf("?") == -1) {
        contUrl += "?";
      }
      contUrl += "&cookieShouldBeSet=1";
      response.redirect(contUrl);
    }
  } else {
    var parts = request.domain.split(".");
    if (parts.length < 3) {
      // invalid superdomain
      response.write("invalid superdomain");
      response.stop();
    }
    // subdomain without cookies
    if (request.params.cookieShouldBeSet) {
      // We already bounced through the superdomain and still have no cookie:
      // the browser is refusing cookies, so tell the user.
      log.warn("Cookie failure!");
      renderFramedHtml(DIV({style: "border: 1px solid #ccc; padding: 1em; width: 600px; margin: 1em auto; font-size: 1.4em;"},
        P("Please enable cookies in your browser in order to access this site."),
        BR(),
        P(A({href: "/"}, "Continue"))));
      response.stop();
    } else {
      // Bounce to the superdomain so it can set the cookies, preserving the
      // original URL (and port, if any) in contUrl.
      var contUrl = request.url;
      var p = request.host.split(':')[1];
      p = (p ? (":"+p) : "");
      response.redirect(request.scheme+"://"+pro_utils.getRequestSuperdomain()+p+
        "/?setCookie=1&contUrl="+encodeURIComponent(contUrl));
    }
  }
}
/*
 Copyright (c) 2012, Yahoo! Inc. All rights reserved.
 Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
 */
var nopt = require('nopt'),
    path = require('path'),
    fs = require('fs'),
    Collector = require('../collector'),
    formatOption = require('../util/help-formatter').formatOption,
    util = require('util'),
    utils = require('../object-utils'),
    filesFor = require('../util/file-matcher').filesFor,
    Command = require('./index');

// CLI command: merges coverage JSON files and checks the totals against
// user-supplied thresholds.
function CheckCoverageCommand() {
    Command.call(this);
}

CheckCoverageCommand.TYPE = 'check-coverage';
util.inherits(CheckCoverageCommand, Command);

Command.mix(CheckCoverageCommand, {
    synopsis: function () {
        return "checks overall coverage against thresholds from coverage JSON files. Exits 1 if thresholds are not met, 0 otherwise";
    },

    usage: function () {
        // NOTE(review): util.error is deprecated (removed in modern Node);
        // console.error is the drop-in replacement — confirm the supported
        // Node range before changing.
        util.error('\nUsage: ' + this.toolName() + ' ' + this.type() + ' <options> [<include-pattern>]\n\nOptions are:\n\n' +
            [
                formatOption('--statements <threshold>', 'statement coverage threshold'),
                formatOption('--functions <threshold>', 'function coverage threshold'),
                formatOption('--branches <threshold>', 'branch coverage threshold'),
                formatOption('--lines <threshold>', 'line coverage threshold')
            ].join('\n\n') + '\n');
        util.error('\n\n');
        util.error('Thresholds, when specified as a positive number are taken to be the minimum percentage required.');
        util.error('When a threshold is specified as a negative number it represents the maximum number of uncovered entities allowed.\n');
        util.error('For example, --statements 90 implies minimum statement coverage is 90%.');
        util.error(' --statements -10 implies that no more than 10 uncovered statements are allowed\n');
        util.error('<include-pattern> is a fileset pattern that can be used to select one or more coverage files ' +
            'for merge. This defaults to "**/coverage*.json"');
        util.error('\n');
    },

    run: function (args, callback) {
        var config = {
                root: path,
                dir: path,
                statements: Number,
                lines: Number,
                branches: Number,
                functions: Number,
                verbose: Boolean
            },
            opts = nopt(config, { v : '--verbose' }, args, 0),
            includePattern = '**/coverage*.json',
            root,
            collector = new Collector(),
            errors = [];

        // First positional argument overrides the default file pattern.
        if (opts.argv.remain.length > 0) {
            includePattern = opts.argv.remain[0];
        }

        root = opts.root || process.cwd();
        filesFor({
            root: root,
            includes: [ includePattern ]
        }, function (err, files) {
            if (err) { throw err; }
            // Merge every matching coverage file into one collector.
            files.forEach(function (file) {
                var coverageObject = JSON.parse(fs.readFileSync(file, 'utf8'));
                collector.add(coverageObject);
            });
            var thresholds = {
                    statements: opts.statements || 0,
                    branches: opts.branches || 0,
                    lines: opts.lines || 0,
                    functions: opts.functions || 0
                },
                actuals = utils.summarizeCoverage(collector.getFinalCoverage());
            if (opts.verbose) {
                console.log('Compare actuals against thresholds');
                console.log(JSON.stringify({ actuals: actuals, thresholds: thresholds }, undefined, 4));
            }

            // Positive threshold = minimum coverage %, negative threshold =
            // maximum number of uncovered entities allowed.
            Object.keys(thresholds).forEach(function (key) {
                var actual = actuals[key].pct,
                    actualUncovered = actuals[key].total - actuals[key].covered,
                    threshold = thresholds[key];
                if (threshold < 0) {
                    if (threshold * -1 < actualUncovered) {
                        errors.push('ERROR: Uncovered count for ' + key + ' (' + actualUncovered +
                            ') exceeds threshold (' + -1 * threshold + ')');
                    }
                } else {
                    if (actual < threshold) {
                        errors.push('ERROR: Coverage for ' + key + ' (' + actual +
                            '%) does not meet threshold (' + threshold + '%)');
                    }
                }
            });
            // Non-null argument signals failure to the command runner.
            return callback(errors.length === 0 ? null : errors.join("\n"));
        });
    }
});

module.exports = CheckCoverageCommand;
function getEmailVerificationMessage($email)
{
    // Guard clause: reject empty/false-y email values up front.
    if (!$email) {
        return "Invalid email provided.";
    }

    // Assuming $this refers to the current class instance.
    $account = $this->getUserInstance();

    // Pick the message based on the account's verification state.
    return $account->isVerified($email)
        ? "Your email is already verified. Just go ahead and log in to your account."
        : "Please check your email to verify your account.";
}
<reponame>coffeeandhops/spree_wholesale #insert_before :account_my_orders, 'hooks/wholesale_customer'
# Deface override: injects the wholesale-customer partial into the Spree
# user account page (spree/users/show), immediately before the
# "my orders" section.
Deface::Override.new(:virtual_path => 'spree/users/show',
                     :name => 'wholesale-my-orders',
                     :insert_before => "[data-hook='account_my_orders'], #account_my_orders[data-hook]",
                     :partial => "spree/hooks/wholesale_customer",
                     :disabled => false)
/**
 * Portal constructor: copies the expected coordinate and direction fields
 * from the options object onto the new instance.  Fields absent from
 * `args` come through as undefined, exactly like direct assignment.
 */
var portal = function(args) {
  var fields = ['x', 'y', 'direction', 'x1', 'y1'];
  for (var i = 0; i < fields.length; i += 1) {
    this[fields[i]] = args[fields[i]];
  }
};

module.exports = portal;
#!/usr/bin/env bash
# Lint/format checker: runs clang-format + cpplint over C++ sources and
# whitespace/line-width checks over everything else tracked by git.
set -eo pipefail

# Run from the repository root regardless of the caller's cwd.
postTagSystemRoot=$(cd "$(dirname "$0")" && pwd)
cd "$postTagSystemRoot"

lsfilesOptions=(
  --cached
  --others            # untracked files
  --exclude-standard  # exclude .gitignore
  '*'
  ':(exclude)*.png'
  ':(exclude)Dependencies/*'
  ':(exclude)libPostTagSystem/WolframHeaders/*'
  ':(exclude)*.xcodeproj/*'  # Xcode manages these automatically
  # data files
  ':(exclude)*.postcrib'
  ':(exclude)*.postinit'
  ':(exclude)*.postresult'
)

# comm -13 drops deleted-but-still-listed files from the lint set.
mapfile -t filesToLint < <(LC_ALL=C comm -13 <(git ls-files --deleted) <(git ls-files "${lsfilesOptions[@]}"))

formatInPlace=0
for arg in "$@"; do
  case $arg in
  -i)
    formatInPlace=1
    shift
    ;;
  *)
    echo "Argument $arg is not recognized."
    echo
    echo "Usage: ./lint.sh [-i]"
    echo "Analyze the C++ code with clang-format and cpplint."
    echo
    echo "Options:"
    echo "  -i  Inplace edit files with clang-format."
    exit 1
    ;;
  esac
done

exitStatus=0

# Bucket files by type; each bucket gets a different checker below.
for file in "${filesToLint[@]}"; do
  if [[ "$file" == *.cpp || "$file" == *.hpp || "$file" == *.h ]]; then
    cppFiles+=("$file")
  elif grep -rIl '^#![[:blank:]]*/usr/bin/env bash' "$file" >/dev/null; then
    # Some bash files don't use .sh extension, so find by shebang
    bashFiles+=("$file")
  elif [[ "$file" == *.md ]]; then
    markdownFiles+=("$file")
  else
    remainingFiles+=("$file")
  fi
done

# Formatting

red="\\\033[0;31m"
green="\\\033[0;32m"
endColor="\\\033[0m"

# Diffs a file against its formatted version (colorized); with -i, also
# rewrites the file in place.  Sets exitStatus=1 if formatting differs.
function formatWithCommand() {
  local command="$1"
  local file="$2"
  # `|| :` keeps set -e from aborting when diff reports differences.
  diff=$(diff -U0 --label "$file" "$file" --label formatted <("$command" "$file") || :)
  if [ $formatInPlace -eq 1 ]; then
    "$command" -i "$file"
  fi
  if [[ -n "$diff" ]]; then
    echo -e "$(echo -e "$diff\n\n" | sed "s|^-|$red-|g" | sed "s|^+|$green+|g" | sed "s|$|$endColor|g")"
    exitStatus=1
  fi
}

for file in "${cppFiles[@]}"; do
  formatWithCommand clang-format "$file"
done

for file in "${remainingFiles[@]}"; do
  formatWithCommand ./scripts/whitespaceFormat.sh "$file"
done

if [ $exitStatus -eq 1 ]; then
  echo "Found formatting errors. Run ./lint.sh -i to automatically fix by applying the printed patch."
fi

# Linting

for file in "${cppFiles[@]}"; do
  cpplint --quiet --extensions=hpp,cpp "$file" || exitStatus=1
done

widthLimit=120
checkLineWidthOutput=$(
  for file in "${remainingFiles[@]}" "${bashFiles[@]}"; do
    ./scripts/checkLineWidth.sh "$file" "$widthLimit"
  done
)
if [ -n "$checkLineWidthOutput" ]; then
  exitStatus=1
  echo "Found lines exceeding the maximum allowed length of $widthLimit:"
  echo "$checkLineWidthOutput"
fi

exit $exitStatus
#!/bin/bash
# ============================================================================
# File    : bcvTally.bash
# Project : BibleVox
# Date    : 2016.06.24
# Author  : MEAdams
# Purpose : scan bible text and create a book, chapter and verse lookup table
# --------:-------------------------------------------------------------------
# Depends : 1. Either pre-extracted Bible text file or diatheke application
# --------:-------------------------------------------------------------------
# Notes & : 1. The number of books, their names, numbers of chapters and
# Assumes :    verses can differ among bible translations, printed texts and
#         :    electronic texts.  This script produces a precise table to be
#         :    associated with a specific electronic rendering of the bible.
#         :    Any bible text accessible to the CrossWire "diatheke" command
#         :    line SWORD project frontend can be scanned using this script
#         :    to produce a table suitable for use within software needing
#         :    access to such information.
# --------:-------------------------------------------------------------------
# To Do   : 1.
# ============================================================================

# Script name
scr=$(basename "$0")

# Load utility helper scripts (supplies the ${_eko} printf format string)
source ../scrhlp.bash > /dev/null 2>&1
if [ $? -gt 0 ]; then echo "ABORT: ${scr} can't find scrhlp.bash"; exit 1; fi

# User help
usage() {
    printf "${_eko}" "
    Usage: ./${scr} txtFile
    Where: prjName = diatheke -b module name argument (e.g. ESV, KJV) \n" \
        1>&2
    exit 1
}
if [ -z "${1}" ]; then usage; fi

# Text file base name argument
PRJ="${1}"

# initializations
BCV="./${PRJ}bcv.menu"            # output lookup table
LOG="./${PRJ}tmp.log"             # scratch list of "Book Chapter:Verse" refs
TXT="../Texts/${PRJ}.copyrighted" # pre-extracted bible text to scan
fmt="%-20s%-4d%s\n"               # book name, chapter count, verse counts

# clear any old files for a new run
cat /dev/null > "${BCV}"
cat /dev/null > "${LOG}"

# Retrieve all book names and chapter numbers for the requested translation.
# Alternately can be done with diatheke if text file not available (slower):
#   diatheke -b ${PRJ} -k Genesis - | \
#
# BUGFIX: the original patterns used the character range [a-Z], which is an
# invalid (reversed, locale-dependent) range in POSIX bracket expressions —
# in the C collation 'a' sorts after 'Z'.  Replaced with the explicit
# [A-Za-z] here and in the expr patterns below.
sed -e 's/^III /3 /' -e 's/^II /2 /' -e 's/^I /1 /' "${TXT}" | \
    grep -o -G '^[1-3]* *[A-Za-z ]* [0-9]*:[0-9]*' > "${LOG}"

oldbook=""
oldchap=""
oldvers=""
versarr=""
passflg=""

while read -r line || [[ -n "$line" ]]; do
    # expr 'str : pattern' prints the match LENGTH (no capture group) or the
    # captured TEXT (with \(...\)); the lengths locate the field boundaries.
    chppart=$(expr "$line" : '^[1-3]* *[A-Za-z ]*')
    verpart=$(expr "$line" : '^.*[0-9]*:')
    chpwdth=$(( verpart - chppart - 1 ))
    newbook=$(expr "$line" : '^\([1-3]* *[A-Za-z ]*\)' | sed 's/ //g')
    newchap=$(echo ${line:$chppart:$chpwdth})
    newvers=$(echo ${line:$verpart})
    # get book name
    if [[ "${newbook}" = "${oldbook}" ]]
    then
        # get number of chapters in book
        if [[ ${newchap} -lt ${oldchap} ]]
        then
            oldchap=0
        else
            # get number of verses in chapter
            if [[ ${newvers} -lt ${oldvers} ]]
            then
                versarr="${versarr} ${oldvers}"
                oldvers=0
            else
                oldvers="${newvers}"
            fi
            oldchap="${newchap}"
        fi
    else
        # book changed: append the finished chapter's verse count and flush
        # the completed book's line to the table (skipped on the first book,
        # when passflg is still empty)
        versarr="${versarr} ${oldvers}"
        [[ ${passflg} -eq 1 ]] && \
            printf "$fmt" "${oldbook}" "${oldchap}" "${versarr}" >> "${BCV}"
        ! [[ ${newbook} = "" ]] && oldbook="${newbook}"
        ! [[ ${newchap} = "" ]] && oldchap="${newchap}"
        ! [[ ${newvers} = "" ]] && oldvers="${newvers}"
        versarr=""
        passflg=1
    fi
done < "${LOG}"

# program logic holds last results after EOF, so print it as well
[[ ${?} -eq 0 ]] && rm "${LOG}"
printf "$fmt" "${oldbook}" "${oldchap}" "${versarr} ${oldvers}" >> "${BCV}"
package com.iterlife.zeus.spring.core;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URL;

/**
 * Descriptor for an underlying resource that can also be opened as a stream
 * (via the inherited {@code InputStreamSource} contract) and resolved to
 * URL/URI/File handles where the resource type supports it.
 */
public interface Resource extends InputStreamSource {

    /** @return whether the resource actually exists in physical form */
    boolean isExist();

    /** @return whether the resource's contents can be read */
    boolean isReadable();

    /** @return whether the resource represents a handle with an open stream */
    boolean isOpen();

    /**
     * @return a URL handle for this resource
     * @throws IOException if the resource cannot be resolved as a URL
     */
    URL getURL() throws IOException;

    /**
     * @return a URI handle for this resource
     * @throws IOException if the resource cannot be resolved as a URI
     */
    URI getURI() throws IOException;

    /**
     * @return a File handle for this resource
     * @throws IOException if the resource is not available on the filesystem
     */
    File getFile() throws IOException;

    /**
     * @return the content length of the resource in bytes
     * @throws IOException if the resource cannot be accessed
     */
    long getContentLength() throws IOException;

    /**
     * @return the last-modified timestamp of the resource
     * @throws IOException if the resource cannot be accessed
     */
    long lastModified() throws IOException;

    /** @return the file name of this resource */
    String getFileName();

    /** @return a description of this resource (e.g. for error messages) */
    String getDescription();

    /**
     * Creates a File for a path relative to this resource.
     *
     * @param relativePath the path relative to this resource
     * @return the File for the relative location
     * @throws IOException if the relative file cannot be determined
     */
    File createRelativeFile(String relativePath) throws IOException;
}