const WOQLTableConfig = require("./tableConfig"); const UTILS = require('../utils'); const WOQLRule = require('../woqlRule'); const WOQLResult = require('../woqlResult'); /** * @file WOQL Table * @license Apache Version 2 */ function WOQLTable(client, config){ this.client = client; this.config = (config ? config : new WOQLTableConfig()); return this; } WOQLTable.prototype.options = function(config){ this.config = config; return this; } WOQLTable.prototype.setResult = function(res){ this.result = res; return this; } WOQLTable.prototype.count = function(){ return this.result.count(); } WOQLTable.prototype.first = function(){ return this.result.first(); } WOQLTable.prototype.prev = function(){ return this.result.prev(); } WOQLTable.prototype.next = function(){ return this.result.next(); } WOQLTable.prototype.canAdvancePage = function(){ return (this.result.count() == this.result.query.getLimit()); } WOQLTable.prototype.canChangePage = function(){ return this.canAdvancePage() || this.canRetreatPage(); } WOQLTable.prototype.canRetreatPage = function(){ return (this.result.query.getPage() > 1); } WOQLTable.prototype.getPageSize = function(){ return this.result.query.getLimit() ; } WOQLTable.prototype.setPage = function(l){ return this.result.query.setPage(l); } WOQLTable.prototype.getPage = function(){ return this.result.query.getPage(); } WOQLTable.prototype.setPageSize = function(l){ return this.update(this.result.query.setPageSize(l)); } WOQLTable.prototype.nextPage = function(){ return this.update(this.result.query.nextPage()); } WOQLTable.prototype.firstPage = function(){ return this.update(this.result.query.firstPage()); } WOQLTable.prototype.previousPage = function(){ return this.update(this.result.query.previousPage()); } WOQLTable.prototype.getColumnsToRender = function(){ if(this.hasColumnOrder()){ var cols = this.getColumnOrder(); } else if(this.result.query.hasSelect()){ var cols = this.result.query.getSelectVariables(); } else { var cols = this.result.getVariableList(); } var self = this; return (cols ? cols.filter(col => !self.hidden(col)) : []); } WOQLTable.prototype.getColumnHeaderContents = function(colid){ colid = UTILS.addNamespaceToVariable(colid); let hr = new WOQLRule().matchColumn(this.config.rules, colid, "header"); if(hr && hr.length){ let h = hr[hr.length-1].rule.header; if(typeof h == "string"){ return document.createTextNode(h); } else if(typeof h == "function"){ return h(colid); } else return h; } var clab = UTILS.labelFromURL(colid); return document.createTextNode(clab); } WOQLTable.prototype.hidden = function(col){ colid = UTILS.addNamespaceToVariable(col); let matched_rules = new WOQLRule().matchColumn(this.config.rules, colid, "hidden"); if(matched_rules.length){ return matched_rules[matched_rules.length-1].rule.hidden; } return false; } /** * Called when you want to change the query associated with the table. 
*/ WOQLTable.prototype.update = function(nquery){ return nquery.execute(this.client).then((results) => { var nresult = new WOQLResult(results, nquery); this.setResult(nresult); if(this.notify) this.notify(nresult); return nresult; }); } WOQLTable.prototype.hasDefinedEvent = function(row, key, scope, action, rownum){ if(scope == "row"){ var matched_rules = new WOQLRule().matchRow(this.config.rules, row, this.result.cursor, action); } else { var matched_rules = new WOQLRule().matchCell(this.config.rules, row, key, this.result.cursor, action); } if(matched_rules && matched_rules.length) return true; return false; } WOQLTable.prototype.getDefinedEvent = function(row, key, scope, action, rownum){ if(scope == "row"){ var matched_rules = new WOQLRule().matchRow(this.config.rules, row, this.result.cursor, action); } else { var matched_rules = new WOQLRule().matchCell(this.config.rules, row, key, this.result.cursor, action); } if(matched_rules && matched_rules.length) { var l = (matched_rules.length - 1); return matched_rules[l].rule[action]; } } WOQLTable.prototype.getRowClick = function(row){ let re = this.getDefinedEvent(row, false, "row", "click"); return re; } WOQLTable.prototype.getCellClick = function(row, col){ let cc = this.getDefinedEvent(row, col, "column", "click"); return cc; } WOQLTable.prototype.getRowHover = function(row){ return this.getDefinedEvent(row, false, "row", "hover"); } WOQLTable.prototype.getCellHover = function(row, key){ return this.getDefinedEvent(row, key, "column", "hover"); } WOQLTable.prototype.getColumnOrder = function(){ return this.config.column_order(); } WOQLTable.prototype.hasColumnOrder = WOQLTable.prototype.getColumnOrder; WOQLTable.prototype.hasCellClick = WOQLTable.prototype.getCellClick; WOQLTable.prototype.hasRowClick = WOQLTable.prototype.getRowClick; WOQLTable.prototype.hasCellHover = WOQLTable.prototype.getCellHover; WOQLTable.prototype.hasRowHover = WOQLTable.prototype.getRowHover; WOQLTable.prototype.getRenderer = function(key, row, rownum){ var rend = this.getSpecificRender(key, row); if(rend) return rend; let renderer = this.getDefinedEvent(row, key, "column", "renderer", rownum); let args = this.getDefinedEvent(row, key, "column", "args", rownum); if(!renderer){ let r = this.getRendererForDatatype(row[key]); renderer = r.name; if(!args) args = r.args; } if(renderer){ return this.datatypes.createRenderer(renderer, args); } } WOQLTable.prototype.renderValue = function(renderer, val, key, row){ if(val && val['@type']){ renderer.type = val['@type']; var dv = new DataValue(val['@value'], val['@type'], key, row); } else if(val && val['@language']){ renderer.type = "xsd:string"; var dv = new DataValue(val['@value'], renderer.type, key, row); } else if(val && typeof val == "string"){ renderer.type = "id"; var dv = new DataValue(val, "id", key, row); } if(dv) return renderer.renderValue(dv); return ""; } function DataValue(val, type){ this.datavalue = (val == "unknown" ? 
"" : val); this.datatype = type; } DataValue.prototype.value = function(nvalue){ if(nvalue) { this.datavalue = nvalue; return this; } return this.datavalue; } WOQLTable.prototype.getRendererForDatatype = function(val){ if(val && val['@type']){ return this.datatypes.getRenderer(val['@type'], val['@value']); } else if(val && val['@language']){ return this.datatypes.getRenderer("xsd:string", val['@value']); } else if(val && typeof val == "string"){ return this.datatypes.getRenderer("id", val); } return false; } WOQLTable.prototype.getSpecificRender = function(key, row){ let rend = this.getDefinedEvent(row, key, "column", "render"); return rend; } module.exports = WOQLTable;
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Upgrader for Python scripts from 1.* TensorFlow to 2.0 TensorFlow.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse from tensorflow.tools.compatibility import ast_edits from tensorflow.tools.compatibility import renames_v2 class TFAPIChangeSpec(ast_edits.APIChangeSpec): """List of maps that describe what changed in the API.""" def __init__(self): # Maps from a function name to a dictionary that describes how to # map from an old argument keyword to the new argument keyword. self.function_keyword_renames = { "tf.expand_dims": { "dim": "axis", }, "tf.convert_to_tensor": { "preferred_dtype": "dtype_hint" }, "tf.math.count_nonzero": { "input_tensor": "input", "keep_dims": "keepdims", "reduction_indices": "axis", }, "tf.nn.pool": { "dilation_rate": "dilations" }, "tf.nn.separable_conv2d": { "rate": "dilations" }, "tf.nn.sufficient_statistics": { "keep_dims": "keepdims" }, "tf.debugging.assert_all_finite": { "t": "x", "msg": "message", }, "tf.sparse.split": { "split_dim": "axis", }, "tf.multinomial": { "output_dtype": "dtype", }, "tf.random.multinomial": { "output_dtype": "dtype", }, "tf.nn.conv3d": { "filter": "filters" }, "tf.nn.conv3d_transpose": { "value": "input", "filter": "filters", }, "tf.nn.convolution": { "filter": "filters", "dilation_rate": "dilations", }, "tf.gfile.Exists": { "filename": "path", }, "tf.random.stateless_multinomial": { "output_dtype": "dtype", }, } # Mapping from function to the new name of the function self.symbol_renames = renames_v2.renames # pylint: disable=line-too-long # Add additional renames not in renames_v2.py here. # IMPORTANT: For the renames in here, if you also need to add to # function_reorders or function_keyword_renames, use the OLD function name. # These renames happen after the arguments have been processed. 
self.symbol_renames.update({ "tf.contrib.data.AUTOTUNE": "tf.data.experimental.AUTOTUNE", "tf.contrib.data.Counter": "tf.data.experimental.Counter", "tf.contrib.data.CheckpointInputPipelineHook": "tf.data.experimental.CheckpointInputPipelineHook", "tf.contrib.data.CsvDataset": "tf.data.experimental.CsvDataset", "tf.contrib.data.Optional": "tf.data.experimental.Optional", "tf.contrib.data.RandomDataset": "tf.data.experimental.RandomDataset", "tf.contrib.data.Reducer": "tf.data.experimental.Reducer", "tf.contrib.data.SqlDataset": "tf.data.experimental.SqlDataset", "tf.contrib.data.StatsAggregator": "tf.data.experimental.StatsAggregator", "tf.contrib.data.TFRecordWriter": "tf.data.experimental.TFRecordWriter", "tf.contrib.data.assert_element_shape": "tf.data.experimental.assert_element_shape", "tf.contrib.data.batch_and_drop_remainder": "tf.compat.v1.contrib.data.batch_and_drop_remainder", "tf.contrib.data.bucket_by_sequence_length": "tf.data.experimental.bucket_by_sequence_length", "tf.contrib.data.choose_from_datasets": "tf.data.experimental.choose_from_datasets", "tf.contrib.data.copy_to_device": "tf.data.experimental.copy_to_device", "tf.contrib.data.dense_to_sparse_batch": "tf.data.experimental.dense_to_sparse_batch", "tf.contrib.data.enumerate_dataset": "tf.data.experimental.enumerate_dataset", "tf.contrib.data.get_next_as_optional": "tf.data.experimental.get_next_as_optional", "tf.contrib.data.get_single_element": "tf.data.experimental.get_single_element", "tf.contrib.data.group_by_reducer": "tf.data.experimental.group_by_reducer", "tf.contrib.data.group_by_window": "tf.data.experimental.group_by_window", "tf.contrib.data.ignore_errors": "tf.data.experimental.ignore_errors", "tf.contrib.data.latency_stats": "tf.data.experimental.latency_stats", "tf.contrib.data.make_batched_features_dataset": "tf.data.experimental.make_batched_features_dataset", "tf.contrib.data.make_csv_dataset": "tf.data.experimental.make_csv_dataset", "tf.contrib.data.make_saveable_from_iterator": "tf.data.experimental.make_saveable_from_iterator", "tf.contrib.data.map_and_batch": "tf.data.experimental.map_and_batch", "tf.contrib.data.padded_batch_and_drop_remainder": "tf.compat.v1.contrib.data.padded_batch_and_drop_remainder", "tf.contrib.data.parallel_interleave": "tf.data.experimental.parallel_interleave", "tf.contrib.data.parse_example_dataset": "tf.data.experimental.parse_example_dataset", "tf.contrib.data.prefetch_to_device": "tf.data.experimental.prefetch_to_device", "tf.contrib.data.read_batch_features": "tf.compat.v1.contrib.data.read_batch_features", "tf.contrib.data.reduce_dataset": "tf.compat.v1.contrib.data.reduce_dataset", "tf.contrib.data.rejection_resample": "tf.data.experimental.rejection_resample", "tf.contrib.data.sample_from_datasets": "tf.data.experimental.sample_from_datasets", "tf.contrib.data.scan": "tf.data.experimental.scan", "tf.contrib.data.set_stats_aggregator": "tf.data.experimental.set_stats_aggregator", "tf.contrib.data.shuffle_and_repeat": "tf.data.experimental.shuffle_and_repeat", "tf.contrib.data.sliding_window_batch": "tf.compat.v1.contrib.data.sliding_window_batch", "tf.contrib.data.sloppy_interleave": "tf.compat.v1.contrib.data.sloppy_interleave", "tf.contrib.data.unbatch": "tf.data.experimental.unbatch", "tf.contrib.data.unique": "tf.data.experimental.unique", "tf.quantize_v2": "tf.quantization.quantize", "tf.sparse_concat": "tf.sparse.concat", "tf.sparse_split": "tf.sparse.split", "tf.multinomial": "tf.random.categorical", "tf.random.multinomial": "tf.random.categorical", 
"tf.load_file_system_library": "tf.load_library", }) # pylint: enable=line-too-long # For custom behavior and if auto-generate rename in renames_v2.py # is incorrect, add the op name here to exclude it from renames_v2.py. excluded_renames = [ ] # Variables that should be changed to functions. self.change_to_function = {} # Functions that were reordered should be changed to the new keyword args # for safety, if positional arguments are used. If you have reversed the # positional arguments yourself, this could do the wrong thing. # IMPORTANT: order here should correspond to OLD argument order. # We just prepend "arg_name=" to all arguments in function calls. self.function_reorders = { "tf.argmax": ["input", "axis", "name", "dimension", "output_type"], "tf.argmin": ["input", "axis", "name", "dimension", "output_type"], "tf.boolean_mask": ["tensor", "mask", "name", "axis"], "tf.convert_to_tensor": ["value", "dtype", "name", "preferred_dtype"], "tf.nn.convolution": [ "input", "filter", "padding", "strides", "dilation_rate", "name", "data_format"], "tf.nn.crelu": ["features", "name", "axis"], "tf.nn.pool": [ "input", "window_shape", "pooling_type", "padding", "dilation_rate", "strides", "name", "data_format" ], "tf.nn.depthwise_conv2d": [ "input", "filter", "strides", "padding", "rate", "name", "data_format" ], "tf.multinomial": [ "logits", "num_samples", "seed", "name", "output_dtype" ], "tf.random.multinomial": [ "logits", "num_samples", "seed", "name", "output_dtype" ], "tf.pad": ["tensor", "paddings", "mode", "name", "constant_values"], "tf.quantize_v2": [ "input", "min_range", "max_range", "T", "mode", "name", "round_mode" ], "tf.shape": ["input", "name", "out_type"], "tf.size": ["input", "name", "out_type"], "tf.sparse.concat": [ "axis", "sp_inputs", "name", "expand_nonconcat_dim", "concat_dim" ], "tf.random.poisson": ["lam", "shape", "dtype", "seed", "name"], "tf.sparse.segment_mean": [ "data", "indices", "segment_ids", "name", "num_segments" ], "tf.sparse.segment_sqrt_n": [ "data", "indices", "segment_ids", "name", "num_segments" ], "tf.sparse.segment_sum": [ "data", "indices", "segment_ids", "name", "num_segments" ], "tf.strings.length": ["input", "name", "unit"], } # Specially handled functions. self.function_handle = {} decay_function_comment = ( "ERROR: <function name> has been changed to return a callable instead " "of a tensor when graph building, but its functionality remains " "unchanged during eager execution (returns a callable like " "before). The converter cannot detect and fix this reliably, so " "you need to inspect this usage manually.\n" ) # TODO(b/118888586): add default value change to update script. default_loss_reduction_changed = ( "WARNING: default value of loss_reduction has been changed to " "SUM_OVER_BATCH_SIZE.\n" ) assert_return_type_comment = ( "WARNING: assert_* functions have been changed to return None, the " "data argument has been removed, and arguments have been reordered." ) assert_rank_comment = ( "WARNING: assert_rank_* functions have been changed to return None, and" " the data and summarize arguments have been removed." ) # Function warnings. <function name> placeholder inside warnings will be # replaced by function name. 
self.function_warnings = { "tf.assert_greater": assert_return_type_comment, "tf.assert_equal": assert_return_type_comment, "tf.assert_less": assert_return_type_comment, "tf.assert_rank": assert_rank_comment, "tf.debugging.assert_equal": assert_return_type_comment, "tf.debugging.assert_greater": assert_return_type_comment, "tf.debugging.assert_greater_equal": assert_return_type_comment, "tf.debugging.assert_integer": assert_return_type_comment, "tf.debugging.assert_less": assert_return_type_comment, "tf.debugging.assert_less_equal": assert_return_type_comment, "tf.debugging.assert_near": assert_return_type_comment, "tf.debugging.assert_negative": assert_return_type_comment, "tf.debugging.assert_non_negative": assert_return_type_comment, "tf.debugging.assert_non_positive": assert_return_type_comment, "tf.debugging.assert_none_equal": assert_return_type_comment, "tf.debugging.assert_positive": assert_return_type_comment, "tf.debugging.assert_rank": assert_rank_comment, "tf.debugging.assert_rank_at_least": assert_rank_comment, "tf.debugging.assert_rank_in": assert_rank_comment, "tf.train.exponential_decay": decay_function_comment, "tf.train.piecewise_constant": decay_function_comment, "tf.train.polynomial_decay": decay_function_comment, "tf.train.natural_exp_decay": decay_function_comment, "tf.train.inverse_time_decay": decay_function_comment, "tf.train.cosine_decay": decay_function_comment, "tf.train.cosine_decay_restarts": decay_function_comment, "tf.train.linear_cosine_decay": decay_function_comment, "tf.train.noisy_linear_cosine_decay": decay_function_comment, "tf.estimator.LinearClassifier": default_loss_reduction_changed, "tf.estimator.LinearRegressor": default_loss_reduction_changed, "tf.estimator.DNNLinearCombinedClassifier": default_loss_reduction_changed, "tf.estimator.DNNLinearCombinedRegressor": default_loss_reduction_changed, "tf.estimator.DNNRegressor": default_loss_reduction_changed, "tf.estimator.DNNClassifier": default_loss_reduction_changed, "tf.estimator.BaselineClassifier": default_loss_reduction_changed, "tf.estimator.BaselineRegressor": default_loss_reduction_changed, "tf.nn.conv1d": "WARNING: use_cudnn_on_gpu argument has been removed and \"value\" was " "renamed to \"input\"", "tf.nn.conv2d": "WARNING: use_cudnn_on_gpu argument has been removed and \"filter\" " "was renamed to \"filters\"", "tf.nn.conv2d_backprop_filter": "WARNING: use_cudnn_on_gpu argument has been removed", "tf.nn.conv2d_backprop_input": "WARNING: use_cudnn_on_gpu argument has been removed and \"filter\" " "was renamed to \"filters\"", } # Right now we can't have both a rename and a warning. 
self.symbol_renames = { name: new_name for name, new_name in self.symbol_renames.items() if name not in self.function_warnings and name not in excluded_renames } if __name__ == "__main__": parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description="""Convert a TensorFlow Python file to 2.0 Simple usage: tf_convert_v2.py --infile foo.py --outfile bar.py tf_convert_v2.py --intree ~/code/old --outtree ~/code/new """) parser.add_argument( "--infile", dest="input_file", help="If converting a single file, the name of the file " "to convert") parser.add_argument( "--outfile", dest="output_file", help="If converting a single file, the output filename.") parser.add_argument( "--intree", dest="input_tree", help="If converting a whole tree of files, the directory " "to read from (relative or absolute).") parser.add_argument( "--outtree", dest="output_tree", help="If converting a whole tree of files, the output " "directory (relative or absolute).") parser.add_argument( "--copyotherfiles", dest="copy_other_files", help=("If converting a whole tree of files, whether to " "copy the other files."), type=bool, default=False) parser.add_argument( "--reportfile", dest="report_filename", help=("The name of the file where the report log is " "stored." "(default: %(default)s)"), default="report.txt") args = parser.parse_args() upgrade = ast_edits.ASTCodeUpgrader(TFAPIChangeSpec()) report_text = None report_filename = args.report_filename files_processed = 0 if args.input_file: if not args.output_file: raise ValueError( "--outfile=<output file> argument is required when converting a " "single file.") files_processed, report_text, errors = upgrade.process_file( args.input_file, args.output_file) files_processed = 1 elif args.input_tree: if not args.output_tree: raise ValueError( "--outtree=<output directory> argument is required when converting a " "file tree.") files_processed, report_text, errors = upgrade.process_tree( args.input_tree, args.output_tree, args.copy_other_files) else: parser.print_help() if report_text: open(report_filename, "w").write(report_text) print("TensorFlow 2.0 Upgrade Script") print("-----------------------------") print("Converted %d files\n" % files_processed) print("Detected %d errors that require attention" % len(errors)) print("-" * 80) print("\n".join(errors)) print("\nMake sure to read the detailed log %r\n" % report_filename)
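# Illustrative before/after sketch (not part of the upgrader): the kind of rewrite the
# spec above drives, combining symbol_renames, function_keyword_renames and
# function_reorders. The tensor names are hypothetical.
#
#   Before (TF 1.x):
#       samples = tf.multinomial(logits, num_samples=5, output_dtype=tf.int32)
#       expanded = tf.expand_dims(samples, dim=0)
#
#   After conversion:
#       samples = tf.random.categorical(logits=logits, num_samples=5, dtype=tf.int32)
#       expanded = tf.expand_dims(expanded, axis=0) if False else tf.expand_dims(samples, axis=0)
#
#   (dim -> axis and output_dtype -> dtype come from function_keyword_renames;
#    tf.multinomial -> tf.random.categorical comes from symbol_renames, applied last;
#    the "logits=" keyword is prepended because tf.multinomial is in function_reorders.)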
import { v4 as uuid } from "uuid"; import { UPDATE_AUTH, SET_EXPIRED_SESSION, REMOVE_AUTH, UPDATE_ANILIST, } from "../actions"; // make UUIDs noticeable const genUUID = () => { let id = uuid(); const parts = id.split("-"); parts[1] = "NANI"; return parts.join("-"); }; const initialState = { token: "", expires: 8640000000000000, username: "", guest: true, premium: false, anilist: { username: "", token: "" }, expiredSession: "", uuid: genUUID(), }; export default function Auth(state = initialState, action) { switch (action.type) { case UPDATE_AUTH: return { ...state, ...action.payload, }; case UPDATE_ANILIST: return { ...state, anilist: action.payload, }; case REMOVE_AUTH: return { ...initialState, expiredSession: state.expiredSession, uuid: state.uuid, anilist: state.anilist, }; case SET_EXPIRED_SESSION: return { ...state, expiredSession: action.payload, }; default: return state; } }
# coding: utf-8 """ anchore_engine.services.policy_engine This is a policy evaluation service. It receives push-events from external systems for data updates and provides an api for requesting image policy checks OpenAPI spec version: 1.0.0 Contact: zach@anchore.com Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class FeedUpdateNotification(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'event_timestamp': 'datetime', 'feed_name': 'str', 'feed_group': 'str', 'data': 'list[object]' } attribute_map = { 'event_timestamp': 'event_timestamp', 'feed_name': 'feed_name', 'feed_group': 'feed_group', 'data': 'data' } def __init__(self, event_timestamp=None, feed_name=None, feed_group=None, data=None): """ FeedUpdateNotification - a model defined in Swagger """ self._event_timestamp = None self._feed_name = None self._feed_group = None self._data = None if event_timestamp is not None: self.event_timestamp = event_timestamp if feed_name is not None: self.feed_name = feed_name if feed_group is not None: self.feed_group = feed_group if data is not None: self.data = data @property def event_timestamp(self): """ Gets the event_timestamp of this FeedUpdateNotification. The time of the external event. Should be set to when the event occurred, to the delivery time :return: The event_timestamp of this FeedUpdateNotification. :rtype: datetime """ return self._event_timestamp @event_timestamp.setter def event_timestamp(self, event_timestamp): """ Sets the event_timestamp of this FeedUpdateNotification. The time of the external event. Should be set to when the event occurred, to the delivery time :param event_timestamp: The event_timestamp of this FeedUpdateNotification. :type: datetime """ self._event_timestamp = event_timestamp @property def feed_name(self): """ Gets the feed_name of this FeedUpdateNotification. :return: The feed_name of this FeedUpdateNotification. :rtype: str """ return self._feed_name @feed_name.setter def feed_name(self, feed_name): """ Sets the feed_name of this FeedUpdateNotification. :param feed_name: The feed_name of this FeedUpdateNotification. :type: str """ self._feed_name = feed_name @property def feed_group(self): """ Gets the feed_group of this FeedUpdateNotification. :return: The feed_group of this FeedUpdateNotification. :rtype: str """ return self._feed_group @feed_group.setter def feed_group(self, feed_group): """ Sets the feed_group of this FeedUpdateNotification. :param feed_group: The feed_group of this FeedUpdateNotification. :type: str """ self._feed_group = feed_group @property def data(self): """ Gets the data of this FeedUpdateNotification. :return: The data of this FeedUpdateNotification. :rtype: list[object] """ return self._data @data.setter def data(self, data): """ Sets the data of this FeedUpdateNotification. :param data: The data of this FeedUpdateNotification. 
:type: list[object] """ self._data = data def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, FeedUpdateNotification): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
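# Usage sketch (assumed, not part of the generated model): build a notification and
# serialize it with the to_dict()/to_str() helpers defined above. The feed name, group
# and data values here are hypothetical.
if __name__ == '__main__':
    import datetime
    notification = FeedUpdateNotification(
        event_timestamp=datetime.datetime.utcnow(),
        feed_name='vulnerabilities',
        feed_group='debian:10',
        data=[{'updated_records': 3}],
    )
    print(notification.to_str())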
/** * Add collapseable boxes to our editor screens. */ postboxes.add_postbox_toggles(pagenow); /** * The rest of our customizations. */ (function($) { if ('edit' === getParameterByName('action')) { // Store our original slug on page load for edit checking. var original_slug = $('#name').val(); } // Switch to newly selected post type or taxonomy automatically. $('#post_type').on('change',function(){ $('#cptui_select_post_type').submit(); }); $('#taxonomy').on('change',function(){ $( '#cptui_select_taxonomy' ).submit(); }); // Confirm our deletions $('#cpt_submit_delete').on('click',function() { if ( confirm( cptui_type_data.confirm ) ) { return true; } return false; }); // Toggles help/support accordions. $('#support .question').each(function() { var tis = $(this), state = false, answer = tis.next('div').slideUp(); tis.on('click keydown',function(e) { // Helps with accessibility and keyboard navigation. if(e.type==='keydown' && e.keyCode!==32 && e.keyCode!==13) { return; } e.preventDefault(); state = !state; answer.slideToggle(state); tis.toggleClass('active',state); tis.attr('aria-expanded', state.toString() ); tis.focus(); }); }); // Switch spaces for underscores on our slug fields. $('#name').on('keyup',function(e){ var value, original_value; value = original_value = $(this).val(); if ( e.keyCode !== 9 && e.keyCode !== 37 && e.keyCode !== 38 && e.keyCode !== 39 && e.keyCode !== 40 ) { value = value.replace(/ /g, "_"); value = value.toLowerCase(); value = replaceDiacritics(value); value = transliterate(value); value = replaceSpecialCharacters(value); if ( value !== original_value ) { $(this).attr('value', value); } } //Displays a message if slug changes. if(undefined != original_slug) { var $slugchanged = $('#slugchanged'); if(value != original_slug) { $slugchanged.removeClass('hidemessage'); } else { $slugchanged.addClass('hidemessage'); } } }); // Replace diacritic characters with latin characters. 
function replaceDiacritics(s) { var diacritics = [ /[\300-\306]/g, /[\340-\346]/g, // A, a /[\310-\313]/g, /[\350-\353]/g, // E, e /[\314-\317]/g, /[\354-\357]/g, // I, i /[\322-\330]/g, /[\362-\370]/g, // O, o /[\331-\334]/g, /[\371-\374]/g, // U, u /[\321]/g, /[\361]/g, // N, n /[\307]/g, /[\347]/g // C, c ]; var chars = ['A', 'a', 'E', 'e', 'I', 'i', 'O', 'o', 'U', 'u', 'N', 'n', 'C', 'c']; for (var i = 0; i < diacritics.length; i++) { s = s.replace(diacritics[i], chars[i]); } return s; } function replaceSpecialCharacters(s) { s = s.replace(/[^a-z0-9\s]/gi, '_'); return s; } var cyrillic = { "Ё": "YO", "Й": "I", "Ц": "TS", "У": "U", "К": "K", "Е": "E", "Н": "N", "Г": "G", "Ш": "SH", "Щ": "SCH", "З": "Z", "Х": "H", "Ъ": "'", "ё": "yo", "й": "i", "ц": "ts", "у": "u", "к": "k", "е": "e", "н": "n", "г": "g", "ш": "sh", "щ": "sch", "з": "z", "х": "h", "ъ": "'", "Ф": "F", "Ы": "I", "В": "V", "А": "a", "П": "P", "Р": "R", "О": "O", "Л": "L", "Д": "D", "Ж": "ZH", "Э": "E", "ф": "f", "ы": "i", "в": "v", "а": "a", "п": "p", "р": "r", "о": "o", "л": "l", "д": "d", "ж": "zh", "э": "e", "Я": "Ya", "Ч": "CH", "С": "S", "М": "M", "И": "I", "Т": "T", "Ь": "'", "Б": "B", "Ю": "YU", "я": "ya", "ч": "ch", "с": "s", "м": "m", "и": "i", "т": "t", "ь": "'", "б": "b", "ю": "yu" }; function transliterate(word) { return word.split('').map(function (char) { return cyrillic[char] || char; }).join(""); } if ( undefined != wp.media ) { var _custom_media = true, _orig_send_attachment = wp.media.editor.send.attachment; } function getParameterByName(name, url) { if (!url) url = window.location.href; name = name.replace(/[\[\]]/g, "\\$&"); var regex = new RegExp("[?&]" + name + "(=([^&#]*)|&|#|$)"), results = regex.exec(url); if (!results) return null; if (!results[2]) return ''; return decodeURIComponent(results[2].replace(/\+/g, " ")); } $('#cptui_choose_icon').on('click',function(e){ e.preventDefault(); var button = $(this); var id = jQuery('#menu_icon').attr('id'); _custom_media = true; wp.media.editor.send.attachment = function (props, attachment) { if (_custom_media) { $("#" + id).val(attachment.url); } else { return _orig_send_attachment.apply(this, [props, attachment]); } }; wp.media.editor.open(button); return false; }); $('#togglelabels').on('click',function(e){ e.preventDefault(); $('#labels_expand').toggleClass('toggledclosed'); }); $('#togglesettings').on('click',function(e) { e.preventDefault(); $('#settings_expand').toggleClass('toggledclosed'); }); $('#labels_expand,#settings_expand').on('focus',function(e) { if ( $(this).hasClass('toggledclosed') ) { $(this).toggleClass('toggledclosed'); } }); $('#labels_expand legend,#settings_expand legend').on('click',function(e){ $(this).parent().toggleClass('toggledclosed'); }); $('.cptui-help').on('click',function(e){ e.preventDefault(); }); $('.cptui-taxonomy-submit').on('click',function(e){ if ( $('.cptui-table :checkbox:checked').length == 0 ) { e.preventDefault(); alert( cptui_tax_data.no_associated_type ); } }); })(jQuery);
"""客户端上传客户端号和分数(注意:并不会上传排名,客户端无法上传排名),同一个客户端可以多次上传分数,取最新的一次分数""" def uploading(): """客户端上传客户编号和分数""" client_no = input("请输入您的客户编号:") score = int(input("请输入您的分数:")) return client_no, score uploading() # 创建客户表,存储客户编号,分数和排名 class Client(BaseModel):
# Copyright (c) 2020, Michael Boyle # See LICENSE file for details: # <https://github.com/moble/quaternionic/blob/master/LICENSE> """Essential functions for quaternion algebra. These functions form the basic algebraic behavior of quaternions — addition, multiplication, exp, log, etc. Each function takes one or two array-like inputs — depending on whether it is unary or binary — as the first parameter or two, and another array-like object for output as the final parameter. Even for functions that return a single float or bool, the output must be array-like so that it can be modified inside the function. These functions are JIT-compiled by numba's `guvectorize` function, meaning that they can also act on arbitrary arrays just like standard numpy arrays, as long as the final dimension of any quaternion-valued input has size 4 to represent the components of the quaternion. These functions are generic, meaning that they can be used without the `quaternionic.array` object. However, these functions are implemented as the "ufunc"s of those arrays, meaning that we can perform algebra directly on them in natural ways — as in `q1+q2`, `q1*q2`, etc. — and using the standard numpy functions — as in `np.exp(q)`, `np.log(q)`, etc. For this purpose, we implement as many of [the standard ufuncs](https://numpy.org/doc/stable/reference/ufuncs.html) as make sense for quaternions. For the most part, this means ignoring operations related to integers, remainders, ordering, or trigonometric functions. The bit-twiddling functions are re-interpreted as they usually are in geometric algebra to denote geometric operations. All functions in this module are magically compiled to ufuncs in `__init__.py` and placed attached to the `algebra_ufuncs` object. """ import numpy as np from . 
import float64, boolean from .utilities import attach_typelist_and_signature _quaternion_resolution = 10 * np.finfo(float).resolution @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def add(q1, q2, qout): """Add two quaternions q1+q2""" qout[0] = q1[0] + q2[0] qout[1] = q1[1] + q2[1] qout[2] = q1[2] + q2[2] qout[3] = q1[3] + q2[3] @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def subtract(q1, q2, qout): """Subtract quaternion q1-q2""" qout[0] = q1[0] - q2[0] qout[1] = q1[1] - q2[1] qout[2] = q1[2] - q2[2] qout[3] = q1[3] - q2[3] @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def multiply(q1, q2, qout): """Multiply quaternions q1*q2""" a = q1[0]*q2[0] - q1[1]*q2[1] - q1[2]*q2[2] - q1[3]*q2[3] b = q1[0]*q2[1] + q1[1]*q2[0] + q1[2]*q2[3] - q1[3]*q2[2] c = q1[0]*q2[2] - q1[1]*q2[3] + q1[2]*q2[0] + q1[3]*q2[1] d = q1[0]*q2[3] + q1[1]*q2[2] - q1[2]*q2[1] + q1[3]*q2[0] qout[0] = a qout[1] = b qout[2] = c qout[3] = d @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def divide(q1, q2, qout): """Divide quaternions q1/q2 = q1 * q2.inverse""" q2norm = q2[0]**2 + q2[1]**2 + q2[2]**2 + q2[3]**2 a = (+q1[0]*q2[0] + q1[1]*q2[1] + q1[2]*q2[2] + q1[3]*q2[3]) / q2norm b = (-q1[0]*q2[1] + q1[1]*q2[0] - q1[2]*q2[3] + q1[3]*q2[2]) / q2norm c = (-q1[0]*q2[2] + q1[1]*q2[3] + q1[2]*q2[0] - q1[3]*q2[1]) / q2norm d = (-q1[0]*q2[3] - q1[1]*q2[2] + q1[2]*q2[1] + q1[3]*q2[0]) / q2norm qout[0] = a qout[1] = b qout[2] = c qout[3] = d true_divide = divide @attach_typelist_and_signature([(float64, float64[:], float64[:])], '(),(n)->(n)') def multiply_scalar(s, q, qout): """Multiply scalar by quaternion s*q""" qout[0] = s * q[0] qout[1] = s * q[1] qout[2] = s * q[2] qout[3] = s * q[3] @attach_typelist_and_signature([(float64, float64[:], float64[:])], '(),(n)->(n)') def divide_scalar(s, q, qout): """Divide scalar by quaternion s/q = s * q.inverse""" qnorm = q[0]**2 + q[1]**2 + q[2]**2 + q[3]**2 qout[0] = s * q[0] / qnorm qout[1] = -s * q[1] / qnorm qout[2] = -s * q[2] / qnorm qout[3] = -s * q[3] / qnorm true_divide_scalar = divide_scalar @attach_typelist_and_signature([(float64[:], float64, float64[:])], '(n),()->(n)') def scalar_multiply(q, s, qout): """Multiply quaternion by scalar q*s""" qout[0] = q[0] * s qout[1] = q[1] * s qout[2] = q[2] * s qout[3] = q[3] * s @attach_typelist_and_signature([(float64[:], float64, float64[:])], '(n),()->(n)') def scalar_divide(q, s, qout): """Divide quaternion by scalar q/s""" qout[0] = q[0] / s qout[1] = q[1] / s qout[2] = q[2] / s qout[3] = q[3] / s scalar_true_divide = scalar_divide @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def negative(q, qout): """Return negative quaternion -q""" qout[0] = -q[0] qout[1] = -q[1] qout[2] = -q[2] qout[3] = -q[3] @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def positive(q, qout): """Return input quaternion q""" qout[0] = q[0] qout[1] = q[1] qout[2] = q[2] qout[3] = q[3] @attach_typelist_and_signature([(float64[:], float64, float64[:])], '(n),()->(n)') def float_power(q, s, qout): """Raise quaternion to scalar power exp(log(q)*s)""" b = np.sqrt(q[1]**2 + q[2]**2 + q[3]**2) if np.abs(b) <= _quaternion_resolution * np.abs(q[0]): if q[0] < 0.0: if np.abs(q[0] + 1) > _quaternion_resolution: qout[0] = np.log(-q[0]) qout[1] = np.pi qout[2] = 0.0 qout[3] = 0.0 else: qout[0] = 0.0 qout[1] = np.pi qout[2] = 0.0 qout[3] = 0.0 else: qout[0] = np.log(q[0]) 
qout[1] = 0.0 qout[2] = 0.0 qout[3] = 0.0 else: v = np.arctan2(b, q[0]) f = v / b qout[0] = np.log(q[0] * q[0] + b * b) / 2.0 qout[1] = f * q[1] qout[2] = f * q[2] qout[3] = f * q[3] qout *= s vnorm = np.sqrt(qout[1]**2 + qout[2]**2 + qout[3]**2) if vnorm > _quaternion_resolution: e = np.exp(qout[0]) qout[0] = e * np.cos(vnorm) qout[1:] *= e * np.sin(vnorm) / vnorm else: qout[0] = np.exp(qout[0]) qout[1] = 0.0 qout[2] = 0.0 qout[3] = 0.0 @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->()') def absolute(q, qout): """Return absolute value of quaternion |q|""" qout[0] = np.sqrt(q[0]**2 + q[1]**2 + q[2]**2 + q[3]**2) @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def conj(q, qout): """Return quaternion-conjugate of quaternion q̄""" qout[0] = +q[0] qout[1] = -q[1] qout[2] = -q[2] qout[3] = -q[3] conjugate = conj @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def exp(q, qout): """Return exponential of input quaternion exp(q)""" vnorm = np.sqrt(q[1]**2 + q[2]**2 + q[3]**2) if vnorm > _quaternion_resolution: s = np.sin(vnorm) / vnorm e = np.exp(q[0]) qout[0] = e * np.cos(vnorm) qout[1] = e * s * q[1] qout[2] = e * s * q[2] qout[3] = e * s * q[3] else: qout[0] = np.exp(q[0]) qout[1] = 0.0 qout[2] = 0.0 qout[3] = 0.0 @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def log(q, qout): """Return logarithm of input quaternion log(q)""" b = np.sqrt(q[1]**2 + q[2]**2 + q[3]**2) if b <= _quaternion_resolution * np.abs(q[0]): if q[0] < 0.0: if np.abs(q[0] + 1) > _quaternion_resolution: qout[0] = np.log(-q[0]) qout[1] = np.pi qout[2] = 0.0 qout[3] = 0.0 else: qout[0] = 0.0 qout[1] = np.pi qout[2] = 0.0 qout[3] = 0.0 else: qout[0] = np.log(q[0]) qout[1] = 0.0 qout[2] = 0.0 qout[3] = 0.0 else: v = np.arctan2(b, q[0]) f = v / b qout[0] = np.log(q[0] * q[0] + b * b) / 2.0 qout[1] = f * q[1] qout[2] = f * q[2] qout[3] = f * q[3] @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def sqrt(q, qout): """Return square-root of input quaternion √q. The general formula whenever the denominator is nonzero is ``` √q = (|q| + q) / √(2|q| + 2q.w) ``` This can be proven by expanding `q` as `q.w + q.vec` and multiplying the expression above by itself. When the denominator is zero, the quaternion is a pure-real negative number. It is not clear what the appropriate square-root is in this case (because the quaternions come with infinitely many elements that square to -1), so we arbitrarily choose the result to be proportional to the `x` quaternion. 
""" # √Q = (a + Q) / √(2*a + 2*Q[0]) a = np.sqrt(q[0]**2 + q[1]**2 + q[2]**2 + q[3]**2) if np.abs(a + q[0]) < _quaternion_resolution * a: qout[0] = 0.0 qout[1] = np.sqrt(a) qout[2] = 0.0 qout[3] = 0.0 else: c = np.sqrt(0.5 / (a + q[0])) qout[0] = (a + q[0]) * c qout[1] = q[1] * c qout[2] = q[2] * c qout[3] = q[3] * c @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def square(q, qout): """Return square of quaternion q*q""" a = q[0]**2 - q[1]**2 - q[2]**2 - q[3]**2 b = 2*q[0]*q[1] c = 2*q[0]*q[2] d = 2*q[0]*q[3] qout[0] = a qout[1] = b qout[2] = c qout[3] = d @attach_typelist_and_signature([(float64[:], float64[:])], '(n)->(n)') def reciprocal(q, qout): """Return reciprocal (inverse) of quaternion q.inverse""" norm = q[0]**2 + q[1]**2 + q[2]**2 + q[3]**2 qout[0] = q[0] / norm qout[1] = -q[1] / norm qout[2] = -q[2] / norm qout[3] = -q[3] / norm @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def bitwise_or(q1, q2, qout): """Return scalar product of quaternions q1|q2. If we denote by `⟨a⟩ₛ` the grade-s component of the general multivector `a`, we can express this product as ``` a | b = Σₛ,ᵣ ⟨⟨a⟩ₛ ⟨b⟩ᵣ⟩₀ ``` Note that this is different from the "Hestenes dot" product where the sum runs over s≠0 and r≠0; that is the product returned by `galgebra` using this operator. """ qout[0] = q1[0]*q2[0] - q1[1]*q2[1] - q1[2]*q2[2] - q1[3]*q2[3] qout[1] = 0.0 qout[2] = 0.0 qout[3] = 0.0 @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def bitwise_xor(q1, q2, qout): """Return outer product of quaternions q1^q2. This is the generalized outer product of geometric algebra. If we denote by `⟨a⟩ₛ` the grade-s component of the general multivector `a`, we can express this product as ``` a ^ b = Σₛ,ᵣ ⟨⟨a⟩ₛ ⟨b⟩ᵣ⟩ₛ₊ᵣ ``` Note that the result may seem surprising because we sometimes think of quaternions as """ a = q1[0]*q2[0] b = q1[0]*q2[1] + q1[1]*q2[0] c = q1[0]*q2[2] + q1[2]*q2[0] d = q1[0]*q2[3] + q1[3]*q2[0] qout[0] = a qout[1] = b qout[2] = c qout[3] = d invert = conj # reversion (= conjugate for quaternion algebra) @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def left_shift(q1, q2, qout): """Return left-contraction of quaternions q1<<q2 = q1⌋q1. For all quaternions `a`, `b`, `c`, we have ``` (a ^ b) * c = a * (b ⌋ c) ``` """ a = q1[0]*q2[0] - q1[1]*q2[1] - q1[2]*q2[2] - q1[3]*q2[3] b = q1[0]*q2[1] c = q1[0]*q2[2] d = q1[0]*q2[3] qout[0] = a qout[1] = b qout[2] = c qout[3] = d @attach_typelist_and_signature([(float64[:], float64[:], float64[:])], '(n),(n)->(n)') def right_shift(q1, q2, qout): """Return right-contraction of quaternions q1>>q2 = q1⌊q2. 
For all quaternions `a`, `b`, `c`, we have ``` c * (b ^ a) = (c ⌊ b) * a ``` """ a = q1[0]*q2[0] - q1[1]*q2[1] - q1[2]*q2[2] - q1[3]*q2[3] b = q1[1]*q2[0] c = q1[2]*q2[0] d = q1[3]*q2[0] qout[0] = a qout[1] = b qout[2] = c qout[3] = d @attach_typelist_and_signature([(float64[:], float64[:], boolean[:])], '(n),(n)->()') def not_equal(q1, q2, bout): bout[0] = np.any(q1[:] != q2[:]) @attach_typelist_and_signature([(float64[:], float64[:], boolean[:])], '(n),(n)->()') def equal(q1, q2, bout): bout[0] = np.all(q1[:] == q2[:]) @attach_typelist_and_signature([(float64[:], float64[:], boolean[:])], '(n),(n)->()') def logical_and(q1, q2, bout): bout[0] = np.any(q1[:]) and np.any(q2[:]) @attach_typelist_and_signature([(float64[:], float64[:], boolean[:])], '(n),(n)->()') def logical_or(q1, q2, bout): bout[0] = np.any(q1[:]) or np.any(q2[:]) @attach_typelist_and_signature([(float64[:], boolean[:])], '(n)->()') def isfinite(qin, bout): bout[0] = np.isfinite(qin[0]) and np.isfinite(qin[1]) and np.isfinite(qin[2]) and np.isfinite(qin[3]) @attach_typelist_and_signature([(float64[:], boolean[:])], '(n)->()') def isinf(qin, bout): bout[0] = np.isinf(qin[0]) or np.isinf(qin[1]) or np.isinf(qin[2]) or np.isinf(qin[3]) @attach_typelist_and_signature([(float64[:], boolean[:])], '(n)->()') def isnan(qin, bout): bout[0] = np.isnan(qin[0]) or np.isnan(qin[1]) or np.isnan(qin[2]) or np.isnan(qin[3])
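# Usage sketch (assumed, not part of this module): once __init__.py attaches these
# guvectorized functions as ufuncs on the quaternionic array type, quaternion algebra
# works through ordinary numpy syntax, as described in the module docstring.
if __name__ == "__main__":
    import quaternionic
    q1 = quaternionic.array([1.0, 2.0, 3.0, 4.0])
    q2 = quaternionic.array([0.0, 1.0, 0.0, 0.0])
    print(q1 * q2)      # dispatches to `multiply` above
    print(np.exp(q2))   # dispatches to `exp` above
    print(np.abs(q1))   # dispatches to `absolute` above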
/* eslint import/no-extraneous-dependencies: 0, no-console: 0 */ import express from 'express'; import webpack from 'webpack'; const { webpackPort, webpackHost } = require('../config/env'); const webpackConfig = require('../config/webpack-dev.config'); const compiler = webpack(webpackConfig); const serverOptions = { contentBase: `http://${webpackHost}:${webpackPort}`, quiet: true, noInfo: true, hot: true, inline: true, lazy: false, publicPath: webpackConfig.output.publicPath, headers: { 'Access-Control-Allow-Origin': '*' }, stats: { colors: true }, watchOptions: { aggregateTimeout: 300, poll: true } }; const app = express(); app.use(require('webpack-dev-middleware')(compiler, serverOptions)); app.use(require('webpack-hot-middleware')(compiler)); app.listen(webpackPort, (err) => { if (err) { console.error(err); } else { console.info(`Webpack development server listening on port ${webpackPort}`); } });
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #ifndef __cplusplus # error This header can only be compiled as C++. #endif #ifndef __INCLUDED_PROTOCOL_H__ #define __INCLUDED_PROTOCOL_H__ #include "serialize.h" #include "netbase.h" #include <string> #include "uint256.h" extern bool fTestNet; static inline unsigned short GetDefaultPort(const bool testnet = fTestNet) { return testnet ? 38696 : 18696; } extern unsigned char pchMessageStart[4]; /** Message header. * (4) message start. * (12) command. * (4) size. * (4) checksum. */ class CMessageHeader { public: CMessageHeader(); CMessageHeader(const char* pszCommand, unsigned int nMessageSizeIn); std::string GetCommand() const; bool IsValid() const; IMPLEMENT_SERIALIZE ( READWRITE(FLATDATA(pchMessageStart)); READWRITE(FLATDATA(pchCommand)); READWRITE(nMessageSize); READWRITE(nChecksum); ) // TODO: make private (improves encapsulation) public: enum { MESSAGE_START_SIZE=sizeof(::pchMessageStart), COMMAND_SIZE=12, MESSAGE_SIZE_SIZE=sizeof(int), CHECKSUM_SIZE=sizeof(int), MESSAGE_SIZE_OFFSET=MESSAGE_START_SIZE+COMMAND_SIZE, CHECKSUM_OFFSET=MESSAGE_SIZE_OFFSET+MESSAGE_SIZE_SIZE, HEADER_SIZE=MESSAGE_START_SIZE+COMMAND_SIZE+MESSAGE_SIZE_SIZE+CHECKSUM_SIZE }; char pchMessageStart[MESSAGE_START_SIZE]; char pchCommand[COMMAND_SIZE]; unsigned int nMessageSize; unsigned int nChecksum; }; /** nServices flags */ enum { NODE_NETWORK = (1 << 0), NODE_BLOOM = (1 << 1), }; /** A CService with information about it as peer */ class CAddress : public CService { public: CAddress(); explicit CAddress(CService ipIn, uint64 nServicesIn=NODE_NETWORK); void Init(); IMPLEMENT_SERIALIZE ( CAddress* pthis = const_cast<CAddress*>(this); CService* pip = (CService*)pthis; if (fRead) pthis->Init(); if (nType & SER_DISK) READWRITE(nVersion); if ((nType & SER_DISK) || (nVersion >= CADDR_TIME_VERSION && !(nType & SER_GETHASH))) READWRITE(nTime); READWRITE(nServices); READWRITE(*pip); ) void print() const; // TODO: make private (improves encapsulation) public: uint64 nServices; // disk and network only unsigned int nTime; // memory only int64 nLastTry; }; /** inv message data */ class CInv { public: CInv(); CInv(int typeIn, const uint256& hashIn); CInv(const std::string& strType, const uint256& hashIn); IMPLEMENT_SERIALIZE ( READWRITE(type); READWRITE(hash); ) friend bool operator<(const CInv& a, const CInv& b); bool IsKnownType() const; const char* GetCommand() const; std::string ToString() const; void print() const; // TODO: make private (improves encapsulation) public: int type; uint256 hash; }; enum { MSG_TX = 1, MSG_BLOCK, // Nodes may always request a MSG_FILTERED_BLOCK in a getdata, however, // MSG_FILTERED_BLOCK should not appear in any invs except as a part of getdata. MSG_FILTERED_BLOCK, }; #endif // __INCLUDED_PROTOCOL_H__
# Copyright 1999-2020 Alibaba Group Holding Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import asyncio import concurrent.futures as futures from typing import Any, Callable, Coroutine, Dict, Type from urllib.parse import urlparse from ....utils import implements, classproperty from .base import Channel, ChannelType, Server, Client from .core import register_client, register_server from .errors import ChannelClosed DEFAULT_DUMMY_ADDRESS = 'dummy://0' class DummyChannel(Channel): """ Channel for communications in same process. """ __slots__ = '_in_queue', '_out_queue', '_closed' name = 'dummy' def __init__(self, in_queue: asyncio.Queue, out_queue: asyncio.Queue, local_address: str = None, dest_address: str = None, compression=None): super().__init__(local_address=local_address, dest_address=dest_address, compression=compression) self._in_queue = in_queue self._out_queue = out_queue self._closed = asyncio.Event() @property @implements(Channel.type) def type(self) -> ChannelType: return ChannelType.local @implements(Channel.send) async def send(self, message: Any): if self._closed.is_set(): # pragma: no cover raise ChannelClosed('Channel already closed, cannot send message') # put message directly into queue await self._out_queue.put(message) @implements(Channel.recv) async def recv(self): if self._closed.is_set(): # pragma: no cover raise ChannelClosed('Channel already closed, cannot write message') try: return await self._in_queue.get() except RuntimeError: if self._closed.is_set(): pass @implements(Channel.close) async def close(self): self._closed.set() @property @implements(Channel.closed) def closed(self) -> bool: return self._closed.is_set() @register_server class DummyServer(Server): __slots__ = '_closed', _address_to_instances: Dict[str, "DummyServer"] = dict() scheme = 'dummy' def __init__(self, address: str, channel_handler: Callable[[Channel], Coroutine] = None): super().__init__(address, channel_handler) self._closed = asyncio.Event() @classmethod def get_instance(cls, address: str): return cls._address_to_instances[address] @classproperty @implements(Server.client_type) def client_type(self) -> Type["Client"]: return DummyClient @property @implements(Server.channel_type) def channel_type(self) -> ChannelType: return ChannelType.local @staticmethod @implements(Server.create) async def create(config: Dict) -> "DummyServer": config = config.copy() address = config.pop('address', DEFAULT_DUMMY_ADDRESS) handle_channel = config.pop('handle_channel') if urlparse(address).scheme != DummyServer.scheme: # pragma: no cover raise ValueError(f'Address for DummyServer ' f'should be starts with "dummy://", ' f'got {address}') if config: # pragma: no cover raise TypeError(f'Creating DummyServer got unexpected ' f'arguments: {",".join(config)}') try: return DummyServer.get_instance(address) except KeyError: server = DummyServer(address, handle_channel) DummyServer._address_to_instances[address] = server return server @implements(Server.start) async def start(self): # nothing needs to do for dummy server 
pass @implements(Server.join) async def join(self, timeout=None): wait_coro = self._closed.wait() try: await asyncio.wait_for(wait_coro, timeout=timeout) except (futures.TimeoutError, asyncio.TimeoutError): pass @implements(Server.on_connected) async def on_connected(self, *args, **kwargs): channel = args[0] assert isinstance(channel, DummyChannel) if kwargs: # pragma: no cover raise TypeError(f'{type(self).__name__} got unexpected ' f'arguments: {",".join(kwargs)}') await self.channel_handler(channel) @implements(Server.stop) async def stop(self): self._closed.set() del DummyServer._address_to_instances[self.address] @property @implements(Server.stopped) def stopped(self) -> bool: return self._closed.is_set() @register_client class DummyClient(Client): __slots__ = '_task', scheme = DummyServer.scheme def __init__(self, local_address: str, dest_address: str, channel: Channel): super().__init__(local_address, dest_address, channel) self._task = None @staticmethod @implements(Client.connect) async def connect(dest_address: str, local_address: str = None, **kwargs) -> "Client": if urlparse(dest_address).scheme != DummyServer.scheme: # pragma: no cover raise ValueError(f'Destination address should start with "dummy://" ' f'for DummyClient, got {dest_address}') server = DummyServer.get_instance(dest_address) if server is None: # pragma: no cover raise RuntimeError('DummyServer needs to be created ' 'first before DummyClient') q1, q2 = asyncio.Queue(), asyncio.Queue() client_channel = DummyChannel(q1, q2) server_channel = DummyChannel(q2, q1) conn_coro = server.on_connected(server_channel) task = asyncio.create_task(conn_coro) client = DummyClient(local_address, dest_address, client_channel) client._task = task return client @implements(Client.close) async def close(self): await super().close() self._task.cancel() self._task = None
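# Usage sketch (assumed, not part of this module): connect a DummyClient to a DummyServer
# in the same process and echo one message through the in-memory queues. This assumes the
# Client base class exposes the underlying channel as `.channel`.
if __name__ == '__main__':
    async def _echo_handler(channel: Channel):
        # server-side handler: read one message and send it straight back
        msg = await channel.recv()
        await channel.send(msg)

    async def _demo():
        server = await DummyServer.create(
            {'address': DEFAULT_DUMMY_ADDRESS, 'handle_channel': _echo_handler})
        await server.start()
        client = await DummyClient.connect(DEFAULT_DUMMY_ADDRESS)
        await client.channel.send('ping')
        print(await client.channel.recv())  # expected: 'ping'
        await client.close()
        await server.stop()

    asyncio.run(_demo())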
const flickr = { flickr: { width: 1536, height: 1792, paths: [{ d: 'M1248 128q119 0 203.5 84.5t84.5 203.5v960q0 119-84.5 203.5t-203.5 84.5h-960q-119 0-203.5-84.5t-84.5-203.5v-960q0-119 84.5-203.5t203.5-84.5h960zM698 896q0-88-62-150t-150-62-150 62-62 150 62 150 150 62 150-62 62-150zM1262 896q0-88-62-150t-150-62-150 62-62 150 62 150 150 62 150-62 62-150z' }] } }; export default flickr;
from typing import Union from jsonrpcclient.requests import Request class Engine: def __init__(self, client): self.client = client def get_coinbase(self): payload = Request("engine_getCoinbase") response = self.client.send(payload) return response.data.result def get_block_reward(self, block_number: Union[int, None]): payload = Request("engine_getBlockReward", block_number) response = self.client.send(payload) return response.data.result def get_recommended_confirmation(self): payload = Request("engine_getRecommendedConfirmation") response = self.client.send(payload) return response.data.result def get_custom_action_data( self, handler_id: int, data_bytes: str, block_number: Union[int, None] ): payload = Request( "engine_getCustomActionData", handler_id, data_bytes, block_number, ) response = self.client.send(payload) return response.data.result
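# Usage sketch (assumed, not part of this module): `client` only needs a send() method
# whose response exposes `.data.result`; jsonrpcclient's HTTPClient fits that shape.
# The node URL below is hypothetical.
if __name__ == "__main__":
    from jsonrpcclient.clients.http_client import HTTPClient
    engine = Engine(HTTPClient("http://localhost:8888"))
    print(engine.get_coinbase())
    print(engine.get_block_reward(None))  # None is passed through as block_number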
const { Client } = require("@elastic/elasticsearch"); const client = new Client({ node: "http://localhost:9200" }); async function run() { // Let's start by indexing some data await client.index({ index: "game-of-thrones", body: { character: "Ned Stark", quote: "Winter is coming.", }, }); await client.index({ index: "game-of-thrones", body: { character: "Daenerys Targaryen", quote: "I am the mother of dragons.", }, }); await client.index({ index: "game-of-thrones", // here we are forcing an index refresh, // otherwise we will not get any result // in the consequent search refresh: true, body: { character: "Tyrion Lannister", quote: "A mind needs books like a sword needs a whetstone.", }, }); // Let's search! const { body } = await client.search({ index: "game-of-thrones", body: { query: { match: { quote: "winter", }, }, }, }); console.log(body.hits.hits); } run().catch(console.log);
""" Django settings for myproject project. Generated by 'django-admin startproject' using Django 2.0.3. For more information on this file, see https://docs.djangoproject.com/en/2.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.0/ref/settings/ """ import os from decouple import config, Csv import dj_database_url # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! # SECRET_KEY = 'j(up0zin%yzi(%ig0=mxzh+r$ufw023s@etkk7x8owu21f+bj-' SECRET_KEY = config('SECRET_KEY') # SECURITY WARNING: don't run with debug turned on in production! # DEBUG = True DEBUG = config('DEBUG', default=False, cast=bool) # ALLOWED_HOSTS = [] ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv()) # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.humanize', # <- here 'widget_tweaks', # 'boards.templatetags.form_tags', 'boards', # 板块 'accounts', # 账户 ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'myproject.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ os.path.join(BASE_DIR, 'templates') ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'myproject.wsgi.application' # Database # https://docs.djangoproject.com/en/2.0/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # DATABASES = { # 'default': dj_database_url.config( # default=config('DATABASE_URL') # ) # } # Password validation # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.0/howto/static-files/ STATIC_URL = '/static/' # STATICFILES_DIRS = [ # os.path.join(BASE_DIR, 'static'), # ] STATIC_ROOT = os.path.join(BASE_DIR, 'static') LOGOUT_REDIRECT_URL = 'home' LOGIN_REDIRECT_URL = 'home' EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' LOGIN_URL = 'login'
r""" Solve S-unit equation x + y = 1 Inspired by work of Tzanakis--de Weger, Baker--Wustholz and Smart, we use the LLL methods in Sage to implement an algorithm that returns all S-unit solutions to the equation $x + y = 1$. REFERENCES: - [MR2016]_ - [Sma1995]_ - [Sma1998]_ - [Yu2007]_ - [AKMRVW]_ AUTHORS: - Alejandra Alvarado, Angelos Koutsianas, Beth Malmskog, Christopher Rasmussen, David Roe, Christelle Vincent, Mckenzie West (2018-04-25 to 2018-11-09): original version EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import solve_S_unit_equation, eq_up_to_order sage: K.<xi> = NumberField(x^2+x+1) sage: S = K.primes_above(3) sage: expected = [((0, 1), (4, 0), xi + 2, -xi - 1), ....: ((1, -1), (0, -1), 1/3*xi + 2/3, -1/3*xi + 1/3), ....: ((1, 0), (5, 0), xi + 1, -xi), ....: ((2, 0), (5, 1), xi, -xi + 1)] sage: sols = solve_S_unit_equation(K, S, 200) sage: eq_up_to_order(sols, expected) True .. TODO:: - Use Cython to improve timings on the sieve """ # **************************************************************************** # Copyright (C) 2020 Alejandra Alvarado <aalvarado2 at eiu.edu> # Angelos Koutsianas <koutsis.jr at gmail.com> # Beth Malmskog <beth.malmskog at gmail.com> # Christopher Rasmussen <crasmussen at wesleyan.edu> # Christelle Vincent <christelle.vincent at uvm.edu> # Mckenzie West <westmr at uwec.edu> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # https://www.gnu.org/licenses/ # **************************************************************************** from sage.rings.all import Infinity from sage.symbolic.ring import SR from sage.rings.integer import Integer from sage.rings.integer_ring import ZZ from sage.rings.real_mpfr import RealField, RR from sage.rings.complex_mpfr import ComplexField from sage.functions.log import exp from sage.rings.rational_field import QQ from sage.rings.number_field.number_field import is_real_place, refine_embedding from sage.rings.number_field.unit_group import UnitGroup from sage.rings.finite_rings.integer_mod_ring import Integers from sage.rings.finite_rings.integer_mod import mod from sage.rings.padics.factory import Qp from sage.combinat.combination import Combinations from sage.misc.all import prod from sage.arith.all import factorial from sage.matrix.constructor import matrix, identity_matrix, vector, block_matrix, zero_matrix from sage.modules.free_module_element import zero_vector from itertools import combinations_with_replacement from sage.arith.all import gcd, lcm, CRT from copy import copy import itertools def column_Log(SUK, iota, U, prec=106): r""" Return the log vector of ``iota``; i.e., the logs of all the valuations. INPUT: - ``SUK`` -- a group of `S`-units - ``iota`` -- an element of ``K`` - ``U`` -- a list of places (finite or infinite) of ``K`` - ``prec`` -- the precision of the real field (default: 106) OUTPUT: The log vector as a list of real numbers EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import column_Log sage: K.<xi> = NumberField(x^3-3) sage: S = tuple(K.primes_above(3)) sage: SUK = UnitGroup(K, S=S) sage: phi_complex = K.places()[1] sage: v_fin = S[0] sage: U = [phi_complex, v_fin] sage: column_Log(SUK, xi^2, U) # abs tol 1e-29 [1.464816384890812968648768625966, -2.197224577336219382790490473845] REFERENCES: - [Sma1995]_ p. 
823 """ R = RealField(prec) return [ R(SUK.number_field().abs_val(v, iota, prec)).log() for v in U] def c3_func(SUK, prec=106): r""" Return the constant `c_3` from [AKMRVW]_. INPUT: - ``SUK`` -- a group of `S`-units - ``prec`` -- the precision of the real field (default: 106) OUTPUT: The constant ``c3``, as a real number EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import c3_func sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(3))) sage: c3_func(SUK) # abs tol 1e-29 0.4257859134798034746197327286726 .. NOTE:: The numerator should be as close to 1 as possible, especially as the rank of the `S`-units grows large REFERENCES: - [AKMRVW]_ arXiv:1903.00977 """ R = RealField(prec) all_places = list(SUK.primes()) + SUK.number_field().places(prec) Possible_U = Combinations(all_places, SUK.rank()) c1 = R(1) # guarantees final c1 >= 1 for U in Possible_U: # first, build the matrix C_{i,U} columns_of_C = [] for unit in SUK.fundamental_units(): columns_of_C.append(column_Log(SUK, unit, U, prec)) C = matrix(SUK.rank(), SUK.rank(), columns_of_C) # Is it invertible? if abs(C.determinant()) > 10**(-10): poss_c1 = C.inverse().apply_map(abs).norm(Infinity) c1 = R(max(poss_c1, c1)) return R(0.9999999) / (c1*SUK.rank()) def c4_func(SUK, v, A, prec=106): r""" Return the constant `c_4` from Smart's TCDF paper, [Sma1995]_. INPUT: - ``SUK`` -- a group of `S`-units - ``v`` -- a place of ``K``, finite (a fractional ideal) or infinite (element of ``SUK.number_field().places(prec)``) - ``A`` -- the set of the product of the coefficients of the ``S``-unit equation with each root of unity of ``K`` - ``prec`` -- the precision of the real field (default: 106) OUTPUT: The constant ``c4``, as a real number EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import c4_func sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(3))) sage: phi_real = K.places()[0] sage: phi_complex = K.places()[1] sage: v_fin = tuple(K.primes_above(3))[0] sage: A = K.roots_of_unity() sage: c4_func(SUK,phi_real,A) 1.000000000000000000000000000000 sage: c4_func(SUK,phi_complex,A) 1.000000000000000000000000000000 sage: c4_func(SUK,v_fin,A) 1.000000000000000000000000000000 REFERENCES: - [Sma1995]_ p. 824 """ return max(SUK.number_field().abs_val(v, alpha, prec) for alpha in A) def beta_k(betas_and_ns): r""" Return a pair `[\beta_k,|beta_k|_v]`, where `\beta_k` has the smallest nonzero valuation in absolute value of the list ``betas_and_ns``. INPUT: - ``betas_and_ns`` -- a list of pairs ``[beta,val_v(beta)]`` outputted from the function where ``beta`` is an element of ``SUK.fundamental_units()`` OUTPUT: The pair ``[beta_k,v(beta_k)]``, where ``beta_k`` is an element of ``K`` and ``val_v(beta_k)`` is a integer EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import beta_k sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(3))) sage: v_fin = tuple(K.primes_above(3))[0] sage: betas = [ [beta, beta.valuation(v_fin)] for beta in SUK.fundamental_units() ] sage: beta_k(betas) [xi, 1] REFERENCES: - [Sma1995]_ pp. 824-825 """ for pair in betas_and_ns: if abs( pair[1] ) != 0: good_pair = pair break for pair in betas_and_ns: if ( abs(pair[1]) != 0 and abs(pair[1]) < abs(good_pair[1]) ): good_pair = pair return good_pair def mus(SUK, v): r""" Return a list `[\mu]`, for `\mu` defined in [AKMRVW]_. 
INPUT: - ``SUK`` -- a group of `S`-units - ``v`` -- a finite place of ``K`` OUTPUT: A list ``[mus]`` where each ``mu`` is an element of ``K`` EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import mus sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(3))) sage: v_fin = tuple(K.primes_above(3))[0] sage: mus(SUK, v_fin) [xi^2 - 2] REFERENCES: - [AKMRVW]_ """ betas = SUK.fundamental_units() beta_and_ns = [[beta,beta.valuation(v)] for beta in betas] if all(pair[1]==0 for pair in beta_and_ns): return betas else: good_pair = beta_k(beta_and_ns) temp = [(beta[0]**good_pair[1])*(good_pair[0]**(-beta[1])) for beta in beta_and_ns] temp.remove(1) return temp def possible_mu0s(SUK, v): r""" Return a list `[\mu_0]` of all possible `\mu_0` values defined in [AKMRVW]_. INPUT: - ``SUK`` -- a group of `S`-units - ``v`` -- a finite place of ``K`` OUTPUT: A list ``[mu0s]`` where each ``mu0`` is an element of ``K`` EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import possible_mu0s sage: K.<xi> = NumberField(x^3-3) sage: S = tuple(K.primes_above(3)) sage: SUK = UnitGroup(K, S=S) sage: v_fin = S[0] sage: possible_mu0s(SUK,v_fin) [-1, 1] .. NOTE:: `n_0` is the valuation of the coefficient `\alpha_d` of the `S`-unit equation such that `|\alpha_d \tau_d|_v = 1` We have set `n_0 = 0` here since the coefficients are roots of unity `\alpha_0` is not defined in the paper, we set it to be 1 REFERENCES: - [AKMRVW]_ - [Sma1995]_ pp. 824-825, but we modify the definition of ``sigma`` (``sigma_tilde``) to make it easier to code """ beta_and_ns = [[beta,beta.valuation(v)] for beta in SUK.fundamental_units()] betak, nk = beta_k(beta_and_ns) ns = [beta[1] for beta in beta_and_ns if beta[0] != betak] betas = [beta[0] for beta in beta_and_ns if beta[0] != betak] mu0s = [] for rs in combinations_with_replacement(range(abs(nk)), len(betas)): # n_0 = valuation_v of one of the coefficients of the equation = 0 for x + y = 1 p. 824 n_rs = zip(ns, rs) sigma_tilde = -(sum([n_r[0]*n_r[1] for n_r in n_rs])) if sigma_tilde % nk == 0: beta_rs = zip(betas, rs) temp_prod = prod([beta_r[0]**beta_r[1] for beta_r in beta_rs]) * betak**(sigma_tilde/nk) for alpha0 in SUK.roots_of_unity(): if alpha0*temp_prod not in mu0s: mu0s.append(alpha0*temp_prod) return mu0s def Yu_a1_kappa1_c1(p, dK, ep): r""" Compute the constants a(1), kappa1, and c(1) of [Yu2007]_. INPUT: - ``p`` -- a rational prime number - ``dK`` -- the absolute degree of some number field `K` - ``ep`` -- the absolute ramification index of some prime `frak_p` of `K` lying above `p` OUTPUT: The constants a(1), kappa1, and c(1). EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import Yu_a1_kappa1_c1 sage: Yu_a1_kappa1_c1(5, 10, 3) (16, 20, 319) REFERENCES: - [Yu2007]_ """ # For readability, we compute a(1) and kappa1 first. if p == 2: a1 = 32 kappa1 = 40 elif p == 3: a1 = 16 kappa1 = 20 else: if ep >= 2: a1 = 16 kappa1 = 20 else: a1 = 8*(p-1)/(p-2) kappa1 = 10 # Next we compute c(1), which has more cases to consider. if p == 2: c1 = 160 elif p == 3: if dK == 1: c1 = 537 else: c1 = 759 elif p == 5: if ep == 1: c1 = 1473 else: c1 = 319 elif p%4 == 1: if ep == 1: c1 = 1473 else: c1 = 1502 else: # p > 5 and p % 4 == 3 if ep == 1: if dK == 1: c1 = 1288 else: c1 = 1282 else: c1 = 2190 return a1, kappa1, c1 def Yu_condition_115(K, v): r""" Return ``True`` or ``False``, as the number field ``K`` and the finite place ``v`` satisfy condition (1.15) of [Yu2007]_. 
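In the implementation below the test is explicit: let `p` be the rational prime under ``v``, `f` the residue class degree of ``v``, and `w` the number of roots of unity of ``K``, and set `q = 3` if `p = 2` and `q = 2` otherwise. The condition is reported as satisfied exactly when `q = 2` and either `p^f \equiv 1 \pmod 4` or `4 \mid w`, or when `q = 3` and `3 \mid w`.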
INPUT: - ``K`` -- a number field - ``v`` -- a finite place of ``K`` OUTPUT: ``True`` if (1.15) is satisfied, otherwise ``False``. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import Yu_condition_115 sage: K.<a> = NumberField(x^2 + 5) sage: v2 = K.primes_above(2)[0] sage: v11 = K.primes_above(11)[0] sage: Yu_condition_115(K, v2) False sage: Yu_condition_115(K, v11) True REFERENCES: - [Yu2007]_ p. 188 """ p = v.smallest_integer() f = v.residue_class_degree() w = K.number_of_roots_of_unity() # Determine q. if p == 2: q = 3 else: q = 2 # Check the condition. if q == 2: if p**f % 4 == 1: return True if w%4 == 0: return True else: if w%3 == 0: return True return False def Yu_modified_height(mu, n, v, prec=106): r""" Return the value of h(n)(mu) as appearing in [Yu2007]_ equation (1.21). INPUT: - ``mu`` -- an element of a field K - ``n`` -- number of mu_j to be considered in Yu's Theorem. - ``v`` -- a place of K - ``prec`` -- the precision of the real field OUTPUT: The value `h_p(mu)`. EXAMPLES:: sage: K.<a> = NumberField(x^2 + 5) sage: v11 = K.primes_above(11)[0] sage: from sage.rings.number_field.S_unit_solver import Yu_modified_height sage: Yu_modified_height(a, 3, v11) 0.8047189562170501873003796666131 If mu is a root of unity, the output is not zero. :: sage: Yu_modified_height(-1, 3, v11) 0.03425564675426243634374205111379 REFERENCES: - [Yu2007]_ p. 192 """ R = RealField(prec) K = v.number_field() dK = K.degree() p = v.smallest_integer() ep = v.ramification_index() fp = v.residue_class_degree() a1, kappa1, c1 = Yu_a1_kappa1_c1(p, dK, ep) h0 = mu.global_height(prec) h1 = R( fp * R(p).log() / (kappa1 * (n + 4) * dK) ) if h0 > h1: return h0 else: return h1 def Omega_prime(dK, v, mu_list, prec=106): r""" Return the constant Omega' appearing in [AKMRVW]_. INPUT: - ``dK`` -- the degree of a number field `K` - ``v`` -- a finite place of `K` - ``mu_list`` -- a list of nonzero elements of `K`. It is assumed that the sublist mu[1:] is multiplicatively independent. - ``prec`` -- the precision of the real field OUTPUT: The constant `Omega'`. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import mus, Omega_prime sage: K.<a> = NumberField(x^3 - 3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(6))) sage: v = K.primes_above(3)[0] sage: mu_list = [-1] + mus(SUK, v) sage: dK = K.degree() sage: Omega_prime(dK, v, mu_list) 0.000487349679922696 REFERENCES: - [AKMRVW]_ arXiv:1903:.00977 """ R = RealField(prec) omega_prime = R(1) for mu in mu_list[1:]: omega_prime *= mu.global_height() n = len(mu_list) omega_prime *= Yu_modified_height(mu_list[0], n, v, prec) return omega_prime def Yu_C1_star(n, v, prec=106): r""" Return the constant C_1^* appearing in [Yu2007]_ (1.23). INPUT: - ``n`` -- the number of generators of a multiplicative subgroup of a field `K` - ``v`` -- a finite place of `K` (a fractional ideal) - ``prec`` -- the precision of the real field OUTPUT: The constant `C1_star` as a real number. 
EXAMPLES:: sage: K.<a> = NumberField(x^2 + 5) sage: v11 = K.primes_above(11)[0] sage: from sage.rings.number_field.S_unit_solver import Yu_C1_star sage: Yu_C1_star(1,v11) 2.154667761574516556114215527020e6 REFERENCES: - [Yu2007]_ p.189,193 """ R = RealField(prec) K = v.number_field() dK = K.absolute_degree() p = v.smallest_integer() ep = v.ramification_index() fp = v.residue_class_degree() if p == 2: q = 3 else: q = 2 w = K.number_of_roots_of_unity() u = ZZ(w).valuation(q) a_paren_1, kappa1, c_paren_1 = Yu_a1_kappa1_c1(p, dK, ep) C1 = R(1) C1 *= c_paren_1 C1 *= a_paren_1**n C1 *= (n**n * (n+1)**(n+1))/factorial(n) C1 *= p**fp/(q**u) C1 *= ( dK / (fp * R(p).log()) )**(n+2) C1 *= R (max( dK, exp(1) )).log() C1 *= max( R(exp(4)*(n+1)*dK).log(), ep, fp * R(p).log() ) C1_star = R((n+1) * C1) return C1_star def Yu_bound(SUK, v, prec=106): r""" Return `c8` such that `c8 >= exp(2)/\log(2)` and `ord_p (\Theta - 1) < c8 \log B`, where `\Theta = \prod_{j=1}^n \alpha_j^{b_j}` and `B \geq \max_j |b_j|` and `B \geq 3`. INPUT: - ``SUK`` -- a group of `S`-units - ``v`` -- a finite place of `K` (a fractional ideal) - ``prec`` -- the precision of the real field OUTPUT: The constant `c8` as a real number. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import Yu_bound sage: K.<a> = NumberField(x^2 + 11) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(6))) sage: v = K.primes_above(3)[0] sage: Yu_bound(SUK, v) 9.03984381033128e9 REFERENCES: - [Sma1995]_ p. 825 - [Yu2007]_ p. 189--193 esp. Theorem 1 - [AKMRVW]_ arXiv:1903.00977 """ # We are using Theorem 1 of "p-adic logarithmic forms and group varieties III" by Kunrui Yu. # We require the assumption of (1.18): B \geq max {|b_1|,...,|b_n|,3} # To be sure that the Lemma of Petho-de Weger is applicable in a later function, we always return a value >= exp(2)/log(2). R = RealField(prec) p = v.smallest_integer() K = SUK.number_field() dK = K.absolute_degree() mu_free_gens = mus(SUK, v) poss_mu0 = possible_mu0s(SUK, v) n = 1 + len(mu_free_gens) if Yu_condition_115(K,v): largest_Omega_prime = R(0) for mu0 in poss_mu0: current_Omega_prime = Omega_prime(dK, v, [mu0] + mu_free_gens[:], prec) largest_Omega_prime = max( current_Omega_prime, largest_Omega_prime ) C1star = Yu_C1_star(n, v, prec) return max( exp(R(2))/R(2).log(), largest_Omega_prime * C1star) else: # K and v don't satisfy the theorem hypotheses, and we must move to a quadratic extension L. # For justification of this next bound, see [AKMRVW]. x = SR.var('x') if p == 2: L_over_K = K.extension(x**2 + x + 1, 'xi0') else: L_over_K = K.extension(x**2 + 1, 'xi0') # pick any prime vL over v vL_0 = L_over_K.primes_above(v)[0] e_vL_v = vL_0.relative_ramification_index() # build absolute versions of L and vL L = L_over_K.absolute_field('xi_L') vL_gens = tuple( [L(z) for z in vL_0.gens()] ) vL = L.fractional_ideal( vL_gens ) dL = L.degree() largest_Omega_prime = R(0) for mu0 in poss_mu0: current_Omega_prime = Omega_prime(dL, vL, [mu0] + mu_free_gens[:], prec) largest_Omega_prime = max( current_Omega_prime, largest_Omega_prime ) C1star = Yu_C1_star(n, vL, prec) return max(exp(R(2))/R(2).log(), e_vL_v * largest_Omega_prime * C1star) def K0_func(SUK, A, prec=106): r""" Return the constant `K_0` from [AKMRVW]_. INPUT: - ``SUK`` -- a group of `S`-units - ``A`` -- the set of the products of the coefficients of the `S`-unit equation with each root of unity of ``K`` - ``prec`` -- the precision of the real field (default: 106) OUTPUT: The constant ``K0``, a real number. 
EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import K0_func sage: K.<a> = NumberField(x^2 + 11) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(6))) sage: v = K.primes_above(3)[0] sage: K0_func(SUK, K.roots_of_unity()) 8.84763586062272e12 REFERENCES: - [Sma1995]_ p. 824 - [AKMRVW]_ arXiv:1903.00977 """ R = RealField(prec) K0 = R(1) c3 = c3_func(SUK, prec) for v_l in SUK.primes(): e_l = v_l.residue_class_degree() Norm_v_l = v_l.absolute_norm() c5_l = c3/(e_l * R(Norm_v_l).log()) c8_l = Yu_bound(SUK, v_l, prec) K0_l = (2 * c8_l)/(e_l * c5_l) * R(c8_l / (e_l * c5_l)).log() K0 = max(K0, K0_l) return K0 def c11_func(SUK, v, A, prec=106): r""" Return the constant `c_{11}` from Smart's TCDF paper, [Sma1995]_. INPUT: - ``SUK`` -- a group of `S`-units - ``v`` -- a place of ``K``, finite (a fractional ideal) or infinite (element of ``SUK.number_field().places(prec)``) - ``A`` -- the set of the product of the coefficients of the `S`-unit equation with each root of unity of ``K`` - ``prec`` -- the precision of the real field (default: 106) OUTPUT: The constant ``c11``, a real number EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import c11_func sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(3))) sage: phi_real = K.places()[0] sage: phi_complex = K.places()[1] sage: A = K.roots_of_unity() sage: c11_func(SUK, phi_real, A) # abs tol 1e-29 3.255848343572896153455615423662 sage: c11_func(SUK, phi_complex, A) # abs tol 1e-29 6.511696687145792306911230847323 REFERENCES: - [Sma1995]_ p. 825 """ R = RealField(prec) if is_real_place(v): return R(4*c4_func(SUK, v, A, prec)).log() / c3_func(SUK, prec) else: return 2*R(4*(c4_func(SUK, v, A, prec)).sqrt()).log() / c3_func(SUK, prec) def c13_func(SUK, v, prec=106): r""" Return the constant `c_{13}` from Smart's TCDF paper, [Sma1995]_. INPUT: - ``SUK`` -- a group of `S`-units - ``v`` -- an infinite place of ``K`` (element of ``SUK.number_field().places(prec)``) - ``prec`` -- the precision of the real field (default: 106) OUTPUT: The constant ``c13``, as a real number EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import c13_func sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(3))) sage: phi_real = K.places()[0] sage: phi_complex = K.places()[1] sage: c13_func(SUK, phi_real) # abs tol 1e-29 0.4257859134798034746197327286726 sage: c13_func(SUK, phi_complex) # abs tol 1e-29 0.2128929567399017373098663643363 It is an error to input a finite place. :: sage: phi_finite = K.primes_above(3)[0] sage: c13_func(SUK, phi_finite) Traceback (most recent call last): ... TypeError: Place must be infinite REFERENCES: - [Sma1995]_ p. 825 """ try: v.codomain() except AttributeError: raise TypeError('Place must be infinite') if is_real_place(v): return c3_func(SUK, prec) else: return c3_func(SUK, prec)/2 def K1_func(SUK, v, A, prec=106): r""" Return the constant `K_1` from Smart's TCDF paper, [Sma1995]_. 
INPUT: - ``SUK`` -- a group of `S`-units - ``v`` -- an infinite place of ``K`` (element of ``SUK.number_field().places(prec)``) - ``A`` -- a list of all products of each potential ``a``, ``b`` in the $S$-unit equation ``ax + by + 1 = 0`` with each root of unity of ``K`` - ``prec`` -- the precision of the real field (default: 106) OUTPUT: The constant ``K1,`` a real number EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import K1_func sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(3))) sage: phi_real = K.places()[0] sage: phi_complex = K.places()[1] sage: A = K.roots_of_unity() sage: K1_func(SUK, phi_real, A) 4.483038368145048508970350163578e16 sage: K1_func(SUK, phi_complex, A) 2.073346189067285101984136298965e17 REFERENCES: - [Sma1995]_ p. 825 """ R = RealField(prec) # [Sma1995]_ p. 825 if is_real_place(v): c11 = R(4*c4_func(SUK, v, A, prec)).log() / c3_func(SUK, prec) else: c11 = 2*( R(4*(c4_func(SUK,v, A, prec)).sqrt()).log() ) / c3_func(SUK, prec) # [Sma1995]_ p. 825 if is_real_place(v): c12 = R(2 * c4_func(SUK, v, A, prec)) else: c12 = R(2 * c4_func(SUK, v, A, prec).sqrt()) # [Sma1998]_ p. 225, Theorem A.1 d = SUK.number_field().degree() t = SUK.rank() Baker_C = R(18 * factorial(t+2) * (t+1)**(t+2) * (32*d)**(t+3) * R(2*(t+1) * d).log()) def hprime(SUK, alpha, v): # [Sma1998]_ p. 225 return R(max(alpha.global_height(), 1/SUK.number_field().degree(), abs( v(alpha).log() ) / SUK.number_field().degree())) # [Sma1995]_ p. 825 and [Sma1998]_ p. 225, Theorem A.1 c14 = Baker_C * prod([hprime(SUK, alpha, v) for alpha in SUK.gens_values()]) # [Sma1995]_ p. 825 c13 = c13_func(SUK,v,prec) w = len(SUK.roots_of_unity()) c15 = (2/c13)*(c12.log()+c14*(((t+1)*w*c14/c13).log())) return max([c11, c15]) def minimal_vector(A, y, prec=106): r""" INPUT: - ``A`` : a square n by n non-singular integer matrix whose rows generate a lattice `\mathcal L` - ``y`` : a row (1 by n) vector with integer coordinates - ``prec`` : precision of real field (default: 106) OUTPUT: A lower bound for the square of .. MATH:: \ell (\mathcal L,\vec y) = \begin{cases} \displaystyle\min_{\vec x\in\mathcal L}\Vert\vec x-\vec y\Vert &, \vec y\not\in\mathcal L. \\ \displaystyle\min_{0\neq\vec x\in\mathcal L}\Vert\vec x\Vert &,\vec y\in\mathcal L. \end{cases}` ALGORITHM: The algorithm is based on V.9 and V.10 of [Sma1998]_ EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import minimal_vector sage: B = matrix(ZZ, 2, [1,1,1,0]) sage: y = vector(ZZ, [2,1]) sage: minimal_vector(B, y) 1/2 :: sage: B = random_matrix(ZZ, 3) sage: while not B.determinant(): ....: B = random_matrix(ZZ, 3) sage: B # random [-2 -1 -1] [ 1 1 -2] [ 6 1 -1] sage: y = vector([1, 2, 100]) sage: minimal_vector(B, y) # random 15/28 """ if A.is_singular(): raise ValueError('The matrix A is singular') R = RealField(prec) n = len(y) c1 = 2**(n-1) ALLL = A.LLL() ALLLinv = ALLL.inverse() ybrace = [ abs(R(a-a.round())) for a in y * ALLLinv if (a-a.round()) != 0] if len(ybrace) == 0: return (ALLL.rows()[0].norm())**2 / c1 else: sigma = ybrace[len(ybrace)-1] return ((ALLL.rows()[0].norm())**2 * sigma) / c1 def reduction_step_complex_case(place, B0, list_of_gens, torsion_gen, c13): r""" INPUT: - ``place`` -- (ring morphism) an infinite place of a number field `K` - ``B0`` -- the initial bound - ``list_of_gens`` -- a set of generators of the free part of the group - ``torsion_gen`` -- an element of the torsion part of the group - ``c13`` -- a positive real number OUTPUT: A tuple consisting of: 1. 
a new upper bound, an integer 2. a boolean value, ``True`` if we have to increase precision, otherwise ``False`` .. NOTE:: The constant ``c13`` in Section 5, [AKMRVW]_ This function does handle both real and non-real infinite places. REFERENCES: See [Sma1998]_, [AKMRVW]_. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import reduction_step_complex_case sage: K.<a> = NumberField([x^3-2]) sage: SK = sum([K.primes_above(p) for p in [2,3,5]],[]) sage: G = [g for g in K.S_unit_group(S=SK).gens_values() if g.multiplicative_order()==Infinity] sage: p1 = K.places(prec=100)[1] sage: reduction_step_complex_case(p1, 10^5, G, -1, 2) (18, False) """ prec = place.codomain().precision() R = RealField(prec) CF = ComplexField(prec) n = len(list_of_gens) w = torsion_gen.multiplicative_order() real_part_log_gens = [ R(CF(place(g).log()).real_part()) for g in list_of_gens] imag_part_log_gens = [ R(CF(place(g).log()).imag_part()) for g in list_of_gens] real_part_log_gens += [R(0)] imag_part_log_gens += [2*R.pi()/w] abs_log_parts = [abs(part) for part in real_part_log_gens]+[abs(part) for part in imag_part_log_gens] max_part_log = max(abs_log_parts) npi = [] # we collect the list of indices of log(g) which are not pure imaginary # if this list is empty, we have to take a special case for i in range(len(real_part_log_gens)): lg = real_part_log_gens[i] if abs(lg) > 2**(-place.codomain().precision()): npi.append(i) # someday make this a separate function if not npi: # this is the pure imaginary case. # we have only imaginary numbers C = ZZ(1) S = n * B0**2 T = (n+w+n*w)*B0 / 2 finish = False while not finish: A = identity_matrix(ZZ, n+1) A[n] = vector([(g * C).round() for g in imag_part_log_gens]) if A.is_singular(): C = ZZ(2*C) else: # We have to work with rows because of the .LLL() function A = A.transpose() # Note that l is the an lower bound on the square of the magnitude of the shortest non-zero vector in the lattice generated by A l = minimal_vector(A, zero_vector(ZZ,n+1)) # Checking hypotheses of Lemma 5.3 in our paper: if l <= T**2+S: C = ZZ(2*C) # Need to check precision: must be at least two more than the number of digits in largest entry in A to ensure that we get true rounding-- if prec < R(C*max_part_log).log()/R(2).log()+3: return 0, True else: # Need to check precision: must be at least two more than the number of digits in largest entry in A to ensure that we get true rounding-- if prec < R(C*max_part_log).log()/R(2).log()+3: return 0, True else: Bnew = ((R(C * 2).log() - ((l**2-S).sqrt()-T)).log() / c13).round() finish = True return max(4,w,Bnew), False elif is_real_place(place): # this is the case when we are working with a real embedding, we get savings here C = R(1) S = (n-1) * B0**2 w = place.domain().number_of_roots_of_unity() T = (n*B0+1)/R(2) finish = False while not finish: A = copy(identity_matrix(ZZ, n+1)) # We redefine the imaginary parts in case any generator was negative new_imag_part_log_gens = [0 for i in imag_part_log_gens[:-1]]+[imag_part_log_gens[-1]] A[n-1] = vector([(g*C).round() for g in real_part_log_gens]) A[n] = vector([(g*C).round() for g in new_imag_part_log_gens]) if A.is_singular(): C *= 2 else: # We apply Lemma 5.3 from [AKMRVW] A = A.transpose() l = minimal_vector(A, zero_vector(ZZ,n+1)) # Note that l is the a lower bound on the square of the magnitude of the shortest non-zero vector in the lattice generated by A # Checking hypothesis of lemma 5.3 in [AKMRVW] if l <= T**2 + S: C *= 2 # Need to check precision: must be at least two more than the number of 
digits in largest entry in A to ensure that we get true rounding-- if prec < R(C*max_part_log).log()/R(2).log()+3: return 0, True else: # Need to check precision: must be at least two more than the number of digits in largest entry in A to ensure that we get true rounding-- if prec < R(C*max_part_log).log()/R(2).log()+3: return 0, True else: Bnew = ((R(C * 2).log() - ((l-S).sqrt()-T).log()) / c13).round() finish = True return max(4,w,Bnew), False else: # the case when the real part is not 0 for all log(a_i), see Lemma 5.2 in [AKMRVW] C = R(1) S = (n-1) * B0**2 w = place.domain().number_of_roots_of_unity() T = (n+w+n*w)*B0/R(2).sqrt() finish = False # we reorder the generators to that the real part of the last non-torsion generator is not 0: if n-1 not in npi: new_last_gen_index = npi[0] old_last_gen_real = real_part_log_gens[n-1] old_last_gen_imag = imag_part_log_gens[n-1] real_part_log_gens[n-1] = real_part_log_gens[new_last_gen_index] imag_part_log_gens[n-1] = imag_part_log_gens[new_last_gen_index] real_part_log_gens[new_last_gen_index] = old_last_gen_real imag_part_log_gens[new_last_gen_index] = old_last_gen_imag while not finish: A = copy(identity_matrix(ZZ, n+1)) A[n-1] = vector([(g*C).round() for g in real_part_log_gens]) A[n] = vector([(g*C).round() for g in imag_part_log_gens]) if A.is_singular(): C *= 2 else: # We apply Lemma 5.2 from [AKMRVW] A = A.transpose() l = minimal_vector(A, zero_vector(ZZ,n+1)) # Note that l is the a lower bound on the square of the magnitude of the shortest non-zero vector in the lattice generated by A # Checking hypothesis of lemma 5.2 in [AKMRVW] if l <= T**2 + S: C *= 2 # Need to check precision: must be at least two more than the number of digits in largest entry in A to ensure that we get true rounding-- if prec < R(C*max_part_log).log()/R(2).log()+3: return 0, True else: # Need to check precision: must be at least two more than the number of digits in largest entry in A to ensure that we get true rounding-- if prec < R(C*max_part_log).log()/R(2).log()+3: return 0, True else: Bnew = ((R(C * 2).log() - ((l-S).sqrt()-T).log()) / c13).round() finish = True return max(4,w,Bnew), False def cx_LLL_bound(SUK, A, prec=106): r""" Return the maximum of all of the `K_1`'s as they are LLL-optimized for each infinite place `v`. 
INPUT: - ``SUK`` -- a group of `S`-units - ``A`` -- a list of all products of each potential ``a``, ``b`` in the `S`-unit equation ``ax + by + 1 = 0`` with each root of unity of ``K`` - ``prec`` -- precision of real field (default: 106) OUTPUT: A bound for the exponents at the infinite place, as a real number EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import cx_LLL_bound sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K,S=tuple(K.primes_above(3))) sage: A = K.roots_of_unity() sage: cx_LLL_bound(SUK,A) # long time 35 """ cx_LLL = 0 # initialize a bound, a bad guess, as we iterate over the places of the number field, we will replace its value with the largest complex LLL bound we've found across the places for v in SUK.number_field().places(prec=prec): prec_v = prec c13_LLL = c13_func(SUK, v, prec_v) cx_bound = K1_func(SUK, v, A, prec_v) # cx_bound is the LLL bound according to this place, it will be replaced as LLL gives us smaller bounds new_bound, inc_prec = reduction_step_complex_case(v, cx_bound, SUK.fundamental_units(), SUK.zeta(), c13_LLL) while inc_prec: v = refine_embedding(v) c13_LLL = c13_func(SUK, v, prec_v) cx_bound = K1_func(SUK, v, A, prec_v) new_bound, inc_prec = reduction_step_complex_case(v, cx_bound, SUK.fundamental_units(), SUK.zeta(), c13_LLL) counter = 0 while abs(cx_bound - new_bound) > .5*cx_bound and counter < 15: # We fear a loop that is not convergent, this is the purpose of the counter # Repeat complex LLL until we get essentially no change from it cx_bound = min(cx_bound, new_bound) new_bound, inc_prec = reduction_step_complex_case(v, cx_bound, SUK.fundamental_units(), SUK.zeta(), c13_LLL) while inc_prec: v = refine_embedding(v) c13_LLL = c13_func(SUK, v, prec_v) new_bound, inc_prec = reduction_step_complex_case(v, cx_bound, SUK.fundamental_units(), SUK.zeta(), c13_LLL) counter += 1 cx_bound = min(cx_bound, new_bound) # for this place the complex LLL bound is cx_bound cx_LLL = max(cx_bound, cx_LLL) # compare this value with the complex LLL bounds we have found for the previous places, if it is bigger, replace that bound return cx_LLL def log_p(a, prime, prec): r""" INPUT: - ``a`` -- an element of a number field `K` - ``prime`` -- a prime ideal of the number field `K` - ``prec`` -- a positive integer OUTPUT: An element of `K` which is congruent to the ``prime``-adic logarithm of ``a`` with respect to ``prime`` modulo ``p^prec``, where ``p`` is the rational prime below ``prime`` .. NOTE:: Here we take into account the other primes in `K` above `p` in order to get coefficients with small values EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import log_p sage: K.<a> = NumberField(x^2+14) sage: p1 = K.primes_above(3)[0] sage: p1 Fractional ideal (3, a + 1) sage: log_p(a+2, p1, 20) 8255385638/3*a + 15567609440/3 :: sage: K.<a> = NumberField(x^4+14) sage: p1 = K.primes_above(5)[0] sage: p1 Fractional ideal (5, a + 1) sage: log_p(1/(a^2-4), p1, 30) -42392683853751591352946/25*a^3 - 113099841599709611260219/25*a^2 - 8496494127064033599196/5*a - 18774052619501226990432/25 """ if a == 0: raise ValueError('a is the zero element') if a.valuation(prime) != 0: raise ValueError('The valuation of a with respect to prime is not zero') K = prime.ring() p = prime.smallest_integer() # In order to get an approximation with small coefficients we have to take into account the other primes above p # with negative valuation. 
For example, say prime2 is another (principal ideal) prime above p, and a=(unit)(prime2)^(-k) for some unit and k # a positive integer, and let tilde(a):=a(prime2)^k. Then log_p(a)=log_p(tilde(a))-k(log_p(prime2)), where the series representations # of these two logs will have smaller coefficients. primes = [(-(a.valuation(pr)),pr) for pr in K.primes_above(p) if a.valuation(pr) < 0] local_terms = [] for (val, pr) in primes: # for its pair in primes we find an element in K such that it is divisible only by pr and not by any other ideal above p. Then we take this element in the correct exponent if pr.is_principal(): local_terms.append(pr.gens_reduced()[0]**val) else: local_terms.append(pr.gens()[1]**val) return log_p_series_part(a*prod(local_terms), prime, prec) - sum([log_p_series_part(b, prime, prec) for b in local_terms]) def log_p_series_part(a, prime, prec): r""" INPUT: - ``a`` -- an element of a number field `K` - ``prime`` -- a prime ideal of the number field `K` - ``prec`` -- a positive integer OUTPUT: The ``prime``-adic logarithm of ``a`` and accuracy ``p^prec``, where ``p`` is the rational prime below ``prime`` ALGORITHM: The algorithm is based on the algorithm on page 30 of [Sma1998]_ EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import log_p_series_part sage: K.<a> = NumberField(x^2-5) sage: p1 = K.primes_above(3)[0] sage: p1 Fractional ideal (3) sage: log_p_series_part(a^2-a+1, p1, 30) 120042736778562*a + 263389019530092 :: sage: K.<a> = NumberField(x^4+14) sage: p1 = K.primes_above(5)[0] sage: p1 Fractional ideal (5, a + 1) sage: log_p_series_part(1/(a^2-4), p1, 30) 5628940883264585369224688048459896543498793204839654215019548600621221950915106576555819252366183605504671859902129729380543157757424169844382836287443485157589362653561119898762509175000557196963413830027960725069496503331353532893643983455103456070939403472988282153160667807627271637196608813155377280943180966078/1846595723557147156151786152499366687569722744011302407020455809280594038056223852568951718462474153951672335866715654153523843955513167531739386582686114545823305161128297234887329119860255600972561534713008376312342295724191173957260256352612807316114669486939448006523889489471912384033203125*a^2 + 2351432413692022254066438266577100183514828004415905040437326602004946930635942233146528817325416948515797296867947688356616798913401046136899081536181084767344346480810627200495531180794326634382675252631839139904967037478184840941275812058242995052383261849064340050686841429735092777331963400618255005895650200107/1846595723557147156151786152499366687569722744011302407020455809280594038056223852568951718462474153951672335866715654153523843955513167531739386582686114545823305161128297234887329119860255600972561534713008376312342295724191173957260256352612807316114669486939448006523889489471912384033203125 """ if a.valuation(prime) != 0: raise ValueError('The valuation of a with respect to prime is not zero') K = prime.ring() g = K.gen() p = prime.smallest_integer() f = prime.residue_class_degree() e = prime.absolute_ramification_index() q = p**f - 1 R = RealField(prec) divisor = q.divisors() order = min(d for d in divisor if (a**d - 1).valuation(prime) > 0) gamma= a**order t = 0 while (gamma-1).valuation(prime) <= e: t += 1 gamma = gamma**p prec += t # since later we divide by p^t, we must increase the precision by t at this point. 
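# At this point gamma = a^(order * p^t) satisfies (gamma - 1).valuation(prime) > e, so the p-adic
# logarithm series log(gamma) = -sum_{i>=1} (1 - gamma)^i / i converges; the code below sums
# enough terms of this series for the requested precision and, since the p-adic logarithm is a
# homomorphism, the final division by order * p**t recovers log_p(a).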
m = (gamma-1).valuation(prime) / e n = Integer(1) step = 10 ** (R(prec).log()/R(10).log()).floor() while n < (R(n).log()/R(p).log() + prec)/m: n += step # could use smaller stepsize to get actual smallest integer n, however this seems to run faster. w = (R(prec).log()/R(p).log()).floor() gamma = sum([ZZ(gi % (p**(prec+w))) * g**i if gi.valuation(p) >= 0 else ZZ((gi * p**(-gi.valuation(p))) % (p**(prec+w-gi.valuation(p)))) * p**(gi.valuation(p)) * g**i for i,gi in enumerate(gamma) if gi != 0]) beta = 0 delta = 1 - gamma for i in range(1, n+1): beta -= delta / i delta *= (1 - gamma) delta = sum([ZZ(di % (p**(prec+w))) * g**b if di.valuation(p) >= 0 else ZZ((di * p**(-di.valuation(p))) % (p**(prec + w - di.valuation(p)))) * p**(di.valuation(p)) * g**b for b,di in enumerate(delta) if di != 0]) beta = beta / (order * p**t) # we try to make the coefficients small logp = 0 for i,b in enumerate(beta.list()): val = b.valuation(p) if val < 0: t = b * p**(-val) t = ZZ(mod(t, p**(prec-val))) t = t * p**val else: t = ZZ(mod(b, p**prec)) logp = logp + t * g**i return logp def defining_polynomial_for_Kp(prime, prec=106): r""" INPUT: - ``prime`` -- a prime ideal of a number field `K` - ``prec`` -- a positive natural number (default: 106) OUTPUT: A polynomial with integer coefficients that is equivalent ``mod p^prec`` to a defining polynomial for the completion of `K` associated to the specified prime. .. NOTE:: `K` has to be an absolute extension EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import defining_polynomial_for_Kp sage: K.<a> = QuadraticField(2) sage: p2 = K.prime_above(7); p2 Fractional ideal (-2*a + 1) sage: defining_polynomial_for_Kp(p2, 10) x + 266983762 :: sage: K.<a> = QuadraticField(-6) sage: p2 = K.prime_above(2); p2 Fractional ideal (2, a) sage: defining_polynomial_for_Kp(p2, 100) x^2 + 6 sage: p5 = K.prime_above(5); p5 Fractional ideal (5, a + 2) sage: defining_polynomial_for_Kp(p5, 100) x + 3408332191958133385114942613351834100964285496304040728906961917542037 """ K = prime.ring() if not K.is_absolute(): raise ValueError('The number field is not an absolute extension') theta = K.gen() f = K.defining_polynomial() p = prime.smallest_integer() e = prime.absolute_ramification_index() N = prec while True: RQp = Qp(p, prec=N, type='capped-rel', print_mode='series') # We factor f in Integers(p**(precision)) using the factorization in Qp factors = f.change_ring(RQp).factor() # We are going to find which factor of f is related to the prime ideal 'prime' L = [g.change_ring(ZZ) for g, _ in factors] A = [g for g in L if (g(theta)).valuation(prime) >= e*N/2] # We narrow down the list unitl only one value remains if len(A) == 1: return A[0].change_ring(Integers(p**prec)).change_ring(ZZ) else: N += 1 def embedding_to_Kp(a, prime, prec): r""" INPUT: - ``a`` -- an element of a number field `K` - ``prime`` -- a prime ideal of `K` - ``prec`` -- a positive natural number OUTPUT: An element of `K` that is equivalent to ``a`` modulo ``p^(prec)`` and the generator of `K` appears with exponent less than `e \cdot f`, where ``p`` is the rational prime below ``prime`` and `e,f` are the ramification index and residue degree, respectively. .. 
NOTE:: `K` has to be an absolute number field EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import embedding_to_Kp sage: K.<a> = QuadraticField(17) sage: p = K.prime_above(13); p Fractional ideal (-a + 2) sage: embedding_to_Kp(a-3, p, 15) -20542890112375827 :: sage: K.<a> = NumberField(x^4-2) sage: p = K.prime_above(7); p Fractional ideal (-a^2 + a - 1) sage: embedding_to_Kp(a^3-3, p, 15) -1261985118949117459462968282807202378 """ K = prime.ring() if not K.is_absolute(): raise ValueError('K has to be an absolute extension') g = defining_polynomial_for_Kp(prime, prec).change_ring(QQ) gen = K.gen() f = K(a).lift() return K( sum([b*gen**j for j,b in enumerate(f.mod(g))]) ) def p_adic_LLL_bound_one_prime(prime, B0, M, M_logp, m0, c3, prec=106): r""" INPUT: - ``prime`` -- a prime ideal of a number field `K` - ``B0`` -- the initial bound - ``M`` -- a list of elements of `K`, the `\mu_i`'s from Lemma IX.3 of [Sma1998]_ - ``M_logp`` -- the p-adic logarithm of elements in `M` - ``m0`` -- an element of `K`, this is `\mu_0` from Lemma IX.3 of [Sma1998]_ - ``c3`` -- a positive real constant - ``prec`` -- the precision of the calculations (default: 106), i.e., values are known to O(p^prec) OUTPUT: A pair consisting of: 1. a new upper bound, an integer 2. a boolean value, ``True`` if we have to increase precision, otherwise ``False`` .. NOTE:: The constant `c_5` is the constant `c_5` at the page 89 of [Sma1998]_ which is equal to the constant `c_{10}` at the page 139 of [Sma1995]_. In this function, the `c_i` constants are in line with [Sma1998]_, but generally differ from the constants in [Sma1995]_ and other parts of this code. EXAMPLES: This example indicates a case where we must increase precision:: sage: from sage.rings.number_field.S_unit_solver import p_adic_LLL_bound_one_prime sage: prec = 50 sage: K.<a> = NumberField(x^3-3) sage: S = tuple(K.primes_above(3)) sage: SUK = UnitGroup(K, S=S) sage: v = S[0] sage: A = SUK.roots_of_unity() sage: K0_old = 9.4755766731093e17 sage: Mus = [a^2 - 2] sage: Log_p_Mus = [185056824593551109742400*a^2 + 1389583284398773572269676*a + 717897987691852588770249] sage: mu0 = K(-1) sage: c3_value = 0.42578591347980 sage: m0_Kv_new, increase_precision = p_adic_LLL_bound_one_prime(v, K0_old, Mus, Log_p_Mus, mu0, c3_value, prec) sage: m0_Kv_new 0 sage: increase_precision True And now we increase the precision to make it all work:: sage: prec = 106 sage: K0_old = 9.475576673109275443280257946930e17 sage: Log_p_Mus = [1029563604390986737334686387890424583658678662701816*a^2 + 661450700156368458475507052066889190195530948403866*a] sage: c3_value = 0.4257859134798034746197327286726 sage: m0_Kv_new, increase_precision = p_adic_LLL_bound_one_prime(v, K0_old, Mus, Log_p_Mus, mu0, c3_value, prec) sage: m0_Kv_new 476 sage: increase_precision False """ if any(g.valuation(prime) != 0 for g in M+[m0]): raise ValueError('There is an element with non zero valuation') K = prime.ring() w = K.number_of_roots_of_unity() p = prime.smallest_integer() f = prime.residue_class_degree() e = prime.absolute_ramification_index() R = RealField(prec) c5 = c3 / (f*e*R(p).log()) theta = K.gen() # if M is empty then it is easy to give an upper bound if len(M) == 0: if m0 != 1: return max(4,w, R(max(R(p).log()*f*(m0-1).valuation(prime)/c3, 0)).floor()), False else: return 0, False # we evaluate the p-adic logarithms of m0 and we embed it in the completion of K with respect to prime m0_logp = log_p(m0, prime, prec) m0_logp = embedding_to_Kp(m0_logp, prime, prec) n = len(M_logp) # Below we 
implement paragraph VI.4.2 of [Sma1998], pages 89-93 # we evaluate the order of discriminant of theta Theta = [theta**i for i in range(K.absolute_degree())] ordp_Disc = (K.disc(Theta)).valuation(p) # We evaluate Lambda c8 = min(min(a.valuation(p) for a in g) for g in M_logp) lam = p**c8 # we apply lemma VI.5 of [Sma1998] page 90 # c6 is 0 here because we seek to solve the equation x+y=1, so our set A # is contained in the roots of unity of K # In one very extreme case (p = 2 and all other constants as small as possible), # low_bound = 1/c5 is not quite enough to give strict inequality. So we add 1 to be safe. low_bound = (1/c5).round() + 1 for a in m0_logp: if a != 0 and c8 > a.valuation(p): B1 = (c8 + ordp_Disc/2) / c5 if B1 > low_bound: return max(4,w,RR(B1).floor()), False else: return max(4,w,low_bound), False c8 = min([a.valuation(p) for a in m0_logp] + [c8]) B = [g/lam for g in M_logp] b0 = m0_logp / lam c9 = c8 + ordp_Disc/2 # We evaluate 'u' and we construct the matrix A m = e * f u = 1 while True: if prec <= u + c8: return 0, True # We construct the matrix A as a block matrix A11 = identity_matrix(ZZ, n) A12 = zero_matrix(ZZ, n, m) A21 = zero_matrix(ZZ, n, m) A22 = p**u * identity_matrix(ZZ, m) for i,b in enumerate(B): A21[i] = vector([mod(b[j],p**u) for j in range(m)]) A = block_matrix( [[A11,A12], [A21.transpose(),A22]] ) y = zero_vector(ZZ, n+m) for i in range(m): y[i+n] = -mod(b0[i], p**u) # This refers to c10 from Smart c10squared = minimal_vector(A.transpose(), y) if c10squared > n * B0**2: B2 = (u+c9) / c5 if B2 > low_bound: return max(4,w,R(B2).floor()),False else: return max(4,w,low_bound),False else: u += 1 def p_adic_LLL_bound(SUK, A, prec=106): r""" Return the maximum of all of the `K_0`'s as they are LLL-optimized for each finite place `v`. 
INPUT: - ``SUK`` -- a group of `S`-units - ``A`` -- a list of all products of each potential ``a``, ``b`` in the `S`-unit equation ``ax + by + 1 = 0`` with each root of unity of ``K`` - ``prec``-- precision for p-adic LLL calculations (default: 106) OUTPUT: A bound for the max of exponents in the case that extremal place is finite (see [Sma1995]_) as a real number EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import p_adic_LLL_bound sage: K.<xi> = NumberField(x^3-3) sage: SUK = UnitGroup(K,S=tuple(K.primes_above(3))) sage: A = SUK.roots_of_unity() sage: prec = 100 sage: p_adic_LLL_bound(SUK,A, prec) 89 """ S = SUK.primes() K0_old = K0_func(SUK, A, prec) LLL_K0_by_finite_place = [] for i,v in enumerate(S): # Kv_old = K0_by_finite_place[0] Mus0 = possible_mu0s(SUK, v) Mus = mus(SUK, v) Log_p_Mus = [log_p(a, v, prec) for a in Mus] local_prec = prec val = 0 for m0 in Mus0: m0_Kv_old = K0_old m0_Kv_new, increase_precision = p_adic_LLL_bound_one_prime(v, m0_Kv_old, Mus, Log_p_Mus, m0, c3_func(SUK, local_prec), local_prec) while increase_precision: local_prec *= 2 Log_p_Mus = [log_p(a, v, local_prec) for a in Mus] Log_p_Mus = [embedding_to_Kp(a, v, local_prec) for a in Log_p_Mus] m0_Kv_new, increase_precision = p_adic_LLL_bound_one_prime(v, m0_Kv_old, Mus, Log_p_Mus, m0, c3_func(SUK, local_prec), local_prec) while m0_Kv_new < m0_Kv_old: m0_Kv_old = m0_Kv_new m0_Kv_new, increase_precision = p_adic_LLL_bound_one_prime(v, m0_Kv_old, Mus, Log_p_Mus, m0, c3_func(SUK,local_prec), local_prec) while increase_precision: local_prec *= 2 Log_p_Mus = [log_p(a, v, local_prec) for a in Mus] Log_p_Mus = [embedding_to_Kp(a, v, local_prec) for a in Log_p_Mus] m0_Kv_new,increase_precision = p_adic_LLL_bound_one_prime(v, m0_Kv_old, Mus, Log_p_Mus, m0, c3_func(SUK, local_prec), local_prec) if m0_Kv_old > val: val = m0_Kv_old LLL_K0_by_finite_place.append(val) return max(LLL_K0_by_finite_place) def split_primes_large_lcm(SUK, bound): r""" Return a list ``L`` of rational primes `q` which split completely in `K` and which have desirable properties (see NOTE). INPUT: - ``SUK`` -- the `S`-unit group of an absolute number field `K`. - ``bound`` -- a positive integer OUTPUT: A list `L` of rational primes `q`, with the following properties: - each prime `q` in `L` splits completely in `K` - if `Q` is a prime in `S` and `q` is the rational prime below `Q`, then `q` is **not** in `L` - the value ``lcm { q-1 : q in L }`` is greater than or equal to ``2*bound + 1``. .. NOTE:: - A series of compatible exponent vectors for the primes in `L` will lift to **at most** one integer exponent vector whose entries `a_i` satisfy `|a_i|` is less than or equal to ``bound``. - The ordering of this set is not very intelligent for the purposes of the later sieving processes. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import split_primes_large_lcm sage: K.<xi> = NumberField(x^3 - 3*x + 1) sage: S = K.primes_above(3) sage: SUK = UnitGroup(K,S=tuple(S)) sage: split_primes_large_lcm(SUK, 200) [17, 19, 37, 53] With a tiny bound, Sage may ask you to increase the bound. :: sage: from sage.rings.number_field.S_unit_solver import split_primes_large_lcm sage: K.<xi> = NumberField(x^2 + 163) sage: SUK = UnitGroup(K, S=tuple(K.primes_above(23))) sage: split_primes_large_lcm(SUK, 8) Traceback (most recent call last): ... ValueError: Not enough split primes found. Increase bound. 
""" K = SUK.number_field() # we recover the rational primes below S: S0 = set(prime_ideal.smallest_integer() for prime_ideal in SUK.primes()) split_prime_list = K.completely_split_primes(4*bound + 4) lcm_list = [] L = 1 while L < 2*bound + 1: if split_prime_list == []: # Need More Primes! raise ValueError('Not enough split primes found. Increase bound.') q = split_prime_list.pop(0) # only use q if it is *not* below a prime in S -- that is, # only if q does *not* appear in S0. if q not in S0: L = lcm(L, q-1) lcm_list.append(q) return lcm_list def sieve_ordering(SUK, q): r""" Returns ordered data for running sieve on the primes in `SUK` over the rational prime `q`. INPUT: - ``SUK`` -- the `S`-unit group of a number field `K` - ``q`` -- a rational prime number which splits completely in `K` OUTPUT: A list of tuples, ``[ideals_over_q, residue_fields, rho_images, product_rho_orders]``, where 1. ``ideals_over_q`` is a list of the `d = [K:\mathbb{Q}]` ideals in `K` over `q` 2. ``residue_fields[i]`` is the residue field of ``ideals_over_q[i]`` 3. ``rho_images[i]`` is a list of the reductions of the generators in of the `S`-unit group, modulo ``ideals_over_q[i]`` 4. ``product_rho_orders[i]`` is the product of the multiplicative orders of the elements in ``rho_images[i]`` .. NOTE:: - The list ``ideals_over_q`` is sorted so that the product of orders is smallest for ``ideals_over_q[0]``, as this will make the later sieving steps more efficient. - The primes of ``S`` must not lie over ``q``. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import sieve_ordering sage: K.<xi> = NumberField(x^3 - 3*x + 1) sage: SUK = K.S_unit_group(S=3) sage: sieve_data = list(sieve_ordering(SUK, 19)) sage: sieve_data[0] (Fractional ideal (xi - 3), Fractional ideal (-2*xi^2 + 3), Fractional ideal (2*xi + 1)) sage: sieve_data[1] (Residue field of Fractional ideal (xi - 3), Residue field of Fractional ideal (-2*xi^2 + 3), Residue field of Fractional ideal (2*xi + 1)) sage: sieve_data[2] ([18, 7, 16, 4], [18, 9, 12, 8], [18, 3, 10, 10]) sage: sieve_data[3] (486, 648, 11664) """ K = SUK.number_field() rho = SUK.gens_values() d = K.absolute_degree() primes_over_q = K.primes_above(q) # q must split completely. if len(primes_over_q) != d: raise ValueError('The prime q is not completely split.') for P in SUK.primes(): if P in primes_over_q: raise ValueError('There is a prime in S over q.') q_data = [] for Qi in primes_over_q: resfield = Qi.residue_field() rho_mod_Qi = [resfield(rho_j) for rho_j in rho] orderprod = prod(rho_ij.multiplicative_order() for rho_ij in rho_mod_Qi) q_data.append([Qi, resfield, rho_mod_Qi, orderprod]) q_data.sort(key=lambda X: [X[3],X[0],X[1],X[2]]) # zip() will change the list of n list of length m to m tuples of length n return zip(*q_data) def clean_rfv_dict(rfv_dictionary): r""" Given a residue field vector dictionary, removes some impossible keys and entries. INPUT: - ``rfv_dictionary`` -- a dictionary whose keys are exponent vectors and whose values are residue field vectors OUTPUT: None. But it removes some keys from the input dictionary. .. NOTE:: - The keys of a residue field vector dictionary are exponent vectors modulo ``(q-1)`` for some prime ``q``. - The values are residue field vectors. It is known that the entries of a residue field vector which comes from a solution to the S-unit equation cannot have 1 in any entry. 
EXAMPLES: In this example, we use a truncated list generated when solving the `S`-unit equation in the case that `K` is defined by the polynomial `x^2+x+1` and `S` consists of the primes above 3:: sage: from sage.rings.number_field.S_unit_solver import clean_rfv_dict sage: rfv_dict = {(1, 3): [3, 2], (3, 0): [6, 6], (5, 4): [3, 6], (2, 1): [4, 6], (5, 1): [3, 1], (2, 5): [1, 5], (0, 3): [1, 6]} sage: len(rfv_dict) 7 sage: clean_rfv_dict(rfv_dict) sage: len(rfv_dict) 4 sage: rfv_dict {(1, 3): [3, 2], (2, 1): [4, 6], (3, 0): [6, 6], (5, 4): [3, 6]} """ for a, val in list(rfv_dictionary.items()): if 1 in val: rfv_dictionary.pop(a) def construct_rfv_to_ev(rfv_dictionary, q, d, verbose=False): r""" Return a reverse lookup dictionary, to find the exponent vectors associated to a given residue field vector. INPUT: - ``rfv_dictionary`` -- a dictionary whose keys are exponent vectors and whose values are the associated residue field vectors - ``q`` -- a prime (assumed to split completely in the relevant number field) - ``d`` -- the number of primes in `K` above the rational prime ``q`` - ``verbose`` -- a boolean flag to indicate more detailed output is desired (default: False) OUTPUT: A dictionary ``P`` whose keys are residue field vectors and whose values are lists of all exponent vectors which correspond to the given residue field vector. .. NOTE:: - For example, if ``rfv_dictionary[ e0 ] = r0``, then ``P[ r0 ]`` is a list which contains ``e0``. - During construction, some residue field vectors can be eliminated as coming from solutions to the `S`-unit equation. Such vectors are dropped from the keys of the dictionary ``P``. EXAMPLES: In this example, we use a truncated list generated when solving the `S`-unit equation in the case that `K` is defined by the polynomial `x^2+x+1` and `S` consists of the primes above 3:: sage: from sage.rings.number_field.S_unit_solver import construct_rfv_to_ev sage: rfv_dict = {(1, 3): [3, 2], (3, 0): [6, 6], (5, 4): [3, 6], (2, 1): [4, 6], (4, 0): [4, 2], (1, 2): [5, 6]} sage: construct_rfv_to_ev(rfv_dict,7,2,False) {(3, 2): [(1, 3)], (4, 2): [(4, 0)], (4, 6): [(2, 1)], (5, 6): [(1, 2)]} """ # The keys in P are just the possible first entries of a residue field vector. # The values (all empty lists now) will be added in the next step. P = {(v,) : [] for v in range(2, q)} # Step 1. Populate the empty lists in P[(v,)]. # Loop through the keys in rfv_dictionary. For each, look at the output rf_vector. # Find the key in P which matches the first entry of the rf_vector. # Dump the **rest** of the rf_vector into a pair [exp_vec, rf_vec[1:]], # and append this pair into the dictionary P at the key (rf_vec[0], ). # Now, P[(v,)] = [ [a_0, e_0], [a_1, e_1], ...] # # The relationship between v, a_i, and e_i is as follows: # # a_i is an exponent vector, whose associated residue field vector is the # concatenation of v with e_i. for exponent_vector in rfv_dictionary: residue_field_vector = rfv_dictionary[exponent_vector] rf_vector_start = (residue_field_vector[0], ) rf_vector_end = residue_field_vector[1:] P[rf_vector_start].append([exponent_vector, rf_vector_end]) if verbose: print("Populated P. Currently it has ", len(P), "keys.") # Step 2: We build a new dictionary, P_new, from P. # # This is a step that will be repeated, once for each of the d primes over q. # # P is a dictionary whose keys are tuples of length m, representing the beginning of known residue field vectors. # # For any such beginning `s`, # # P[s] = [ [a_0, e_0], [a_1, e_1], ...] 
# # where for any exponent vector a_i, the associated residue field vector is the concatenation s + e_i. # # The dictionary P_new is constructed from the dictionary P. The new keys will be tuples of length m + 1. # # During the construction, we look for impossible entries for S-unit solutions, and drop them from the dictionary as needed. for j in range(d-1): if verbose: print("Constructing ", j, " th place of the residue field vectors, out of ", d-1, " total.") P_new = {} garbage = {} # we loop over each key of P. for rf_vector_start in P: # each key of P provides q-2 possible keys for P_new, which we introduce and assign an empty list. for w in range(2, q): new_rf_vector_start = tuple(list(rf_vector_start) + [w]) P_new[new_rf_vector_start] = [] # we populate P_new[ new_rf_vector_start ] using P[rf_vector_start] for exponent_vector, rf_vector_end in P[rf_vector_start]: new_rf_vector_end = rf_vector_end[1:] w = rf_vector_end[0] new_rf_vector_start = tuple(list(rf_vector_start) + [w]) P_new[new_rf_vector_start].append([exponent_vector, new_rf_vector_end]) if verbose: print("P_new is populated with ", len(P_new), " keys.") # we now loop over the keys of P_new, looking for incompatible entries. for rf_vector_start in P_new: # the final entry of rf_vector_start or rf_vector_complement_start must be < (q+3)/2. # No loss to insist that it is rf_vector_start. if rf_vector_start[-1] < (q+3)/2: # we find the complement to rf_vector_start: rf_vector_complement_start = tuple([ q+1-j for j in rf_vector_start]) if P_new[ rf_vector_start ] == [] or P_new[rf_vector_complement_start] == []: # these can't be solutions. Mark them for deletion. garbage[rf_vector_start] = True garbage[rf_vector_complement_start] = True # garbage removal for rf_vector_start in garbage: P_new.pop(rf_vector_start, 0) if verbose: print("After removing incompatible entries, P_new is down to ", len(P_new), " keys.") # Time to move on to the next dictionary. P = P_new.copy() # Now, we just clean up P. for residue_field_vector in P: # at this instant, P[ residue_field_vector ] is a list of pairs: [ [a0,e0], ... ] # We only care about the exponent vectors a0,... P[residue_field_vector] = [a[0] for a in P[residue_field_vector]] if verbose: print("Returning dictionary P with ", len(P), " keys.") return P.copy() def construct_comp_exp_vec(rfv_to_ev_dict, q): r""" Constructs a dictionary associating complement vectors to residue field vectors. INPUT: - ``rfv_to_ev_dict`` -- a dictionary whose keys are residue field vectors and whose values are lists of exponent vectors with the associated residue field vector. - ``q`` -- the characteristic of the residue field OUTPUT: A dictionary whose typical key is an exponent vector ``a``, and whose associated value is a list of complementary exponent vectors to ``a``. 
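Here two exponent vectors are complementary when their residue field vectors sum entrywise to `1` modulo `q`; since residues are recorded as integers in `\{2, \dots, q-1\}`, the complement of an entry `j` is `q + 1 - j`, which is how the complement vector is formed in the code below.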
EXAMPLES: In this example, we use the list generated when solving the `S`-unit equation in the case that `K` is defined by the polynomial `x^2+x+1` and `S` consists of the primes above 3 :: sage: from sage.rings.number_field.S_unit_solver import construct_comp_exp_vec sage: rfv_to_ev_dict = {(6, 6): [(3, 0)], (5, 6): [(1, 2)], (5, 4): [(5, 3)], (6, 2): [(5, 5)], (2, 5): [(0, 1)], (5, 5): [(3, 4)], (4, 4): [(0, 2)], (6, 3): [(1, 4)], (3, 6): [(5, 4)], (2, 2): [(0, 4)], (3, 5): [(1, 0)], (6, 4): [(1, 1)], (3, 2): [(1, 3)], (2, 6): [(4, 5)], (4, 5): [(4, 3)], (2, 3): [(2, 3)], (4, 2): [(4, 0)], (6, 5): [(5, 2)], (3, 3): [(3, 2)], (5, 3): [(5, 0)], (4, 6): [(2, 1)], (3, 4): [(3, 5)], (4, 3): [(0, 5)], (5, 2): [(3, 1)], (2, 4): [(2, 0)]} sage: construct_comp_exp_vec(rfv_to_ev_dict, 7) {(0, 1): [(1, 4)], (0, 2): [(0, 2)], (0, 4): [(3, 0)], (0, 5): [(4, 3)], (1, 0): [(5, 0)], (1, 1): [(2, 0)], (1, 2): [(1, 3)], (1, 3): [(1, 2)], (1, 4): [(0, 1)], (2, 0): [(1, 1)], (2, 1): [(4, 0)], (2, 3): [(5, 2)], (3, 0): [(0, 4)], (3, 1): [(5, 4)], (3, 2): [(3, 4)], (3, 4): [(3, 2)], (3, 5): [(5, 3)], (4, 0): [(2, 1)], (4, 3): [(0, 5)], (4, 5): [(5, 5)], (5, 0): [(1, 0)], (5, 2): [(2, 3)], (5, 3): [(3, 5)], (5, 4): [(3, 1)], (5, 5): [(4, 5)]} """ comp_exp_vec_dict = {} for residue_field_vector in rfv_to_ev_dict: rf_vector_complement = tuple([q + 1 - j for j in residue_field_vector]) exponent_vector_list = rfv_to_ev_dict[ residue_field_vector ][:] exponent_vector_complement_list = rfv_to_ev_dict[rf_vector_complement][:] for exponent_vector in exponent_vector_list: comp_exp_vec_dict[exponent_vector] = exponent_vector_complement_list return comp_exp_vec_dict def drop_vector(ev, p, q, complement_ev_dict): r""" Determines if the exponent vector, ``ev``, may be removed from the complement dictionary during construction. This will occur if ``ev`` is not compatible with an exponent vector mod ``q-1``. INPUT: - ``ev`` -- an exponent vector modulo ``p - 1`` - ``p`` -- the prime such that ev is an exponent vector modulo ``p-1`` - ``q`` -- a prime, distinct from ``p``, that is a key in the ``complement_ev_dict`` - ``complement_ev_dict`` -- a dictionary of dictionaries, whose keys are primes ``complement_ev_dict[q]`` is a dictionary whose keys are exponent vectors modulo ``q-1`` and whose values are lists of complementary exponent vectors modulo ``q-1`` OUTPUT: Returns ``True`` if ``ev`` may be dropped from the complement exponent vector dictionary, and ``False`` if not. .. NOTE:: - If ``ev`` is not compatible with any of the vectors modulo ``q-1``, then it can no longer correspond to a solution of the `S`-unit equation. It returns ``True`` to indicate that it should be removed. 
EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import drop_vector sage: drop_vector((1, 2, 5), 7, 11, {11: {(1, 1, 3): [(1, 1, 3),(2, 3, 4)]}}) True :: sage: P={3: {(1, 0, 0): [(1, 0, 0), (0, 1, 0)], (0, 1, 0): [(1, 0, 0), (0, 1, 0)]}, 7: {(0, 3, 4): [(0, 1, 2), (0, 3, 4), (0, 5, 0)], (1, 2, 4): [(1, 0, 4), (1, 4, 2), (1, 2, 0)], (0, 1, 2): [(0, 1, 2), (0, 3, 4), (0, 5, 0)], (0, 5, 4): [(1, 0, 0), (1, 4, 4), (1, 2, 2)], (1, 4, 2): [(1, 2, 4), (1, 4, 0), (1, 0, 2)], (1, 0, 4): [(1, 2, 4), (1, 4, 0), (1, 0, 2)], (0, 3, 2): [(1, 0, 0), (1, 4, 4), (1, 2, 2)], (1, 0, 0): [(0, 5, 4), (0, 3, 2), (0, 1, 0)], (1, 2, 0): [(1, 2, 4), (1, 4, 0), (1, 0, 2)], (0, 1, 0): [(1, 0, 0), (1, 4, 4), (1, 2, 2)], (0, 5, 0): [(0, 1, 2), (0, 3, 4), (0, 5, 0)], (1, 2, 2): [(0, 5, 4), (0, 3, 2), (0, 1, 0)], (1, 4, 0): [(1, 0, 4), (1, 4, 2), (1, 2, 0)], (1, 0, 2): [(1, 0, 4), (1, 4, 2), (1, 2, 0)], (1, 4, 4): [(0, 5, 4), (0, 3, 2), (0, 1, 0)]}} sage: drop_vector((0,1,0),3,7,P) False """ # returns True if it is OK to drop exp_vec given the current comp_exp_vec dictionary associated to some q. # returns False otherwise # loop over the possible compatible vectors in the other modulus g = gcd(p-1, q-1) for compatible_exp_vec in compatible_vectors(ev, p-1, q-1, g): # do they appear in the other dictionary? if compatible_exp_vec in complement_ev_dict[q]: # OK, but the complements need to be compatible, too! ev_complement_list = complement_ev_dict[p][ev] for ev_comp in ev_complement_list: for compatible_cv in compatible_vectors(ev_comp, p-1, q-1, g): if compatible_cv in complement_ev_dict[q][compatible_exp_vec]: return False return True def construct_complement_dictionaries(split_primes_list, SUK, verbose=False): r""" A function to construct the complement exponent vector dictionaries. INPUT: - ``split_primes_list`` -- a list of rational primes which split completely in the number field `K` - ``SUK`` -- the `S`-unit group for a number field `K` - ``verbose`` -- a boolean to provide additional feedback (default: False) OUTPUT: A dictionary of dictionaries. The keys coincide with the primes in ``split_primes_list`` For each ``q``, ``comp_exp_vec[q]`` is a dictionary whose keys are exponent vectors modulo ``q-1``, and whose values are lists of exponent vectors modulo ``q-1`` If ``w`` is an exponent vector in ``comp_exp_vec[q][v]``, then the residue field vectors modulo ``q`` for ``v`` and ``w`` sum to ``[1,1,...,1]`` .. NOTE:: - The data of ``comp_exp_vec`` will later be lifted to `\mathbb{Z}` to look for true `S`-Unit equation solutions. - During construction, the various dictionaries are compared to each other several times to eliminate as many mod `q` solutions as possible. - The authors acknowledge a helpful discussion with Norman Danner which helped formulate this code. 
EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import construct_complement_dictionaries sage: f = x^2 + 5 sage: H = 10 sage: K.<xi> = NumberField(f) sage: SUK = K.S_unit_group(S=K.primes_above(H)) sage: split_primes_list = [3, 7] sage: actual = construct_complement_dictionaries(split_primes_list, SUK) sage: expected = {3: {(0, 1, 0): [(1, 0, 0), (0, 1, 0)], ....: (1, 0, 0): [(1, 0, 0), (0, 1, 0)]}, ....: 7: {(0, 1, 0): [(1, 0, 0), (1, 4, 4), (1, 2, 2)], ....: (0, 1, 2): [(0, 1, 2), (0, 3, 4), (0, 5, 0)], ....: (0, 3, 2): [(1, 0, 0), (1, 4, 4), (1, 2, 2)], ....: (0, 3, 4): [(0, 1, 2), (0, 3, 4), (0, 5, 0)], ....: (0, 5, 0): [(0, 1, 2), (0, 3, 4), (0, 5, 0)], ....: (0, 5, 4): [(1, 0, 0), (1, 4, 4), (1, 2, 2)], ....: (1, 0, 0): [(0, 5, 4), (0, 3, 2), (0, 1, 0)], ....: (1, 0, 2): [(1, 0, 4), (1, 4, 2), (1, 2, 0)], ....: (1, 0, 4): [(1, 2, 4), (1, 4, 0), (1, 0, 2)], ....: (1, 2, 0): [(1, 2, 4), (1, 4, 0), (1, 0, 2)], ....: (1, 2, 2): [(0, 5, 4), (0, 3, 2), (0, 1, 0)], ....: (1, 2, 4): [(1, 0, 4), (1, 4, 2), (1, 2, 0)], ....: (1, 4, 0): [(1, 0, 4), (1, 4, 2), (1, 2, 0)], ....: (1, 4, 2): [(1, 2, 4), (1, 4, 0), (1, 0, 2)], ....: (1, 4, 4): [(0, 5, 4), (0, 3, 2), (0, 1, 0)]}} sage: all(set(actual[p][vec]) == set(expected[p][vec]) for p in [3,7] for vec in expected[p]) True """ # We initialize some dictionaries. rho = SUK.gens_values() rho_length = len(rho) rho_images_dict = {} rho_orders_dict = {} K = SUK.number_field() for q in split_primes_list: ideals_over_q, residue_fields, rho_images, product_rho_orders = sieve_ordering(SUK, q) rho_images_dict[q] = rho_images rho_orders_dict[q] = product_rho_orders nK = K.absolute_degree() w0 = rho[0].multiplicative_order() # We build a dictionary of dictionaries. # rfv_to_ev[q] is the 'mod q' residue field vector to exponent vector dictionary. rfv_to_ev = {} # We build a second dictionary of dictionaries. # comp_exp_vec[q] is the dictionary mod q which assigns to each exponent vector # a list of 'complementary' exponent vectors. comp_exp_vec = {} q0 = split_primes_list[0] if verbose: print("Using the following primes: ", split_primes_list) for q in split_primes_list: rho_images = rho_images_dict[q] if verbose: print("q = ", q) def epsilon_q(a, i): # a is an exponent vector # i is an index for one of the primes over q # returns the value of rho_j^a_j inside the # residue field of Qi. (Necessarily isomorphic to F_q.) # rho_images[i][j] == rho[j] modulo Q[i] eps_value = rho_images[i][0]**a[0] for j in range(1, rho_length): eps_value *= rho_images[i][j]**a[j] return eps_value if verbose: print("The evaluation function epsilon has been defined using rho_images = ", rho_images) # Now, we run through the vectors in the iterator, but only keep the ones # which are compatible with the previously constructed dictionaries. That is, # in order to keep an exp_vec mod q, there must exist a compatible exp_vec mod p # in the keys of the rfv_to_ev[p] dictionary for each completely split prime # p appearing prior to q in split_primes_list. if q == q0: # for the first prime, there is no filtering possible, and we just build the exponent vector # iterator. # This should consist of all vectors (a0,...,a_{t-1}), where # a0 is in the range 0 .. w_0 - 1 and # aj is in the range 0 .. q - 2 (for j > 0) ranges = [range(w0)] + [range(q-1) for _ in range(rho_length-1)] ev_iterator = itertools.product(*ranges) # With the iterator built, we construct the exponent vector to residue field dictionary. 
ev_to_rfv_dict = {ev : [epsilon_q(ev, i) for i in range(nK)] for ev in ev_iterator} if verbose: print("The residue field dictionary currently has ", len(ev_to_rfv_dict), " exponent vector keys.") else: ev_to_rfv_dict = {} # We use compatibility requirements to keep the size of the dictionary down. # Later on, we'll compare all dictionaries pairwise. But for now, we just # check against the first. # That is, rather than loop over every possible exponent vector mod q-1, # we only consider those evs which are compatible with the mod q0 - 1 vectors. # Loop over exponent vectors modulo q0 - 1 g = gcd(q0-1, q-1) for exp_vec_mod_q0 in comp_exp_vec[q0]: # Loop only over exponent vectors modulo q-1 which are compatible with exp_vec_mod_q0 for exp_vec in compatible_vectors(exp_vec_mod_q0, q0-1, q-1, g): # fill the dictionary with the residue field vectors using the evaluation function. ev_to_rfv_dict[exp_vec] = [epsilon_q(exp_vec, i) for i in range(nK)] if verbose: print("The residue field dictionary currently has ", len(ev_to_rfv_dict), " exponent vector keys.") # At this point, we now have a dictionary ev_to_rfv_dict, which attaches # to each exponent vector a 'residue field vector,' which is a tuple of the # nK values epsilon_q(a,0),...,epsilon_q(a,nK-1). clean_rfv_dict( ev_to_rfv_dict ) if verbose: print("clean_rfv_dict executed.") print("The residue field dictionary currently has ", len(ev_to_rfv_dict), " exponent vector keys.") # We essentially construct an inverse dictionary: one whose keys are residue field vectors, # and whose values are the exponent vectors that yield each key rfv_to_ev[q] = construct_rfv_to_ev(ev_to_rfv_dict, q, nK, verbose=verbose) if verbose: print("construct_rfv_to_ev executed.") print("The rfv_to_ev dictionary currently has ", len(rfv_to_ev[q]), "rfv keys.") comp_exp_vec[q] = construct_comp_exp_vec(rfv_to_ev[q], q) if verbose: print("construct_comp_exp_vec executed.") print("Size of comp_exp_vec[q]: ", len(comp_exp_vec[q])) # Now that we have a new dictionary, we compare all the dictionaries pairwise, # looking for opportunities to remove 'impossible' solutions. for p in comp_exp_vec.keys(): if p == q: continue if verbose: print("Comparing dictionaries for p = ", p, "and q = ", q, ".") old_size_p = len(comp_exp_vec[p]) if verbose: print("Size of comp_exp_vec[p] is: ", old_size_p, ".") cv_size = ((q-1)/gcd(p-1, q-1)) ** (rho_length - 1) print("Length of compatible_vectors: ", cv_size, ".") print("Product: ", old_size_p*cv_size) for exp_vec in list(comp_exp_vec[p]): if drop_vector(exp_vec, p, q, comp_exp_vec): comp_exp_vec[p].pop(exp_vec) if verbose: print("Shrunk dictionary p from ", old_size_p, " to ", len(comp_exp_vec[p])) # Now, repeat, but swap p and q. old_size_q = len(comp_exp_vec[q]) if verbose: print("Size of comp_exp_vec[q] is: ", old_size_q, ".") cv_size = ((p-1)/gcd(p-1, q-1)) ** (rho_length - 1) print("Length of compatible_vectors: ", cv_size, ".") print("Product: ", old_size_q * cv_size) for exp_vec in list(comp_exp_vec[q]): if drop_vector(exp_vec, q, p, comp_exp_vec): comp_exp_vec[q].pop(exp_vec) if verbose: print("Shrunk dictionary q from ", old_size_q, " to ", len(comp_exp_vec[q])) return comp_exp_vec def compatible_vectors_check(a0, a1, g, l): r""" Given exponent vectors with respect to two moduli, determines if they are compatible. 
INPUT: - ``a0`` -- an exponent vector modulo ``m0`` - ``a1`` -- an exponent vector modulo ``m1`` (must have the same length as ``a0``) - ``g`` -- the gcd of ``m0`` and ``m1`` - ``l`` -- the length of ``a0`` and of ``a1`` OUTPUT: True if there is an integer exponent vector a satisfying .. MATH:: \begin{aligned} a[0] &== a0[0] == a1[0]\\ a[1:] &== a0[1:] \mod m_0\\ a[1:] &== a1[1:] \mod m_1 \end{aligned} and False otherwise. .. NOTE:: - Exponent vectors must agree exactly in the first coordinate. - If exponent vectors are different lengths, an error is raised. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import compatible_vectors_check sage: a0 = (3, 1, 8, 11) sage: a1 = (3, 5, 6, 13) sage: a2 = (5, 5, 6, 13) sage: compatible_vectors_check(a0, a1, gcd(12, 22), 4r) True sage: compatible_vectors_check(a0, a2, gcd(12, 22), 4r) False """ # exponent vectors must agree exactly in the 0th coordinate. return a0[0] == a1[0] and all((x0 - x1) % g == 0 for x0,x1 in zip(itertools.islice(a0, 1, l), itertools.islice(a1, 1, l))) def compatible_vectors(a, m0, m1, g): r""" Given an exponent vector ``a`` modulo ``m0``, returns an iterator over the exponent vectors for the modulus ``m1``, such that a lift to the lcm modulus exists. INPUT: - ``a`` -- an exponent vector for the modulus ``m0`` - ``m0`` -- a positive integer (specifying the modulus for ``a``) - ``m1`` -- a positive integer (specifying the alternate modulus) - ``g`` -- the gcd of m0 and m1 OUTPUT: A list of exponent vectors modulo ``m1`` which are compatible with ``a``. .. NOTE:: - Exponent vectors must agree exactly in the 0th position in order to be compatible. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import compatible_vectors sage: a = (3, 1, 8, 1) sage: list(compatible_vectors(a, 18, 12, gcd(18,12))) [(3, 1, 2, 1), (3, 1, 2, 7), (3, 1, 8, 1), (3, 1, 8, 7), (3, 7, 2, 1), (3, 7, 2, 7), (3, 7, 8, 1), (3, 7, 8, 7)] The order of the moduli matters. :: sage: len(list(compatible_vectors(a, 18, 12, gcd(18,12)))) 8 sage: len(list(compatible_vectors(a, 12, 18, gcd(18,12)))) 27 """ # recall that the 0th entry must be an exact match. ranges = [[a[0]]] + [range(a[i]%g, (a[i]%g) + m1, g) for i in range(1, len(a))] return itertools.product(*ranges) def compatible_systems(split_prime_list, complement_exp_vec_dict): r""" Given dictionaries of complement exponent vectors for various primes that split in K, compute all possible compatible systems. INPUT: - ``split_prime_list`` -- a list of rational primes that split completely in `K` - ``complement_exp_vec_dict`` -- a dictionary of dictionaries. The keys are primes from ``split_prime_list``. OUTPUT: A list of compatible systems of exponent vectors. .. NOTE:: - For any ``q`` in ``split_prime_list``, ``complement_exp_vec_dict[q]`` is a dictionary whose keys are exponent vectors modulo ``q-1`` and whose values are lists of exponent vectors modulo ``q-1`` which are complementary to the key. - an item in system_list has the form ``[ [v0, w0], [v1, w1], ..., [vk, wk] ]``, where:: - ``qj = split_prime_list[j]`` - ``vj`` and ``wj`` are complementary exponent vectors modulo ``qj - 1`` - the pairs are all simultaneously compatible. - Let ``H = lcm( qj - 1 : qj in split_primes_list )``. 
Then for any compatible system, there is at most one pair of integer exponent vectors ``[v, w]`` such that:: - every entry of ``v`` and ``w`` is bounded in absolute value by ``H`` - for any ``qj``, ``v`` and ``vj`` agree modulo ``(qj - 1)`` - for any ``qj``, ``w`` and ``wj`` agree modulo ``(qj - 1)`` EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import compatible_systems sage: split_primes_list = [3, 7] sage: checking_dict = {3: {(0, 1, 0): [(1, 0, 0)]}, 7: {(0, 1, 0): [(1, 0, 0)]}} sage: compatible_systems(split_primes_list, checking_dict) [[[(0, 1, 0), (1, 0, 0)], [(0, 1, 0), (1, 0, 0)]]] """ S0 = split_prime_list system_list = [] if len(S0) == 1: q = S0[0] for exponent_vector in complement_exp_vec_dict[q]: for complementary_vector in complement_exp_vec_dict[q][exponent_vector]: pair = [[exponent_vector, complementary_vector]] system_list.append(pair) elif len(S0) > 1: S1 = S0[:-1] old_systems = compatible_systems(S1, complement_exp_vec_dict) q = S0[-1] gcds = [gcd(q-1, qj-1) for qj in S1] for exp_vec in complement_exp_vec_dict[q]: l = len(exp_vec) for comp_vec in complement_exp_vec_dict[q][exp_vec]: for old_system in old_systems: if all((compatible_vectors_check(exp_vec, exp_vec_qj, g, l) and compatible_vectors_check(comp_vec, comp_vec_qj, g, l)) for g, (exp_vec_qj, comp_vec_qj) in zip(gcds, old_system)): # build the new system and append it to the list. new_system = old_system + [[exp_vec, comp_vec]] system_list.append(new_system) return system_list def compatible_system_lift(compatible_system, split_primes_list): r""" Given a compatible system of exponent vectors and complementary exponent vectors, return a lift to the integers. INPUT: - ``compatible_system`` -- a list of pairs ``[ [v0, w0], [v1, w1], .., [vk, wk] ]`` where [vi, wi] is a pair of complementary exponent vectors modulo ``qi - 1``, and all pairs are compatible. - ``split_primes_list`` -- a list of primes ``[ q0, q1, .., qk ]`` OUTPUT: A pair of vectors ``[v, w]`` satisfying: 1. ``v[0] == vi[0]`` for all ``i`` 2. ``w[0] == wi[0]`` for all ``i`` 3. ``v[j] == vi[j]`` modulo ``qi - 1`` for all ``i`` and all ``j > 0`` 4. ``w[j] == wi[j]`` modulo ``qi - 1`` for all ``i`` and all `j > 0`` 5. every entry of ``v`` and ``w`` is bounded by ``L/2`` in absolute value, where ``L`` is the least common multiple of ``{qi - 1 : qi in split_primes_list }`` EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import compatible_system_lift sage: split_primes_list = [3, 7] sage: comp_sys = [[(0, 1, 0), (0, 1, 0)], [(0, 3, 4), (0, 1, 2)]] sage: compatible_system_lift(comp_sys, split_primes_list) [(0, 3, -2), (0, 1, 2)] """ if len(split_primes_list) != len(compatible_system): raise ValueError("The number of primes does not match the length of the given exponent vectors.") # the first entries are already determined. exponent_vector_lift = [ZZ(compatible_system[0][0][0])] complement_vector_lift = [ZZ(compatible_system[0][1][0])] # fill in exponent_vector_lift moduli_list = [q-1 for q in split_primes_list] L = lcm(moduli_list) t = len(compatible_system[0][0]) for i in range(1,t): exp_coord_residues = [pair[0][i] for pair in compatible_system] comp_coord_residues = [pair[1][i] for pair in compatible_system] ev_lift_coordinate = CRT(exp_coord_residues, moduli_list) cv_lift_coordinate = CRT(comp_coord_residues, moduli_list) # these values lie in the range [0, L-1], so we must shift them if they are bigger than L/2. 
if ev_lift_coordinate > L/2: ev_lift_coordinate -= L if cv_lift_coordinate > L/2: cv_lift_coordinate -= L exponent_vector_lift.append(ev_lift_coordinate) complement_vector_lift.append(cv_lift_coordinate) return [tuple(exponent_vector_lift), tuple(complement_vector_lift)] def solutions_from_systems(SUK, bound, cs_list, split_primes_list): r""" Lifts compatible systems to the integers and returns the S-unit equation solutions the lifts yield. INPUT: - ``SUK`` -- the group of `S`-units where we search for solutions - ``bound`` -- a bound for the entries of all entries of all lifts - ``cs_list`` -- a list of compatible systems of exponent vectors modulo `q-1` for various primes `q` - ``split_primes_list`` -- a list of primes giving the moduli of the exponent vectors in ``cs_list`` OUTPUT: A list of solutions to the S-unit equation. Each solution is a list: 1. an exponent vector over the integers, ``ev`` 2. an exponent vector over the integers, ``cv`` 3. the S-unit corresponding to ``ev``, ``iota_exp`` 4. the S-unit corresponding to ``cv``, ``iota_comp`` .. NOTE:: - Every entry of ``ev`` is less than or equal to bound in absolute value - every entry of ``cv`` is less than or equal to bound in absolute value - ``iota_exp + iota_comp == 1`` EXAMPLES: Given a single compatible system, a solution can be found. :: sage: from sage.rings.number_field.S_unit_solver import solutions_from_systems sage: K.<xi> = NumberField(x^2-15) sage: SUK = K.S_unit_group(S=K.primes_above(2)) sage: split_primes_list = [7, 17] sage: a_compatible_system = [[[(0, 0, 5), (0, 0, 5)], [(0, 0, 15), (0, 0, 15)]]] sage: solutions_from_systems( SUK, 20, a_compatible_system, split_primes_list ) [((0, 0, -1), (0, 0, -1), 1/2, 1/2)] """ solutions = [] for system in cs_list: ev, cv = compatible_system_lift(system, split_primes_list) if all(abs(x) <= bound for x in ev[1:] + cv[1:]): # the entries are all below the bound, so there is nothing left to do # except construct the elements and see if they are solutions to # the S-unit equation iota_exp = SUK.exp( ev ) iota_comp = SUK.exp( cv ) if iota_exp + iota_comp == 1: sol = ( ev, cv, iota_exp, iota_comp ) solutions.append( sol ) return solutions def clean_sfs(sfs_list): r""" Given a list of S-unit equation solutions, remove trivial redundancies. INPUT: - ``sfs_list`` -- a list of solutions to the S-unit equation OUTPUT: A list of solutions to the S-unit equation .. NOTE:: The function looks for cases where ``x + y = 1`` and ``y + x = 1`` appear\ as separate solutions, and removes one. EXAMPLES: The function is not dependent on the number field and removes redundancies in any list. :: sage: from sage.rings.number_field.S_unit_solver import clean_sfs sage: sols = [((1, 0, 0), (0, 0, 1), -1, 2), ((0, 0, 1), (1, 0, 0), 2, -1)] sage: clean_sfs( sols ) [((1, 0, 0), (0, 0, 1), -1, 2)] """ # given the output from solutions_from_systems, # look for trivial redundancies: swapping exp_vec, comp_vec, particularly. new_sfs = [] for entry in sfs_list: swapped_entry = (entry[1], entry[0], entry[3], entry[2]) if entry not in new_sfs and swapped_entry not in new_sfs: new_sfs.append(entry) return new_sfs def sieve_below_bound(K, S, bound=10, bump=10, split_primes_list=[], verbose=False): r""" Return all solutions to the S-unit equation ``x + y = 1`` over K with exponents below the given bound. 
INPUT: - ``K`` -- a number field (an absolute extension of the rationals) - ``S`` -- a list of finite primes of ``K`` - ``bound`` -- a positive integer upper bound for exponents, solutions with exponents having absolute value below this bound will be found (default: 10) - ``bump`` -- a positive integer by which the minimum LCM will be increased if not enough split primes are found in sieving step (default: 10) - ``split_primes_list`` -- a list of rational primes that split completely in the extension K/Q, used for sieving. For complete list of solutions should have lcm of {(p_i-1)} for primes p_i greater than bound (default: []) - ``verbose`` -- an optional parameter allowing the user to print information during the sieving process (default: False) OUTPUT: A list of tuples ``[( A_1, B_1, x_1, y_1), (A_2, B_2, x_2, y_2), ... ( A_n, B_n, x_n, y_n)]`` such that: 1. The first two entries are tuples ``A_i = (a_0, a_1, ... , a_t)`` and ``B_i = (b_0, b_1, ... , b_t)`` of exponents. 2. The last two entries are ``S``-units ``x_i`` and ``y_i`` in ``K`` with ``x_i + y_i = 1``. 3. If the default generators for the ``S``-units of ``K`` are ``(rho_0, rho_1, ... , rho_t)``, then these satisfy ``x_i = \prod(rho_i)^(a_i)`` and ``y_i = \prod(rho_i)^(b_i)``. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import sieve_below_bound, eq_up_to_order sage: K.<xi> = NumberField(x^2+x+1) sage: SUK = UnitGroup(K,S=tuple(K.primes_above(3))) sage: S = SUK.primes() sage: sols = sieve_below_bound(K, S, 10) sage: expected = [ ....: ((1, -1), (0, -1), 1/3*xi + 2/3, -1/3*xi + 1/3), ....: ((0, 1), (4, 0), xi + 2, -xi - 1), ....: ((2, 0), (5, 1), xi, -xi + 1), ....: ((1, 0), (5, 0), xi + 1, -xi)] sage: eq_up_to_order(sols, expected) True """ SUK = UnitGroup(K, S=tuple(S)) initial_bound = bound while not split_primes_list: try: split_primes_list = split_primes_large_lcm(SUK, initial_bound) except ValueError: initial_bound += bump print("Couldn't find enough split primes. Bumping to ", initial_bound) if not K.is_absolute(): raise ValueError("K must be an absolute extension.") complement_exp_vec_dict = construct_complement_dictionaries(split_primes_list, SUK, verbose=verbose) cs_list = compatible_systems(split_primes_list, complement_exp_vec_dict) sfs_list = solutions_from_systems(SUK, bound, cs_list, split_primes_list) S_unit_solutions = clean_sfs(sfs_list) return S_unit_solutions def solve_S_unit_equation(K, S, prec=106, include_exponents=True, include_bound=False, proof=None, verbose=False): r""" Return all solutions to the S-unit equation ``x + y = 1`` over K. INPUT: - ``K`` -- a number field (an absolute extension of the rationals) - ``S`` -- a list of finite primes of ``K`` - ``prec`` -- precision used for computations in real, complex, and p-adic fields (default: 106) - ``include_exponents`` -- whether to include the exponent vectors in the returned value (default: True). - ``include_bound`` -- whether to return the final computed bound (default: False) - ``verbose`` -- whether to print information during the sieving step (default: False) OUTPUT: A list of tuples ``[( A_1, B_1, x_1, y_1), (A_2, B_2, x_2, y_2), ... ( A_n, B_n, x_n, y_n)]`` such that: 1. The first two entries are tuples ``A_i = (a_0, a_1, ... , a_t)`` and ``B_i = (b_0, b_1, ... , b_t)`` of exponents. These will be omitted if ``include_exponents`` is ``False``. 2. The last two entries are ``S``-units ``x_i`` and ``y_i`` in ``K`` with ``x_i + y_i = 1``. 3. If the default generators for the ``S``-units of ``K`` are ``(rho_0, rho_1, ... 
, rho_t)``, then these satisfy ``x_i = \prod(rho_i)^(a_i)`` and ``y_i = \prod(rho_i)^(b_i)``. If ``include_bound``, will return a pair ``(sols, bound)`` where ``sols`` is as above and ``bound`` is the bound used for the entries in the exponent vectors. EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import solve_S_unit_equation, eq_up_to_order sage: K.<xi> = NumberField(x^2+x+1) sage: S = K.primes_above(3) sage: sols = solve_S_unit_equation(K, S, 200) sage: expected = [ ....: ((0, 1), (4, 0), xi + 2, -xi - 1), ....: ((1, -1), (0, -1), 1/3*xi + 2/3, -1/3*xi + 1/3), ....: ((1, 0), (5, 0), xi + 1, -xi), ....: ((2, 0), (5, 1), xi, -xi + 1)] sage: eq_up_to_order(sols, expected) True In order to see the bound as well use the optional parameter ``include_bound``:: sage: solutions, bound = solve_S_unit_equation(K, S, 100, include_bound=True) sage: bound 7 You can omit the exponent vectors:: sage: sols = solve_S_unit_equation(K, S, 200, include_exponents=False) sage: expected = [(xi + 2, -xi - 1), (1/3*xi + 2/3, -1/3*xi + 1/3), (-xi, xi + 1), (-xi + 1, xi)] sage: set(frozenset(a) for a in sols) == set(frozenset(b) for b in expected) True It is an error to use values in S that are not primes in K:: sage: solve_S_unit_equation(K, [3], 200) Traceback (most recent call last): ... ValueError: S must consist only of prime ideals, or a single element from which a prime ideal can be constructed. We check the case that the rank is 0:: sage: K.<xi> = NumberField(x^2+x+1) sage: solve_S_unit_equation(K, []) [((1,), (5,), xi + 1, -xi)] """ # Checks to make sure inputs are legal # K must be an absolute extension: if not K.is_absolute(): raise ValueError("K must be an absolute extension.") # S must be a finite set of primes try: SUK = UnitGroup(K, proof=proof, S=tuple(S)) except Exception: raise ValueError("S must consist only of prime ideals, or a single element from which a prime ideal can be constructed.") # Gather the roots of unity of the number field A = K.roots_of_unity() w = K.number_of_roots_of_unity() if SUK.rank() == 0: # Since the rank is 0, K is imaginary quadratic and S is empty # Only possibilities are combinations of roots of unity # and this can only occur when there are 6 roots of unity, when # (1+sqrt(-3))/2 + (1-sqrt(-3))/2 = 1 is the unique solution. if len(A) == 6: S_unit_solutions = [((ZZ(1),), (ZZ(5),), A[0], A[-2])] else: S_unit_solutions = [] else: # First find a bound using the LLL reduction method # That bound must exceed both 4 and w. (See [AKMRVW].) all_LLL_bounds = [4, w] all_LLL_bounds += [cx_LLL_bound(SUK, A, prec)] if S: # only need p-adic bound when S nonempty all_LLL_bounds.append(p_adic_LLL_bound(SUK, A, prec)) # Take the largest of all of the bounds we found final_LLL_bound = max(all_LLL_bounds) if verbose: print("The LLL bound is: ", final_LLL_bound) # Use the sieve to more easily find all bounds S_unit_solutions = sieve_below_bound(K, list(S), final_LLL_bound, verbose=verbose) if not include_exponents: S_unit_solutions = [sol[2:] for sol in S_unit_solutions] if include_bound: return S_unit_solutions, final_LLL_bound else: return S_unit_solutions def eq_up_to_order(A, B): """ If A and B are lists of four-tuples ``[a0,a1,a2,a3]`` and ``[b0,b1,b2,b3]``, checks that there is some reordering so that either ``ai=bi`` for all ``i`` or ``a0==b1``, ``a1==b0``, ``a2==b3``, ``a3==b2``. The entries must be hashable. 
EXAMPLES:: sage: from sage.rings.number_field.S_unit_solver import eq_up_to_order sage: L = [(1,2,3,4),(5,6,7,8)] sage: L1 = [L[1],L[0]] sage: L2 = [(2,1,4,3),(6,5,8,7)] sage: eq_up_to_order(L, L1) True sage: eq_up_to_order(L, L2) True sage: eq_up_to_order(L, [(1,2,4,3),(5,6,8,7)]) False """ # does not look very optimal Adup = set(A + [(a[1],a[0],a[3],a[2]) for a in A]) Bdup = set(B + [(b[1],b[0],b[3],b[2]) for b in B]) return Adup == Bdup
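The lifting step above (compatible_system_lift) keeps the 0th coordinate fixed and recovers every other integer coordinate from its residues modulo each q - 1 by the Chinese Remainder Theorem, then recenters the result into (-L/2, L/2]. Below is a minimal, dependency-free Python sketch of that centered CRT step; the helper names are mine, and the sample residues and moduli are read off the compatible_system_lift doctest above (split primes 3 and 7, so moduli 2 and 6).

# A minimal, pure-Python sketch of the centered CRT lift performed by
# compatible_system_lift above.  Helper names are illustrative only; the
# residues/moduli in the example come from the doctest of
# compatible_system_lift (split primes 3 and 7 -> moduli 2 and 6).

def _egcd(a, b):
    # extended gcd: returns (g, u, v) with a*u + b*v == g
    if b == 0:
        return a, 1, 0
    g, x, y = _egcd(b, a % b)
    return g, y, x - (a // b) * y

def crt_pair(r1, m1, r2, m2):
    # combine x == r1 (mod m1) and x == r2 (mod m2); the moduli need not be
    # coprime, only consistent (as the q-1 moduli here never are coprime)
    g, u, _ = _egcd(m1, m2)
    if (r2 - r1) % g:
        raise ValueError("incompatible residues")
    l = m1 // g * m2                       # lcm(m1, m2)
    t = ((r2 - r1) // g * u) % (m2 // g)
    return (r1 + m1 * t) % l, l

def centered_lift(residues, moduli):
    # fold the pairwise CRT, then shift from [0, L) into (-L/2, L/2]
    x, m = residues[0], moduli[0]
    for r, q in zip(residues[1:], moduli[1:]):
        x, m = crt_pair(x, m, r, q)
    return x - m if x > m / 2 else x

# second and third coordinates of the exponent vector in the doctest above:
print(centered_lift([1, 3], [2, 6]))   # -> 3
print(centered_lift([0, 4], [2, 6]))   # -> -2

The two printed values reproduce the lifted vector (0, 3, -2) from the compatible_system_lift doctest, which is the sanity check that the recentering matches what the Sage code does.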
import React from 'react'; import Text from '../../components/Text'; import Link from '../../components/Link'; import { List, ListItem } from '../../components/List'; const list = [ { kind: 'simple', heading: 'Simple lists' }, { kind: 'unordered', heading: 'Bullet lists' }, { kind: 'ordered', heading: 'Ordered lists' }, ]; const Normaltext = [ { kind: 'p', text: 'Body copy, paragraph', styling: '16px, Regular' }, { kind: 'p', text: 'Label (form)', styling: '14px, SemiBold' }, { kind: 'code', text: 'Code', styling: 'Monospace, 16px, Regular' }, { kind: 'sup', text: 'Sup', styling: '11px, Regular' }, { kind: 'p', text: 'Inline highlight', styling: '14px, SemiBold' }, ]; const colors = [ { color: '#031C2D', text: '#text-01' }, { color: '#5A6872', text: '#text-02' }, { color: '#DFE6EB', text: '#text-03' }, ]; const headingText = [ { kind: 'h1', text: 'Heading 1', styling: '29px, SemiBold' }, { kind: 'h2', text: 'Heading 2', styling: '25px, SemiBold' }, { kind: 'h3', text: 'Heading 3', styling: '22px, SemiBold' }, { kind: 'h4', text: 'Heading 4', styling: '20px, SemiBold' }, { kind: 'h5', text: 'Heading 5', styling: '18px, SemiBold' }, { kind: 'h6', text: 'Heading 6', styling: '16px, SemiBold' }, ]; const emphasisText = [ { kind: 'strong', text: 'Bold', styling: '16px, Bold' }, { kind: 'bold', text: 'SemiBold', styling: '16px, SemiBold' }, { kind: 'i', text: 'Italic', styling: '16px, Italic' }, ]; const Typography = () => { return ( <> <h3>Headings</h3> <p>Typographic scale when using headings in the text component</p> <div className="wfp--card-box"> {headingText.map((e) => ( <div style={{ paddingLeft: '0.5rem' }}> <Text kind={e.kind}>{e.text}</Text> <div style={{ color: '#A9A9A9', marginBottom: '2rem' }}> {e.styling} </div> </div> ))} </div> <h3>Text</h3> <p>Typographic scale for normal text</p> <div className="wfp--card-box"> {Normaltext.map((e) => ( <div style={{ paddingLeft: '0.5rem' }}> <Text kind={e.kind}>{e.text}</Text> <div style={{ color: '#A9A9A9', marginBottom: '2rem' }}> {e.styling} </div> </div> ))} </div> <h3>Page title</h3> <p>Typographic scale for page title and overline</p> <div className="wfp--card-box"> <div style={{ paddingLeft: '0.5rem' }}> <Text kind="title">Title</Text> <div style={{ color: '#A9A9A9', marginBottom: '2rem' }}> 58px, Light </div> <Text kind="subtitle">Overline</Text> <div style={{ color: '#A9A9A9', marginBottom: '2rem' }}> 14px, SemiBold </div> </div> </div> <h3>Links</h3> <Text kind="p">Typographic scale for links</Text> <div className="wfp--card-box"> <div style={{ paddingLeft: '0.5rem' }}> <Text kind="a">Link</Text> <div style={{ color: '#A9A9A9', marginBottom: '2rem' }}> 16px, Regular </div> <Link className="wfp--link--hover">Link</Link> <div style={{ color: '#A9A9A9', marginBottom: '2rem' }}> 16px, Regular </div> </div> </div> <h3>Emphasis</h3> <p>Typographic scale for putting emphasis on text</p> <div className="wfp--card-box"> {emphasisText.map((e) => ( <div style={{ paddingLeft: '0.5rem' }}> <Text kind={e.kind}>{e.text}</Text> <div style={{ color: '#A9A9A9', marginBottom: '2rem' }}> {e.styling} </div> </div> ))} </div> <h3>Lists</h3> <p> Typographic scale for lists of items in the different variations: ordered and unordered </p> <div className="wfp--card-box"> {list.map((e) => ( <div style={{ paddingLeft: '0.5rem' }}> <Text kind="h4">{e.heading}</Text> <List kind={e.kind}> <ListItem>Numbered List 1</ListItem> <ListItem>Numbered List 2</ListItem> <ListItem>Numbered List 3</ListItem> </List> </div> ))} </div> <h3>Colors</h3> <p>Color accents to use on 
normal text</p> <div className="wfp--card-box"> {colors.map((e) => ( <div style={{ display: 'flex', paddingLeft: '0.5rem' }}> <div> <Text>{e.text}</Text> <div style={{ color: '#A9A9A9', marginBottom: '3rem' }}> 16px, {e.color} </div> </div> <div style={{ backgroundColor: e.color, width: '50px', height: '50px', borderRadius: '50%', marginLeft: '1.5rem', }} ></div> </div> ))} </div> </> ); }; export default Typography;
export default Object.assign || function(target, ...sources) { sources.forEach((source) => { Object.keys(source).forEach((key) => (target[key] = source[key])); }); return target; };
import discord COR = 0x690FC3 msg_id = None msg_user = None client = discord.Client() @client.event async def on_ready(): print('BOT ONLINE !') print(client.user.name) print(client.user.id) print('------Vagner Tutorial----') print('------LabNegro-------') print('-----Tutorial Cargo Custom Emoji-----') """ Link to download the League of Legends rank emojis: https://drive.google.com/drive/folders/1dFHnj0RWG23iR2JfoEr56RdUs9flN-Lf?usp=sharing Remember that the bot has to be in a server that has these emojis. How do you get an emoji's id? 1. Add it to the server. 2. In Discord, type \ and select the emoji. It will return the id, for example: <:support:439639384418025475> 3. Now just swap the emojis in the code for the ones from your server. REMEMBER - THE ROLE NAMES MUST BE IDENTICAL TO THE ONES ON THE SERVER - AND WHEN ADDING A CUSTOM REACTION TO THE MESSAGE IT MUST NOT INCLUDE THE < > """ @client.event async def on_message(message): if message.content.lower().startswith("py_lol"): embedlol = discord.Embed( title='Escolha Seu Elo e Lane', color=COR, description='\n' '\n') embedlol.set_thumbnail(url='https://i.imgur.com/Mn08hTd.png') embedlol.add_field(name='Unranked', value='<:unraked:439639400666759180>', inline=True) embedlol.add_field(name='Top', value='<:top:439639384573214742>', inline=True) embedlol.add_field(name='Bronze', value='<:bronze:439639385017942036>', inline=True) embedlol.add_field(name='Jungle', value='<:jungle:439639384036474881>', inline=True) embedlol.add_field(name='Prata', value='<:prata:439639397001068544>', inline=True) embedlol.add_field(name='Mid', value='<:mid:439639384128618506>', inline=True) embedlol.add_field(name='Ouro', value='<:ouro:439639401685843987>', inline=True) embedlol.add_field(name='Adc', value='<:adc:439639377212080129>', inline=True) embedlol.add_field(name='Platina', value='<:platina:439639389900111872>', inline=True) embedlol.add_field(name='Suporte', value='<:support:439639384418025475>', inline=True) embedlol.add_field(name='Diamante', value='<:diamante:439639397273436160>', inline=True) botmsg = await client.send_message(message.channel, embed=embedlol) await client.add_reaction(botmsg, ":unraked:439639400666759180") await client.add_reaction(botmsg, ":bronze:439639385017942036") await client.add_reaction(botmsg, ":prata:439639397001068544") await client.add_reaction(botmsg, ":ouro:439639401685843987") await client.add_reaction(botmsg, ":platina:439639389900111872") await client.add_reaction(botmsg, ":diamante:439639397273436160") await client.add_reaction(botmsg, ":top:439639384573214742") await client.add_reaction(botmsg, ":jungle:439639384036474881") await client.add_reaction(botmsg, ":mid:439639384128618506") await client.add_reaction(botmsg, ":adc:439639377212080129") await client.add_reaction(botmsg, ":support:439639384418025475") global msg_id msg_id = botmsg.id global msg_user msg_user = message.author @client.event async def on_reaction_add(reaction, user): msg = reaction.message if reaction.custom_emoji and reaction.emoji.id == "439639400666759180": role = discord.utils.find(lambda r: r.name == "● Unranked", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639385017942036": role = discord.utils.find(lambda r: r.name == "● Bronze", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639397001068544": role = discord.utils.find(lambda r: r.name == "● Prata", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and 
reaction.emoji.id == "439639401685843987": role = discord.utils.find(lambda r: r.name == "● Gold", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639389900111872": role = discord.utils.find(lambda r: r.name == "● Platina", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639397273436160": role = discord.utils.find(lambda r: r.name == "● Diamante", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384573214742": role = discord.utils.find(lambda r: r.name == "● Top", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384036474881": role = discord.utils.find(lambda r: r.name == "● Jg", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384128618506": role = discord.utils.find(lambda r: r.name == "● Mid", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639377212080129": role = discord.utils.find(lambda r: r.name == "● Adc", msg.server.roles) await client.add_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384418025475": role = discord.utils.find(lambda r: r.name == "● Suporte", msg.server.roles) await client.add_roles(user, role) @client.event async def on_reaction_remove(reaction, user): msg = reaction.message if reaction.custom_emoji and reaction.emoji.id == "439639400666759180": role = discord.utils.find(lambda r: r.name == "● Unranked", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639385017942036": role = discord.utils.find(lambda r: r.name == "● Bronze", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639397001068544": role = discord.utils.find(lambda r: r.name == "● Prata", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639401685843987": role = discord.utils.find(lambda r: r.name == "● Gold", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639389900111872": role = discord.utils.find(lambda r: r.name == "● Platina", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639397273436160": role = discord.utils.find(lambda r: r.name == "● Diamante", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384573214742": role = discord.utils.find(lambda r: r.name == "● Top", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384036474881": role = discord.utils.find(lambda r: r.name == "● Jg", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384128618506": role = discord.utils.find(lambda r: r.name == "● Mid", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639377212080129": role = discord.utils.find(lambda r: r.name == "● Adc", msg.server.roles) await client.remove_roles(user, role) if reaction.custom_emoji and reaction.emoji.id == "439639384418025475": role = discord.utils.find(lambda r: r.name == "● Suporte", msg.server.roles) await client.remove_roles(user, role) 
client.run('your_token_here')  # put your bot token here
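The reaction handlers above repeat the same find-role-then-add/remove pattern once per emoji. A table-driven variant keeps the emoji-id to role-name mapping in one place; the sketch below is only an illustration, reusing the same pre-rewrite discord.py calls the tutorial already relies on (client.add_roles, client.remove_roles, discord.utils.find) and the ids and role names copied from the handlers above. It assumes the module-level client defined in the bot above.

# Sketch of a table-driven alternative to the repeated if-blocks above.
# Emoji ids and role names are copied from the handlers; "client" is the
# discord.Client() created at the top of the bot.
EMOJI_TO_ROLE = {
    "439639400666759180": "● Unranked",
    "439639385017942036": "● Bronze",
    "439639397001068544": "● Prata",
    "439639401685843987": "● Gold",
    "439639389900111872": "● Platina",
    "439639397273436160": "● Diamante",
    "439639384573214742": "● Top",
    "439639384036474881": "● Jg",
    "439639384128618506": "● Mid",
    "439639377212080129": "● Adc",
    "439639384418025475": "● Suporte",
}

async def toggle_role(reaction, user, add=True):
    # look up the role once, then add or remove it for the reacting user
    if not reaction.custom_emoji:
        return
    role_name = EMOJI_TO_ROLE.get(reaction.emoji.id)
    if role_name is None:
        return
    role = discord.utils.find(lambda r: r.name == role_name,
                              reaction.message.server.roles)
    if role is None:
        return
    if add:
        await client.add_roles(user, role)
    else:
        await client.remove_roles(user, role)

With this helper, on_reaction_add and on_reaction_remove each reduce to a single call (toggle_role(reaction, user, add=True/False)), and adding a new rank only means adding one dictionary entry.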
/** * Created by dengchongjing on 2017/5/11. */ import * as types from '../types' import { getTopic } from '@/api' const state = { article: {} } const actions = { getTopic ({commit}, id) { getTopic(id).then(data => { commit(types.UPDATE_TOPIC_DATA, data.data) }) } } const mutations = { // Update the current article [types.UPDATE_TOPIC_DATA] (state, article) { state.article = article } } export default { state, mutations, actions }
/* * Copyright (C) 2017, Axis Communications AB, LUND, SWEDEN */ /* * Simple example application that demonstrates how axoverlay is used. */ #include <stdio.h> #include <string.h> #include <errno.h> #include <glib.h> #include <glib-unix.h> #include <cairo/cairo.h> #include <axoverlay.h> #define OVERLAY_WIDTH 144 #define OVERLAY_HEIGHT 144 #define PIE_RADIUS 64 #define ANIMATION_FPS 10 #define M_PI 3.14 static gint animation_timer = -1; static gdouble pie_angle = 0.0; static gint overlay_id = -1; static gboolean signal_handler(gpointer data) { GMainLoop *main_loop = (GMainLoop *) data; g_main_loop_quit(main_loop); return G_SOURCE_REMOVE; } static gboolean update_overlay_cb(gpointer data) { GError *error = NULL; /* Update the angle of the pie shape */ pie_angle += 5.0; if (pie_angle >= 360.0) { pie_angle -= 360.0; } /* Request a redraw of the overlay */ axoverlay_redraw(&error); if (error != NULL) { /* * If redraw fails then it is likely due to that overlayd has * crashed. Don't exit instead wait for overlayd to restart and * for axoverlay to restore the connection. */ printf("Failed to redraw overlay (%d): %s\n", error->code, error->message); g_error_free(error); } return G_SOURCE_CONTINUE; } static void render_overlay_cb(gpointer render_context, gint id, struct axoverlay_stream_data *stream, enum axoverlay_position_type postype, gfloat overlay_x, gfloat overlay_y, gint overlay_width, gint overlay_height, gpointer user_data) { cairo_t *cr = render_context; gdouble x = OVERLAY_WIDTH/2; gdouble y = OVERLAY_HEIGHT/2; gdouble radius = PIE_RADIUS; gdouble angle1 = 0; gdouble angle2 = pie_angle*(M_PI/180.0); /* Clear background */ cairo_set_source_rgba(cr, 0.0, 0.0, 0.0, 0.0); cairo_set_operator(cr, CAIRO_OPERATOR_SOURCE); cairo_rectangle(cr, 0, 0, OVERLAY_WIDTH, OVERLAY_HEIGHT); cairo_fill(cr); /* Draw a filled pie shape */ cairo_set_source_rgba(cr, 1.0, 0.0, 0.0, 1.0); cairo_set_operator(cr, CAIRO_OPERATOR_SOURCE); cairo_set_line_width(cr, 5.0); cairo_arc(cr, x, y, radius, angle1, angle2); cairo_line_to(cr, x, y); cairo_line_to(cr, x+PIE_RADIUS, y); cairo_fill(cr); } int main(int argc, char **argv) { GError *error = NULL; GMainLoop *main_loop = NULL; /* Create a glib main loop */ main_loop = g_main_loop_new(NULL, FALSE); g_unix_signal_add(SIGINT, signal_handler, main_loop); g_unix_signal_add(SIGTERM, signal_handler, main_loop); if(!axoverlay_is_backend_supported(AXOVERLAY_CAIRO_IMAGE_BACKEND)) { printf("AXOVERLAY_CAIRO_IMAGE_BACKEND is not supported"); return 1; } /* Initialize the library */ struct axoverlay_settings settings; axoverlay_init_axoverlay_settings(&settings); settings.render_callback = render_overlay_cb; settings.adjustment_callback = NULL; settings.select_callback = NULL; settings.backend = AXOVERLAY_CAIRO_IMAGE_BACKEND; axoverlay_init(&settings, &error); if (error != NULL) { printf("Failed to initialize axoverlay: %s", error->message); g_error_free(error); return 1; } /* Create an overlay */ struct axoverlay_overlay_data data; axoverlay_init_overlay_data(&data); data.postype = AXOVERLAY_CUSTOM_NORMALIZED; data.anchor_point = AXOVERLAY_ANCHOR_CENTER; data.x = 0.0; data.y = 0.0; data.width = OVERLAY_WIDTH; data.height = OVERLAY_WIDTH; data.colorspace = AXOVERLAY_COLORSPACE_ARGB32; overlay_id = axoverlay_create_overlay(&data, NULL, &error); if (error != NULL) { printf("Failed to create first overlay: %s", error->message); g_error_free(error); return 1; } /* Draw overlays */ axoverlay_redraw(&error); if (error != NULL) { printf("Failed to draw overlays: %s", error->message); 
axoverlay_destroy_overlay(overlay_id, &error); axoverlay_cleanup(); g_error_free(error); return 1; } /* Start animation timer */ animation_timer = g_timeout_add(1000/ANIMATION_FPS, update_overlay_cb, NULL); /* Enter main loop */ g_main_loop_run(main_loop); /* Destroy the overlay */ axoverlay_destroy_overlay(overlay_id, &error); if (error != NULL) { printf("Failed to destroy first overlay: %s", error->message); g_error_free(error); return 1; } /* Release library resources */ axoverlay_cleanup(); /* Release the animation timer */ g_source_remove(animation_timer); /* Release main loop */ g_main_loop_unref(main_loop); return 0; }
/****************************************************************************** Copyright (c) 2000 Microsoft Corporation Module Name: ProjectConstants.h Abstract: This file contains contants common to the whole project. Revision History: Davide Massarenti (Dmassare) 03/20/2000 created ******************************************************************************/ #if !defined(__INCLUDED___PCH___PROJECTCONSTANTS_H___) #define __INCLUDED___PCH___PROJECTCONSTANTS_H___ #ifndef DEBUG #undef NOJETBLUECOM #define NOJETBLUECOM #endif //////////////////////////////////////////////////////////////////////////////// #define HC_ROOT L"%WINDIR%\\PCHealth" #define HC_ROOT_HELPSVC HC_ROOT L"\\HelpCtr" #define HC_ROOT_HELPSVC_BINARIES HC_ROOT_HELPSVC L"\\Binaries" #define HC_ROOT_HELPSVC_CONFIG HC_ROOT_HELPSVC L"\\Config" #define HC_ROOT_HELPSVC_BATCH HC_ROOT_HELPSVC L"\\Batch" #define HC_ROOT_HELPSVC_DATACOLL HC_ROOT_HELPSVC L"\\DataColl" #define HC_ROOT_HELPSVC_LOGS HC_ROOT_HELPSVC L"\\Logs" #define HC_ROOT_HELPSVC_TEMP HC_ROOT_HELPSVC L"\\Temp" #define HC_ROOT_HELPSVC_OFFLINECACHE HC_ROOT_HELPSVC L"\\OfflineCache" #define HC_ROOT_HELPSVC_PKGSTORE HC_ROOT_HELPSVC L"\\PackageStore" #define HC_HELPSET_ROOT HC_ROOT_HELPSVC L"\\" #define HC_HELPSET_SUB_INSTALLEDSKUS L"InstalledSKUs" #define HC_HELPSET_SUB_DATABASE L"Database" #define HC_HELPSET_SUB_INDEX L"Indices" #define HC_HELPSET_SUB_SYSTEM L"System" #define HC_HELPSET_SUB_SYSTEM_OEM L"System_OEM" #define HC_HELPSET_SUB_VENDORS L"Vendors" #define HC_HELPSET_SUB_HELPFILES L"HelpFiles" #define HC_HELPSET_SUBSUB_DATAARCHIVE L"pchdata.cab" #define HC_HELPSET_SUBSUB_DATABASEFILE L"hcdata.edb" #define HC_HELPSET_SUBSUB_INDEXFILE L"merged.hhk" #define HC_HELPSVC_HELPFILES_DEFAULT L"%WINDIR%\\Help" // This is relative to CSIDL_LOCAL_APPDATA (i.e: C:\Documents and Settings\<username>\Local Settings\Application Data) #define HC_ROOT_HELPCTR L"Microsoft\\HelpCtr" #define HC_REGISTRY_BASE L"SOFTWARE\\Microsoft\\PCHealth" #define HC_REGISTRY_HELPSVC HC_REGISTRY_BASE L"\\HelpSvc" #define HC_REGISTRY_HELPHOST HC_REGISTRY_BASE L"\\HelpHost" #define HC_REGISTRY_HELPCTR HC_REGISTRY_BASE L"\\HelpCtr" #define HC_REGISTRY_PCHSVC HC_REGISTRY_BASE L"\\PchSvc" #define HC_REGISTRY_HELPCTR_USER HC_REGISTRY_HELPCTR L"\\UserSettings" #define HC_REGISTRY_HELPCTR_IE HC_REGISTRY_HELPCTR L"\\IESettings" //////////////////////////////////////// #define HC_HELPSVC_STORE_TRUSTEDCONTENTS HC_ROOT_HELPSVC_CONFIG L"\\Cntstore.bin" #define HC_HELPSVC_STORE_CHANNELS HC_ROOT_HELPSVC_CONFIG L"\\SAFStore.xml" #define HC_HELPSVC_STORE_INCIDENTITEMS HC_ROOT_HELPSVC_CONFIG L"\\incstore.bin" #define HC_HELPSVC_STORE_SKUS HC_ROOT_HELPSVC_PKGSTORE L"\\SkuStore.bin" #define HC_HCUPDATE_LOGNAME HC_ROOT_HELPSVC_LOGS L"\\hcupdate.log" #define HC_HCUPDATE_STORE_PACKAGES HC_ROOT_HELPSVC_PKGSTORE L"\\pchver.xml" #define HC_HCUPDATE_STORE_SE HC_ROOT_HELPSVC_CONFIG L"\\sereg.xml" #define HC_SEMGR_LOGNAME HC_ROOT_HELPSVC_LOGS L"\\semgr.log" //////////////////////////////////////// // OLD #define HC_HELPSVC_STORE_USERS HC_ROOT_HELPSVC_CONFIG L"\\UsersStore.cxml" //////////////////////////////////////// #define HC_HELPSVC_NAME L"helpsvc" #define HC_MICROSOFT_DN L"CN=Microsoft Corporation,L=Redmond,S=Washington,C=US" //////////////////////////////////////// #define HC_TIMEOUT_NETWORKALIVE 3000 #define HC_TIMEOUT_DESTINATIONREACHABLE 3000 #define HC_TIMEOUT_CONNECTIONCHECK 15000 #define HC_TIMEOUT_LINKCHECKER_FOREGROUND 15000 #define HC_TIMEOUT_LINKCHECKER_BACKGROUND 25000 //////////////////////////////////////// 
#endif // !defined(__INCLUDED___PCH___PROJECTCONSTANTS_H___)
import re from mat.utils.utils import Utils, Issue class Issue(Issue): TITLE = 'SSL Pinning Check' DESCRIPTION = 'Checks if SSL Pinning is not implemented' ID = 'ssl-pinning' ISSUE_TITLE = 'Application Does Not Implement SSL Pinning' FINDINGS = 'The Team found the application did not implement SSL Pinning' def dependencies(self): return self.ANALYSIS.UTILS.check_dependencies(['static']) def run(self): files = Utils.grep(r'X509TrustManager|getAcceptedIssuers|checkClientTrusted|checkServerTrusted', self.ANALYSIS.LOCAL_SMALI + '*') if not files: self.REPORT = True self.FINDINGS = 'No evidence of TrustManager being used was found.' self.DETAILS = '' for f in files: with open(f, 'r') as d: smali = d.read() if re.search(r'.method.*checkServerTrusted(.*\n)*?[ \t]*\.prologue\n(([\t ]*(\.line.*)?)\n)*[ \t]*return-void', smali): self.REPORT = True self.DETAILS += '\n* {file}:\n\n<code>\n{method}</code>\n'.format(file=f.replace(self.ANALYSIS.LOCAL_SMALI, 'smali'), method=self.ANALYSIS.UTILS.get_smali_method('checkServerTrusted', f)) if re.search(r'.method.*getAcceptedIssuers(.*\n)*?[ \t]*\.prologue\n(([\t ]*(\.line.*)?)\n)*[ \t]*const\/4 v0, 0x0\n[ \n\t]*return-object v0', smali): self.REPORT = True self.DETAILS += '\n* {file}:\n\n<code>\n{method}</code>\n'.format(file=f.replace(self.ANALYSIS.LOCAL_SMALI, 'smali'), method=self.ANALYSIS.UTILS.get_smali_method('getAcceptedIssuers', f))
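The check above flags, among other things, a checkServerTrusted implementation that does nothing but return-void. The snippet below applies the same regular expression to a hand-written smali stub so the pattern the detection keys on is easy to see; the stub is invented for illustration and is not taken from a real APK.

# Self-contained illustration of the smali pattern the SSL pinning check
# above keys on: a checkServerTrusted body that immediately returns void.
# The smali snippet is a hand-written stub, not from a real application.
import re

stub_smali = """\
.method public checkServerTrusted([Ljava/security/cert/X509Certificate;Ljava/lang/String;)V
    .locals 0
    .prologue
    .line 42
    return-void
.end method
"""

pattern = (r'.method.*checkServerTrusted(.*\n)*?[ \t]*\.prologue\n'
           r'(([\t ]*(\.line.*)?)\n)*[ \t]*return-void')

print(bool(re.search(pattern, stub_smali)))   # True -> empty trust manager detected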
from typing import Optional import aiohttp from apple.rpc.full_node_rpc_client import FullNodeRpcClient from apple.util.byte_types import hexstr_to_bytes from apple.util.config import load_config from apple.util.default_root import DEFAULT_ROOT_PATH from apple.util.ints import uint16 from apple.util.misc import format_bytes async def netstorge_async(rpc_port: Optional[int], delta_block_height: str, start: str) -> None: """ Calculates the estimated space on the network given two block header hashes. """ try: config = load_config(DEFAULT_ROOT_PATH, "config.yaml") self_hostname = config["self_hostname"] if rpc_port is None: rpc_port = config["full_node"]["rpc_port"] client = await FullNodeRpcClient.create(self_hostname, uint16(rpc_port), DEFAULT_ROOT_PATH, config) if delta_block_height: if start == "": blockchain_state = await client.get_blockchain_state() if blockchain_state["peak"] is None: print("No blocks in blockchain") client.close() await client.await_closed() return None newer_block_height = blockchain_state["peak"].height else: newer_block = await client.get_block_record(hexstr_to_bytes(start)) if newer_block is None: print("Block header hash", start, "not found.") client.close() await client.await_closed() return None else: print("newer_height", newer_block.height) newer_block_height = newer_block.height newer_block_header = await client.get_block_record_by_height(newer_block_height) older_block_height = max(0, newer_block_height - int(delta_block_height)) older_block_header = await client.get_block_record_by_height(older_block_height) network_space_bytes_estimate = await client.get_network_space( newer_block_header.header_hash, older_block_header.header_hash ) print( "Older Block\n" f"Block Height: {older_block_header.height}\n" f"Weight: {older_block_header.weight}\n" f"VDF Iterations: {older_block_header.total_iters}\n" f"Header Hash: 0x{older_block_header.header_hash}\n" ) print( "Newer Block\n" f"Block Height: {newer_block_header.height}\n" f"Weight: {newer_block_header.weight}\n" f"VDF Iterations: {newer_block_header.total_iters}\n" f"Header Hash: 0x{newer_block_header.header_hash}\n" ) print(format_bytes(network_space_bytes_estimate)) except Exception as e: if isinstance(e, aiohttp.ClientConnectorError): print(f"Connection error. Check if full node rpc is running at {rpc_port}") else: print(f"Exception {e}") client.close() await client.await_closed()
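For reference, one way to invoke the helper above from a script is sketched below. It assumes netstorge_async from this module is in scope, that a synced full node is reachable through the RPC settings in the local config.yaml, and the 1000-block delta is an arbitrary example value.

# Minimal invocation sketch for netstorge_async above (illustrative values).
import asyncio

if __name__ == "__main__":
    # rpc_port=None -> read the port from config.yaml; start="" -> use the peak
    asyncio.run(netstorge_async(rpc_port=None, delta_block_height="1000", start=""))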
# This file is part of Indico. # Copyright (C) 2002 - 2020 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from __future__ import unicode_literals import textwrap from sqlalchemy import DDL from indico.core import signals @signals.db_schema_created.connect_via('categories') def _create_check_consistency_deleted(sender, connection, **kwargs): sql = textwrap.dedent(""" CREATE FUNCTION categories.check_consistency_deleted() RETURNS trigger AS $BODY$ DECLARE rows int; BEGIN CREATE TEMP TABLE IF NOT EXISTS _categories_consistency_deleted_checked (dummy bool) ON COMMIT DROP; IF EXISTS (SELECT 1 FROM _categories_consistency_deleted_checked) THEN RETURN NULL; ELSE INSERT INTO _categories_consistency_deleted_checked VALUES (true); END IF; -- use dynamic sql to prevent pg from preparing the statement with a crappy query plan EXECUTE $$ WITH RECURSIVE chains(id, path, is_deleted) AS ( SELECT id, ARRAY[id], is_deleted FROM categories.categories WHERE parent_id IS NULL UNION ALL SELECT cat.id, chains.path || cat.id, chains.is_deleted OR cat.is_deleted FROM categories.categories cat, chains WHERE cat.parent_id = chains.id ) SELECT 1 FROM events.events e JOIN chains ON (chains.id = e.category_id) WHERE NOT e.is_deleted AND chains.is_deleted; $$; GET DIAGNOSTICS rows = ROW_COUNT; IF rows != 0 THEN RAISE EXCEPTION SQLSTATE 'INDX1' USING MESSAGE = 'Categories inconsistent', DETAIL = 'Event inside deleted category'; END IF; EXECUTE $$ SELECT 1 FROM categories.categories cat JOIN categories.categories parent ON (parent.id = cat.parent_id) WHERE NOT cat.is_deleted AND parent.is_deleted; $$; GET DIAGNOSTICS rows = ROW_COUNT; IF rows != 0 THEN RAISE EXCEPTION SQLSTATE 'INDX1' USING MESSAGE = 'Categories inconsistent', DETAIL = 'Subcategory inside deleted category'; END IF; RETURN NULL; END; $BODY$ LANGUAGE plpgsql """) DDL(sql).execute(connection) @signals.db_schema_created.connect_via('categories') def _create_check_cycles(sender, connection, **kwargs): sql = textwrap.dedent(""" CREATE FUNCTION categories.check_cycles() RETURNS trigger AS $BODY$ DECLARE rows int; BEGIN -- use dynamic sql to prevent pg from preparing the statement with a crappy query plan EXECUTE $$ WITH RECURSIVE chains(id, path, is_cycle) AS ( SELECT id, ARRAY[id], false FROM categories.categories UNION ALL SELECT cat.id, chains.path || cat.id, cat.id = ANY(chains.path) FROM categories.categories cat, chains WHERE cat.parent_id = chains.id AND NOT chains.is_cycle ) SELECT 1 FROM chains WHERE is_cycle; $$; GET DIAGNOSTICS rows = ROW_COUNT; IF rows != 0 THEN RAISE EXCEPTION SQLSTATE 'INDX2' USING MESSAGE = 'Categories inconsistent', DETAIL = 'Cycle detected'; END IF; RETURN NULL; END; $BODY$ LANGUAGE plpgsql """) DDL(sql).execute(connection)
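The first trigger above walks every category chain with a recursive CTE, OR-ing is_deleted down the path, and raises if a live event sits anywhere under a deleted category. The same reachability rule restated in plain Python over a toy tree (invented data, not Indico's schema) makes the meaning of "inconsistent" easy to test.

# Plain-Python rendering of the walk in check_consistency_deleted:
# a category counts as deleted if it, or any ancestor, is deleted.
# The toy data below is illustrative only.
categories = {
    # id: (parent_id, is_deleted)
    1: (None, False),
    2: (1, True),      # deleted subtree starts here
    3: (2, False),     # not flagged itself, but lives under a deleted category
}

def effectively_deleted(cat_id):
    deleted = False
    current = cat_id
    while current is not None:
        parent, is_deleted = categories[current]
        deleted = deleted or is_deleted
        current = parent
    return deleted

events = {101: (3, False)}   # event_id: (category_id, is_deleted)

inconsistent = [e for e, (cat, e_deleted) in events.items()
                if not e_deleted and effectively_deleted(cat)]
print(inconsistent)   # [101] -> the trigger would raise 'Categories inconsistent'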
import * as React from 'react'; import createSvgIcon from './utils/createSvgIcon'; import { jsx as _jsx } from "react/jsx-runtime"; export default createSvgIcon( /*#__PURE__*/_jsx("path", { d: "M20 10h-3V8.86c1.72-.45 3-2 3-3.86h-3V4c0-.55-.45-1-1-1H8c-.55 0-1 .45-1 1v1H4c0 1.86 1.28 3.41 3 3.86V10H4c0 1.86 1.28 3.41 3 3.86V15H4c0 1.86 1.28 3.41 3 3.86V20c0 .55.45 1 1 1h8c.55 0 1-.45 1-1v-1.14c1.72-.45 3-2 3-3.86h-3v-1.14c1.72-.45 3-2 3-3.86zm-8 9c-1.11 0-2-.9-2-2s.89-2 2-2c1.1 0 2 .9 2 2s-.89 2-2 2zm0-5c-1.11 0-2-.9-2-2s.89-2 2-2c1.1 0 2 .9 2 2s-.89 2-2 2zm0-5c-1.11 0-2-.9-2-2 0-1.11.89-2 2-2 1.1 0 2 .89 2 2 0 1.1-.89 2-2 2z" }), 'Traffic');
function accepts(file, acceptedFiles) { if (file && acceptedFiles) { const acceptedFilesArray = Array.isArray(acceptedFiles) ? acceptedFiles : acceptedFiles.split(","); const fileName = file.name || ""; const mimeType = file.type || ""; const baseMimeType = mimeType.replace(/\/.*$/, ""); return acceptedFilesArray.some((type) => { const validType = type.trim(); if (validType.charAt(0) === ".") { return fileName.toLowerCase().endsWith(validType.toLowerCase()); } else if (validType.endsWith("/*")) { // This is something like a image/* mime type return baseMimeType === validType.replace(/\/.*$/, ""); } return mimeType === validType; }); } return true; } function isImage(file) { if (file.type.split("/")[0] === "image") { return true; } } function convertBytesToMbsOrKbs(filesize) { let size = ""; // I know, not technically correct... if (filesize >= 1000000) { size = filesize / 1000000 + " megabytes"; } else if (filesize >= 1000) { size = filesize / 1000 + " kilobytes"; } else { size = filesize + " bytes"; } return size; } async function createFileFromUrl(url) { const response = await fetch(url); const data = await response.blob(); const metadata = { type: data.type }; const filename = url.replace(/\?.+/, "").split("/").pop(); const ext = data.type.split("/").pop(); return new File([data], `${filename}.${ext}`, metadata); } export { createFileFromUrl, accepts };
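The matching rules in accepts() above are language-agnostic: an accept entry is either a file extension (".pdf"), a mime-type family wildcard ("image/*"), or an exact mime type. Below is a compact restatement of the same three rules, sketched in Python with made-up file names; it is an illustration of the logic, not a drop-in replacement for the JavaScript helper.

# Restatement of the three accept-matching rules above (extension,
# mime-family wildcard, exact mime type). File names and accept lists
# are made up for illustration.
def accepts(file_name, mime_type, accepted):
    base = mime_type.split("/")[0]
    for raw in accepted.split(","):
        rule = raw.strip()
        if rule.startswith("."):                       # ".pdf"
            if file_name.lower().endswith(rule.lower()):
                return True
        elif rule.endswith("/*"):                      # "image/*"
            if base == rule[:-2]:
                return True
        elif mime_type == rule:                        # "application/pdf"
            return True
    return False

print(accepts("photo.JPG", "image/jpeg", "image/*"))          # True
print(accepts("report.pdf", "application/pdf", ".pdf,.doc"))  # True
print(accepts("notes.txt", "text/plain", "image/*"))          # False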
import os import argparse from ConfigParser import SafeConfigParser import sys import logging #sys.path.append('/home/zhengc/NRC-LIMS-dataDownloader') sys.path.append(os.path.join(os.path.dirname(__file__), '..')) from nrc_ngs_dl.lims_database import LimsDatabase from nrc_ngs_dl.web_parser import WebParser def set_up_logging(): logger = logging.getLogger('nrc_ngs_dl') logger.setLevel(logging.INFO) fh = logging.FileHandler('information.log') formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') fh.setFormatter(formatter) logger.addHandler(fh) logger.info('***********test_database**************') def parse_input_args(argv): input_parser = argparse.ArgumentParser() input_parser.add_argument('-c', dest='config_file') args = input_parser.parse_args(argv) return args def main(): # get settings from cinfig.ini.sample file set_up_logging() logger= logging.getLogger('nrc_ngs_dl.lims_downloader') config_parser = SafeConfigParser() try: args = parse_input_args(sys.argv[1:]) except: logger.info('Wrong command line args') sys.exit(1) if not args.config_file: logger.info('Missing the configuration file') logger.info('Usage: python lims_downloader.py /path/to/configuation.sample') sys.exit(1) config_file = args.config_file try: with open(config_file) as f: config_parser.read(config_file) except IOError: logger.info('Cannot open file: config.ini.sample') sys.exit(1) try: logger.info('Get settings ...') DB_NAME = config_parser.get('sqlite_database', 'name') USERNAME = config_parser.get('nrc_lims','username') PASSWORD = config_parser.get('nrc_lims','password') LOGIN_URL = config_parser.get('nrc_lims','login_url') RUNLIST_URL = config_parser.get('nrc_lims','runlist_url') DESTINATION_FOLDER = config_parser.get('output','path') TABLE_RUN_LIST = config_parser.get('run_list_setting','table') COLUMN_RUN_LINK = config_parser.get('run_list_setting','column_link') COLUMN_RUN_STATUS = config_parser.get('run_list_setting','column_status') TABLE_FILE_LIST = config_parser.get('file_list_setting','table') COLUMN_FILE_LINK = config_parser.get('file_list_setting','column_link') COLUMN_LANE = config_parser.get('file_list_setting','column_lane') except: logger.info('Cannot get the configuration settings' ) sys.exit(1) if os.path.exists(DESTINATION_FOLDER) == False: logger.info('DESTINATION_FOLDER not exist; do not have permission to access the folder') sys.exit(1) #connect to database if the database exist #otherwise create tables for this database #if os.path.isfile(DB_NAME): # os.remove(DB_NAME) lims_database = LimsDatabase(DB_NAME) if lims_database is None: logger.info('Cannot access the database') sys.exit(1) #login to LIMS webpage try: logger.info('Logging into ...') web_parser = WebParser(LOGIN_URL,RUNLIST_URL,USERNAME,PASSWORD) except: logger.info('Cannot access the web page') sys.exit(1) #get a list of all the completed sequence runs #information for each run : url_for_the_run, run_name, plate_name, #Plateform, Operator, Creation Date, Description, status try: logger.info('Getting run list ...') run_list = web_parser.get_runlist(TABLE_RUN_LIST, COLUMN_RUN_LINK, COLUMN_RUN_STATUS) except: logger.info('Cannot get the list of sequence runs') sys.exit(1) #for each sequence run in the list, #1. check if it is a new data or re-processed data #2. in the case of new data: download the data, insert the information of the data into database tables #3. 
in the case of re-processed data: delete the old records from the database, then download and insert the data again for a_run in run_list: run_url = a_run run_info = web_parser.get_runinfo(run_url) lane_info = web_parser.get_laneinfo(run_url,TABLE_FILE_LIST, COLUMN_LANE,COLUMN_FILE_LINK) for a_lane in lane_info: case = lims_database.check_new_run(run_info,a_lane) if case == 3: logger.info('Deleting records in database for re-processed data (run_name %s, lane_index %s)' % (run_info['run_name'],a_lane[0])) lims_database.delete_old_run(run_info, a_lane) if case != 1: logger.info('Downloading new/re-processed data (run_name %s, lane_index %s)' % (run_info['run_name'],a_lane[0])) file_info = web_parser.get_fileinfo(run_url,a_lane,TABLE_FILE_LIST) rowid = lims_database.insert_run_info(run_info) lims_database.insert_lane_info(rowid,run_url,a_lane) lims_database.insert_file_info(rowid,file_info) lims_database.disconnect() if __name__ == '__main__': main()
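The downloader documents its configuration only through the config_parser.get() calls above. The following is a hedged sketch that writes a template containing every section and option the script reads; all values are placeholders, not real credentials, URLs, or column indices.

# Hedged helper: writes a template config with the sections/options that
# lims_downloader.py reads; every value below is a placeholder.
from ConfigParser import SafeConfigParser  # Python 2, matching the script above

def write_config_template(path='config.ini.sample'):
    parser = SafeConfigParser()
    parser.add_section('sqlite_database')
    parser.set('sqlite_database', 'name', 'lims_data.db')
    parser.add_section('nrc_lims')
    parser.set('nrc_lims', 'username', 'YOUR_USERNAME')
    parser.set('nrc_lims', 'password', 'YOUR_PASSWORD')
    parser.set('nrc_lims', 'login_url', 'https://example.org/lims/login')
    parser.set('nrc_lims', 'runlist_url', 'https://example.org/lims/runs')
    parser.add_section('output')
    parser.set('output', 'path', '/path/to/destination_folder')
    parser.add_section('run_list_setting')
    parser.set('run_list_setting', 'table', 'run_table')
    parser.set('run_list_setting', 'column_link', '0')
    parser.set('run_list_setting', 'column_status', '7')
    parser.add_section('file_list_setting')
    parser.set('file_list_setting', 'table', 'file_table')
    parser.set('file_list_setting', 'column_link', '0')
    parser.set('file_list_setting', 'column_lane', '1')
    with open(path, 'w') as handle:
        parser.write(handle)

if __name__ == '__main__':
    write_config_template()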
from . import problem, user, submit
from contextlib import suppress import awxkit.exceptions as exc from awxkit.api.pages import base, WorkflowJobTemplate, UnifiedJobTemplate, JobTemplate from awxkit.api.mixins import HasCreate, DSAdapter from awxkit.api.resources import resources from awxkit.utils import update_payload, PseudoNamespace, random_title from . import page class WorkflowJobTemplateNode(HasCreate, base.Base): dependencies = [WorkflowJobTemplate, UnifiedJobTemplate] NATURAL_KEY = ('workflow_job_template', 'identifier') def payload(self, workflow_job_template, unified_job_template, **kwargs): if not unified_job_template: # May pass "None" to explicitly create an approval node payload = PseudoNamespace(workflow_job_template=workflow_job_template.id) else: payload = PseudoNamespace(workflow_job_template=workflow_job_template.id, unified_job_template=unified_job_template.id) optional_fields = ( 'diff_mode', 'extra_data', 'limit', 'scm_branch', 'job_tags', 'job_type', 'skip_tags', 'verbosity', 'extra_data', 'identifier', 'all_parents_must_converge', ) update_payload(payload, optional_fields, kwargs) if 'inventory' in kwargs: payload['inventory'] = kwargs['inventory'].id return payload def create_payload(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs): if not unified_job_template: self.create_and_update_dependencies(workflow_job_template) payload = self.payload(workflow_job_template=self.ds.workflow_job_template, unified_job_template=None, **kwargs) else: self.create_and_update_dependencies(workflow_job_template, unified_job_template) payload = self.payload(workflow_job_template=self.ds.workflow_job_template, unified_job_template=self.ds.unified_job_template, **kwargs) payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store) return payload def create(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs): payload = self.create_payload(workflow_job_template=workflow_job_template, unified_job_template=unified_job_template, **kwargs) return self.update_identity(WorkflowJobTemplateNodes(self.connection).post(payload)) def _add_node(self, endpoint, unified_job_template, **kwargs): node = endpoint.post(dict(unified_job_template=unified_job_template.id, **kwargs)) node.create_and_update_dependencies(self.ds.workflow_job_template, unified_job_template) return node def add_always_node(self, unified_job_template, **kwargs): return self._add_node(self.related.always_nodes, unified_job_template, **kwargs) def add_failure_node(self, unified_job_template, **kwargs): return self._add_node(self.related.failure_nodes, unified_job_template, **kwargs) def add_success_node(self, unified_job_template, **kwargs): return self._add_node(self.related.success_nodes, unified_job_template, **kwargs) def add_credential(self, credential): with suppress(exc.NoContent): self.related.credentials.post(dict(id=credential.id, associate=True)) def remove_credential(self, credential): with suppress(exc.NoContent): self.related.credentials.post(dict(id=credential.id, disassociate=True)) def remove_all_credentials(self): for cred in self.related.credentials.get().results: with suppress(exc.NoContent): self.related.credentials.post(dict(id=cred.id, disassociate=True)) def make_approval_node(self, **kwargs): if 'name' not in kwargs: kwargs['name'] = 'approval node {}'.format(random_title()) self.related.create_approval_template.post(kwargs) return self.get() def get_job_node(self, workflow_job): candidates = workflow_job.get_related('workflow_nodes', 
identifier=self.identifier) return candidates.results.pop() page.register_page( [resources.workflow_job_template_node, (resources.workflow_job_template_nodes, 'post'), (resources.workflow_job_template_workflow_nodes, 'post')], WorkflowJobTemplateNode, ) class WorkflowJobTemplateNodes(page.PageList, WorkflowJobTemplateNode): pass page.register_page( [ resources.workflow_job_template_nodes, resources.workflow_job_template_workflow_nodes, resources.workflow_job_template_node_always_nodes, resources.workflow_job_template_node_failure_nodes, resources.workflow_job_template_node_success_nodes, ], WorkflowJobTemplateNodes, )
/*global window, console, document */ /* * Speaker represents the volume icon that will be shown in the mediaPlayer, for example. * It manages the volume level of the media tag given in the constructor. * Every Speaker is a View. * Ex.: var speaker = Speaker({elementID: element, media: mediaTag, id: id}); */ var Erizo = Erizo || {}; Erizo.Speaker = function (spec) { "use strict"; var that = Erizo.View({}), show, mute, unmute, lastVolume = 50; // Variables // DOM element in which the Speaker will be appended that.elementID = spec.elementID; // media tag that.media = spec.media; // Speaker id that.id = spec.id; // MediaStream that.stream = spec.stream; // Container that.div = document.createElement('div'); that.div.setAttribute('style', 'width: 40%; height: 100%; max-width: 32px; position: absolute; right: 0;z-index:0;'); // Volume icon that.icon = document.createElement('img'); that.icon.setAttribute('id', 'volume_' + that.id); that.icon.setAttribute('src', that.url + '/assets/sound48.png'); that.icon.setAttribute('style', 'width: 80%; height: 100%; position: absolute;'); that.div.appendChild(that.icon); if (!that.stream.local) { // Volume bar that.picker = document.createElement('input'); that.picker.setAttribute('id', 'picker_' + that.id); that.picker.type = "range"; that.picker.min = 0; that.picker.max = 100; that.picker.step = 10; that.picker.value = lastVolume; that.picker.setAttribute("orient", "vertical"); // FireFox supports range sliders as of version 23 that.div.appendChild(that.picker); that.media.volume = that.picker.value / 100; that.media.muted = false; that.picker.oninput = function (evt) { if (that.picker.value > 0) { that.media.muted = false; that.icon.setAttribute('src', that.url + '/assets/sound48.png'); } else { that.media.muted = true; that.icon.setAttribute('src', that.url + '/assets/mute48.png'); } that.media.volume = that.picker.value / 100; }; // Private functions show = function (displaying) { that.picker.setAttribute('style', 'background: transparent; width: 32px; height: 100px; position: absolute; bottom: 90%; z-index: 1;' + that.div.offsetHeight + 'px; right: 0px; -webkit-appearance: slider-vertical; display: ' + displaying); }; mute = function () { that.icon.setAttribute('src', that.url + '/assets/mute48.png'); lastVolume = that.picker.value; that.picker.value = 0; that.media.volume = 0; that.media.muted = true; }; unmute = function () { that.icon.setAttribute('src', that.url + '/assets/sound48.png'); that.picker.value = lastVolume; that.media.volume = that.picker.value / 100; that.media.muted = false; }; that.icon.onclick = function (evt) { if (that.media.muted) { unmute(); } else { mute(); } } // Public functions that.div.onmouseover = function (evt) { show('block'); }; that.div.onmouseout = function (evt) { show('none'); }; show('none'); } else { mute = function () { that.media.muted = true; that.icon.setAttribute('src', that.url + '/assets/mute48.png'); that.stream.stream.getAudioTracks()[0].enabled = false; }; unmute = function () { that.media.muted = false; that.icon.setAttribute('src', that.url + '/assets/sound48.png'); that.stream.stream.getAudioTracks()[0].enabled = true; }; that.icon.onclick = function (evt) { if (that.media.muted) { unmute(); } else { mute(); } } } document.getElementById(that.elementID).appendChild(that.div); return that; };
module.exports = function(grunt) { grunt.initConfig({ exec: { build: { cmd: 'jekyll build' }, serve: { cmd: "jekyll serve --watch --baseurl=''" }, deploy: { cmd: 'echo This is hosted on GitHub Pages. Push to deploy.' } } }); grunt.loadNpmTasks('grunt-exec'); grunt.registerTask('default', [ 'exec:serve' ]); grunt.registerTask('serve', [ 'exec:serve' ]); grunt.registerTask('deploy', [ 'exec:deploy' ]); };
/* istanbul instrument in package npmdoc_eslint */ /*jslint bitwise: true, browser: true, maxerr: 8, maxlen: 96, node: true, nomen: true, regexp: true, stupid: true */ (function () { 'use strict'; var local; // run shared js-env code - pre-init (function () { // init local local = {}; // init modeJs local.modeJs = (function () { try { return typeof navigator.userAgent === 'string' && typeof document.querySelector('body') === 'object' && typeof XMLHttpRequest.prototype.open === 'function' && 'browser'; } catch (errorCaughtBrowser) { return module.exports && typeof process.versions.node === 'string' && typeof require('http').createServer === 'function' && 'node'; } }()); // init global local.global = local.modeJs === 'browser' ? window : global; switch (local.modeJs) { // re-init local from window.local case 'browser': local = local.global.utility2.objectSetDefault( local.global.utility2_rollup || local.global.local, local.global.utility2 ); break; // re-init local from example.js case 'node': local = (local.global.utility2_rollup || require('utility2')) .requireExampleJsFromReadme(); break; } // export local local.global.local = local; }()); // run shared js-env code - function (function () { return; }()); switch (local.modeJs) { // run browser js-env code - function case 'browser': break; // run node js-env code - function case 'node': break; } // run shared js-env code - post-init (function () { return; }()); switch (local.modeJs) { // run browser js-env code - post-init case 'browser': local.testCase_browser_nullCase = local.testCase_browser_nullCase || function ( options, onError ) { /* * this function will test browsers's null-case handling-behavior-behavior */ onError(null, options); }; // run tests local.nop(local.modeTest && document.querySelector('#testRunButton1') && document.querySelector('#testRunButton1').click()); break; // run node js-env code - post-init /* istanbul ignore next */ case 'node': local.testCase_buildApidoc_default = local.testCase_buildApidoc_default || function ( options, onError ) { /* * this function will test buildApidoc's default handling-behavior-behavior */ options = { modulePathList: module.paths }; local.buildApidoc(options, onError); }; local.testCase_buildApp_default = local.testCase_buildApp_default || function ( options, onError ) { /* * this function will test buildApp's default handling-behavior-behavior */ local.testCase_buildReadme_default(options, local.onErrorThrow); local.testCase_buildLib_default(options, local.onErrorThrow); local.testCase_buildTest_default(options, local.onErrorThrow); local.testCase_buildCustomOrg_default(options, local.onErrorThrow); options = []; local.buildApp(options, onError); }; local.testCase_buildCustomOrg_default = local.testCase_buildCustomOrg_default || function (options, onError) { /* * this function will test buildCustomOrg's default handling-behavior */ options = {}; local.buildCustomOrg(options, onError); }; local.testCase_buildLib_default = local.testCase_buildLib_default || function ( options, onError ) { /* * this function will test buildLib's default handling-behavior */ options = {}; local.buildLib(options, onError); }; local.testCase_buildReadme_default = local.testCase_buildReadme_default || function ( options, onError ) { /* * this function will test buildReadme's default handling-behavior-behavior */ options = {}; local.buildReadme(options, onError); }; local.testCase_buildTest_default = local.testCase_buildTest_default || function ( options, onError ) { /* * this function will test buildTest's 
default handling-behavior */ options = {}; local.buildTest(options, onError); }; local.testCase_webpage_default = local.testCase_webpage_default || function ( options, onError ) { /* * this function will test webpage's default handling-behavior */ options = { modeCoverageMerge: true, url: local.serverLocalHost + '?modeTest=1' }; local.browserTest(options, onError); }; // run test-server local.testRunServer(local); break; } }());
import React, { useState } from "react"; import "./main.css"; import axios from "axios"; import { useHistory } from "react-router-dom"; import { setCountry } from "../../actions/country"; import { setRegion } from "../../actions/region"; import { connect } from "react-redux"; const Main = ({ setCountry, setRegion }) => { const [state, setstate] = useState({ country: "", continent: "", region: "", world: 0, n: 0, flag: false, }); let { country, continent, region, n, flag, world } = state; const history = useHistory(); const url = "http://localhost:8080/population"; let config = { headers: { "Access-Control-Allow-Origin": "*", }, params: { country: country, continent: continent, region: region, top: n, }, }; if (!flag) axios .get(url + "/world") .then((res) => setstate({ ...state, world: res.data, flag: true })); return ( <div className="main"> <div> <h5>World Population:{world} </h5> </div> <div className="card-panel" onClick={(e) => axios.get(url + "/continents").then((res) => { setRegion(res.data); history.push("/region"); }) } > <h5>Continents Populations</h5> </div> <div className="card-panel" onClick={(e) => axios.get(url + "/regions").then((res) => { setRegion(res.data); history.push("/region"); }) } > <h5>Regions Populations</h5> </div> <div onClick={(e) => { axios.get(url + "/country/all").then((res) => { setCountry(res.data); history.push("/country"); }); }} className="card-panel" > <h5>World Countries</h5> </div> <div onClick={(e) => { if (continent !== "") axios.get(`${url}/country/continent`, config).then((res) => { setCountry(res.data); history.push("/country"); }); }} className="card-panel" > <h5>Countries in a Continent</h5> <div class="input-field"> <input value={continent} onChange={(e) => setstate({ ...state, continent: e.target.value })} placeholder="Continent" type="text" class="validate" /> </div> </div> <div onClick={(e) => { if (region !== "") axios.get(`${url}/country/region`, config).then((res) => { setCountry(res.data); history.push("/country"); }); }} className="card-panel" > <h5>Countries in a Region</h5> <input value={region} onChange={(e) => setstate({ ...state, region: e.target.value })} placeholder="Region" type="text" class="validate" /> </div> <div onClick={(e) => { if (n !== 0) axios.get(`${url}/country/top`, config).then((res) => { setCountry(res.data); history.push("/country"); }); }} className="card-panel" > <h5>Top n Countries in the world</h5> <input value={n} onChange={(e) => setstate({ ...state, n: e.target.value })} placeholder="Top" type="number" class="validate" /> </div> <div onClick={(e) => { if (n !== 0) axios.get(`${url}/country/continent/top`, config).then((res) => { setCountry(res.data); history.push("/country"); }); }} className="card-panel" > <h5>Top n Countries in the Continent</h5> <input value={continent} onChange={(e) => setstate({ ...state, continent: e.target.value })} placeholder="Continent" type="text" class="validate" /> <input value={n} onChange={(e) => setstate({ ...state, n: e.target.value })} placeholder="Top" type="number" class="validate" /> </div> <div onClick={(e) => { if (region !== "") axios.get(`${url}/country/region/top`, config).then((res) => { setCountry(res.data); history.push("/country"); }); }} className="card-panel" > <h5>Top n Countries in the Region</h5> <input value={region} onChange={(e) => setstate({ ...state, region: e.target.value })} placeholder="Region" type="text" class="validate" /> <input value={n} onChange={(e) => setstate({ ...state, n: e.target.value })} placeholder="Top" type="number" 
class="validate" /> </div> </div> ); }; export default connect(null, { setCountry, setRegion })(Main);
/** * @file Make component support data operation like Vue for ant mini program * @author sparklewhy@gmail.com */ 'use strict'; import observable, {setObservableContext} from '../base'; import {observableArray, overrideArrayMethods} from '../array'; import {component as antApi, array as antArray} from './array'; setObservableContext('props', true); let componentExtension = observable.component; let rawCreated = componentExtension.created; componentExtension.created = function () { if (this.$rawComputed) { // fix ant reference bug: `this.data.xx` operation is not allowed // when page onload, otherwise it'll affect the init data state // of the page when load next time. // So, here create a shadow copy of data. this.data = Object.assign({}, this.data); } rawCreated.call(this); }; Object.assign(componentExtension.methods, antApi); let arrApis = Object.assign({}, observableArray, antArray); overrideArrayMethods(arrApis, true); overrideArrayMethods(arrApis, false); /** * View update hook * * @private * @param {Object} prevProps the previous property data before update */ observable.component.didUpdate = function (prevProps) { let propObserver = this.__propsObserver; if (!propObserver) { return; } let currProps = this.props; // update the cache props data, as for the prop data will be override // when prop change, it leads to the cache props data will not refer to // the new props data propObserver.rawData = currProps; Object.keys(prevProps).forEach(k => { let newVal = currProps[k]; let oldVal = prevProps[k]; if (newVal !== oldVal) { propObserver.firePropValueChange(k, newVal, oldVal); } }); }; export default observable;
const path = require("path"); const { DefinePlugin } = require("webpack"); const { CleanWebpackPlugin } = require("clean-webpack-plugin"); const isProd = process.env.NODE_ENV !== "development"; module.exports = { mode: isProd ? "production" : "development", devtool: isProd ? "source-map" : "eval-source-map", entry: { "my-lib": path.resolve(__dirname, "src/my-lib.ts") }, output: { filename: "[name].js", path: path.resolve(__dirname, "dist"), library: "MyLib", libraryTarget: "umd" }, resolve: { extensions: [".ts", ".js"] }, module: { rules: [ { test: /\.ts$/, exclude: [/node_modules/], use: ["babel-loader", "ts-loader", "eslint-loader"] }, { test: /\.js$/, exclude: [/node_modules/], use: ["babel-loader", "eslint-loader"] } ] }, plugins: [ new DefinePlugin({ __VERSION__: require("./package.json").version }), new CleanWebpackPlugin() ] };
""" Copyright (c) 2012-2014, Austin Benson and David Gleich All rights reserved. This file is part of MRTSQR and is under the BSD 2-Clause License, which can be found in the LICENSE file in the root directory, or at http://opensource.org/licenses/BSD-2-Clause """ """ Mapper and reducer implementations for Direct TSQR. Use the script run_dirtsqr.py to run Direct TSQR. """ import sys import time import struct import uuid import cPickle as pickle import gc import numpy import numpy.linalg import util import mrmc import dumbo import dumbo.backends.common from dumbo import opt @opt("getpath", "yes") class DirTSQRMap1(mrmc.MatrixHandler): """ Input: <key, value> pairs representing <row id, row> in the matrix A Output: 1. R matrix: <mapper id, row> 2. Q matrix: <mapper id, row + [row_id]> """ def __init__(self): mrmc.MatrixHandler.__init__(self) self.keys = [] self.data = [] self.mapper_id = uuid.uuid1().hex def collect(self,key,value): if self.ncols == None: self.ncols = len(value) print >>sys.stderr, "Matrix size: %i columns"%(self.ncols) else: assert(len(value) == self.ncols) self.keys.append(key) self.data.append(value) self.nrows += 1 # write status updates so Hadoop doesn't complain if self.nrows%50000 == 0: self.counters['rows processed'] += 50000 def close(self): self.counters['rows processed'] += self.nrows % 50000 # if no data was passed to this task, we just return if len(self.data) == 0: return QR = numpy.linalg.qr(numpy.array(self.data)) yield ("R_%s" % str(self.mapper_id), self.mapper_id), QR[1].tolist() flat_Q = [entry for row in QR[0] for entry in row] val1 = pickle.dumps(self.keys) val2 = struct.pack('d'*len(flat_Q), *flat_Q) val = ''.join([str(len(val1)) + '_', val1, val2]) yield ("Q_%s" % str(self.mapper_id), self.mapper_id), val def __call__(self,data): self.collect_data(data) for key,val in self.close(): yield key, val @opt("getpath", "yes") class DirTSQRRed2(dumbo.backends.common.MapRedBase): """ Takes all of the intermediate Rs Computes [R_1, ..., R_n] = Q2R_{final} Output: 1. R_final: R in A = QR with key-value pairs <i, row> 2. Q2: <mapper_id, row> where Q2 is a list of key value pairs. 
Each key corresponds to a mapperid from stage 1 and that keys value is the Q2 matrix corresponding to that mapper_id """ def __init__(self, compute_svd=False): self.R_data = {} self.key_order = [] self.Q2 = None self.compute_svd = compute_svd def collect(self, key, value): assert(key not in self.R_data) data = [] for row in value: data.append([float(val) for val in row]) self.R_data[key] = data def close_R(self): data = [] for key in self.R_data: data += self.R_data[key] self.key_order.append(key) A = numpy.array(data) QR = numpy.linalg.qr(A) self.Q2 = QR[0].tolist() self.R_final = QR[1].tolist() for i, row in enumerate(self.R_final): yield ("R_final", i), row if self.compute_svd: U, S, Vt = numpy.linalg.svd(self.R_final) S = numpy.diag(S) for i, row in enumerate(U): yield ("U", i), row for i, row in enumerate(S): yield ("Sigma", i), row for i, row in enumerate(Vt): yield ("Vt", i), row def close_Q(self): num_rows = len(self.Q2) rows_to_read = num_rows / len(self.key_order) ind = 0 key_ind = 0 local_Q = [] for row in self.Q2: local_Q.append(row) ind += 1 if (ind == rows_to_read): flat_Q = [entry for row in local_Q for entry in row] yield ("Q2", self.key_order[key_ind]), flat_Q key_ind += 1 local_Q = [] ind = 0 def __call__(self,data): for key,values in data: for value in values: self.collect(key, value) for key, val in self.close_R(): yield key, val for key, val in self.close_Q(): yield key, val class DirTSQRMap3(dumbo.backends.common.MapRedBase): """ input: Q1 as <mapper_id, [row] + [row_id]> input: Q2 comes attached as a text file, which is then parsed on the fly output: Q as <row_id, row> """ def __init__(self,ncols,q2path='q2.txt',upath=None): # TODO implement this self.Q1_data = {} self.row_keys = {} self.Q2_data = {} self.ncols = ncols self.q2path = q2path self.u_data = None if upath is not None: self.u_data = [] for row in util.parse_matrix_txt(upath): self.u_data.append(row) self.u_data = numpy.mat(self.u_data) def parse_q2(self): try: f = open(self.q2path, 'r') except: # We may be expecting only the file to be distributed # with the script f = open(self.q2path.split('/')[-1], 'r') for line in f: if len(line) > 5: ind1 = line.find('(') ind2 = line.rfind(')') key = line[ind1+1:ind2] # lazy parsing: we only need the keys that we have if key not in self.Q1_data: continue line = line[ind2+3:] line = line.lstrip('[').rstrip().rstrip(']') line = line.split(',') line = [float(v) for v in line] line = numpy.array(line) mat = numpy.reshape(line, (self.ncols, self.ncols)) self.Q2_data[key] = mat f.close() def collect(self, key, keys, value): self.Q1_data[key] = (keys, value) def close(self): # parse the q2 file we were given self.parse_q2() for key in self.Q1_data: assert(key in self.Q2_data) keys, Q1 = self.Q1_data[key] Q2 = self.Q2_data[key] if self.u_data is not None: Q2 = Q2 * self.u_data Q_out = Q1 * Q2 for i, row in enumerate(Q_out.getA()): yield keys[i], struct.pack('d' * len(row), *row) def __call__(self, data): for key, val in data: ind = val.find('_') val1_len = int(val[:ind]) keys = val[ind + 1:ind + 1 + val1_len] matrix = val[ind +1 +val1_len:] keys = pickle.loads(keys) num_entries = len(matrix) / 8 if num_entries % self.ncols != 0: raise mrmc.DataFormatException( 'Length of value (%d) did not match number of columns (%d)' % ( num_entries, self.ncols)) mat = struct.unpack('d' * num_entries, matrix) mat = numpy.mat(mat) mat = numpy.reshape(mat, (num_entries / self.ncols , self.ncols)) self.collect(key, keys, mat) for key, val in self.close(): yield key, val """ The classes 
RLabeller, QGrouperMap, QGrouperReduce, and DirTSQRRed3 are used for recursive Direct TSQR. """ class RLabeller(dumbo.backends.common.MapRedBase): def __init__(self): self.data = [] def close(self): for pair in self.data: yield pair[0], pair[1] def __call__(self, data): for key, value in data: for i, row in enumerate(value): new_key = str(key) + '_' + str(i) row = [float(val) for val in row] row = struct.pack('d' * len(row), *row) self.data.append((new_key, row)) for key, val in self.close(): yield key, val class QGrouperMap(dumbo.backends.common.MapRedBase): def __init__(self): self.data = [] def close(self): for pair in self.data: yield pair[0], pair[1] def __call__(self, data): for key, value in data: new_key, num = key.split('_') val = pickle.dumps((value, int(num))) self.data.append((new_key, val)) for key, val in self.close(): yield key, val class QGrouperReduce(dumbo.backends.common.MapRedBase): def __init__(self, ncols): self.ncols = ncols self.data = {} def close(self): for key in self.data: assert(None not in self.data[key]) local_Q = self.data[key] flat_Q = [entry for row in local_Q for entry in row] val = 'Q2' + '_' + struct.pack('d' * (self.ncols ** 2), *flat_Q) yield key, val def collect(self, key, value, num): assert(num < self.ncols) if key not in self.data: self.data[key] = self.ncols * [None] row = struct.unpack('d' * self.ncols, value) self.data[key][num] = row def __call__(self, data): for key, values in data: for value in values: val, num = pickle.loads(value) self.collect(key, val, num) for key, val in self.close(): yield key, val class DirTSQRRed3(dumbo.backends.common.MapRedBase): def __init__(self, ncols): self.ncols = ncols self.Q1_data = None self.Q2_data = None def collect(self, key, keys, value): self.Q1_data = (keys, value) def collect_Q2(self, key, value): value = numpy.array(struct.unpack('d' * (self.ncols ** 2), value)) self.Q2_data = numpy.reshape(value, (self.ncols, self.ncols)) def flush(self): keys, Q1 = self.Q1_data Q2 = self.Q2_data Q_out = Q1 * Q2 self.Q1_data = None self.Q2_data = None Q1 = None Q2 = None gc.collect() for i, row in enumerate(Q_out.getA()): yield keys[i], struct.pack('d' * len(row), *row) def __call__(self, data): for key, values in data: for val in values: ind = val.find('_') if val[:ind] == 'Q2': mat = val[ind+1:] self.collect_Q2(key, mat) else: val1_len = int(val[:ind]) keys = val[ind + 1:ind + 1 + val1_len] matrix = val[ind +1 +val1_len:] keys = pickle.loads(keys) num_entries = len(matrix) / 8 assert (num_entries % self.ncols == 0) mat = struct.unpack('d' * num_entries, matrix) mat = numpy.mat(mat) mat = numpy.reshape(mat, (num_entries / self.ncols , self.ncols)) self.collect(key, keys, mat) for k, v in self.flush(): yield k, v
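The Direct TSQR stages above are easiest to follow in a single-process form. The sketch below is a hedged, serial NumPy rendering of the same factorization (function and variable names are illustrative, not part of MRTSQR): stage 1 takes a local QR of each row block, stage 2 takes a QR of the stacked R factors, and stage 3 multiplies each local Q by its slice of Q2.

# Hedged, serial sketch of the Direct TSQR factorization implemented by the
# three map/reduce stages above; names here are illustrative only.
import numpy as np

def direct_tsqr(A, num_blocks):
    """Tall-skinny QR: A = Q @ R computed block by block, as in stages 1-3."""
    blocks = np.array_split(A, num_blocks, axis=0)
    n = A.shape[1]

    # Stage 1 (DirTSQRMap1): local QR of each row block.
    Q1s, Rs = zip(*(np.linalg.qr(block) for block in blocks))

    # Stage 2 (DirTSQRRed2): QR of the stacked R factors.
    Q2, R_final = np.linalg.qr(np.vstack(Rs))

    # Stage 3 (DirTSQRMap3): multiply each local Q by its slice of Q2.
    Q2_slices = [Q2[i * n:(i + 1) * n] for i in range(len(blocks))]
    Q = np.vstack([Q1 @ Q2_i for Q1, Q2_i in zip(Q1s, Q2_slices)])
    return Q, R_final

A = np.random.randn(1000, 10)
Q, R = direct_tsqr(A, num_blocks=4)
assert np.allclose(Q @ R, A)

Under the assumption that every block has at least as many rows as columns, Q @ R reproduces A, which is the invariant the map/reduce jobs rely on.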
import { primaryColor, dangerColor, successColor, grayColor, defaultFont } from "./material-dashboard-react"; const customInputStyle = { disabled: { "&:before": { backgroundColor: "transparent !important" } }, underline: { "&:hover:not($disabled):before,&:before": { borderColor: grayColor[4] + " !important", borderWidth: "1px !important" }, "&:after": { borderColor: primaryColor[0] } }, underlineError: { "&:after": { borderColor: dangerColor[0] } }, underlineSuccess: { "&:after": { borderColor: successColor[0] } }, labelRoot: { ...defaultFont, color: grayColor[3] + " !important", fontWeight: "400", fontSize: "14px", lineHeight: "1.42857", letterSpacing: "unset" }, labelRootError: { color: dangerColor[0] }, labelRootSuccess: { color: successColor[0] }, feedback: { position: "absolute", top: "18px", right: "0", zIndex: "2", display: "block", width: "24px", height: "24px", textAlign: "center", pointerEvents: "none" }, marginTop: { marginTop: "16px" }, formControl: { paddingBottom: "10px", margin: "27px 0 0 0", position: "relative", verticalAlign: "unset" } }; export default customInputStyle;
// For discussion and comments, see: http://remysharp.com/2009/01/07/html5-enabling-script/ /*@cc_on'abbr article aside audio canvas details figcaption figure footer header hgroup mark menu meter nav output progress section summary time video'.replace(/\w+/g,function(n){document.createElement(n)})@*/ var addEvent = (function () { if (document.addEventListener) { return function (el, type, fn) { if (el && el.nodeName || el === window) { el.addEventListener(type, fn, false); } else if (el && el.length) { for (var i = 0; i < el.length; i++) { addEvent(el[i], type, fn); } } }; } else { return function (el, type, fn) { if (el && el.nodeName || el === window) { el.attachEvent('on' + type, function () { return fn.call(el, window.event); }); } else if (el && el.length) { for (var i = 0; i < el.length; i++) { addEvent(el[i], type, fn); } } }; } })(); (function () { var pre = document.createElement('pre'); pre.id = "view-source" // private scope to avoid conflicts with demos addEvent(window, 'click', function (event) { if (event.target.hash == '#view-source') { // event.preventDefault(); if (!document.getElementById('view-source')) { // pre.innerHTML = ('<!DOCTYPE html>\n<html>\n' + document.documentElement.innerHTML + '\n</html>').replace(/[<>]/g, function (m) { return {'<':'&lt;','>':'&gt;'}[m]}); var xhr = new XMLHttpRequest(); // original source - rather than rendered source xhr.onreadystatechange = function () { if (this.readyState == 4 && this.status == 200) { pre.innerHTML = this.responseText.replace(/[<>]/g, function (m) { return {'<':'&lt;','>':'&gt;'}[m]}); prettyPrint(); } }; document.body.appendChild(pre); // really need to be sync? - I like to think so xhr.open("GET", window.location, true); xhr.send(); } document.body.className = 'view-source'; var sourceTimer = setInterval(function () { if (window.location.hash != '#view-source') { clearInterval(sourceTimer); document.body.className = ''; } }, 200); } }); })();
import logging import os import random as rd import click import mne import pandas as pd import numpy as np from scipy.signal import welch import matplotlib.pyplot as plt from config import CHANNEL_NAMES, DATA_ROOT, PROCESSED_ROOT, RAW_ROOT from data.utils import df_from_fif, df_from_tdt, get_sampling_frequency from data.preprocess import preprocess_raw_mne_file def plot_psd(X, label, Fs, NFFT, color=None): noverlap = int(NFFT * 0.8) freqs, psd = welch(X, fs=Fs, window='hann', nperseg=NFFT, noverlap=noverlap) # print(len(freqs)) # print(len(psd)) f = freqs[freqs > np.zeros(len(freqs))] psd = psd[freqs > np.zeros(len(freqs))] plt.plot(np.log10(f), 10 * np.log10(psd.ravel()), label=label, color=color) def interactive_plot_freq(input_file, kind, apply_proj=False): """Create an interactive figure visualizing all channels from a file.""" df = df_from_fif(input_file) if kind=='PROCESSED' else \ df_from_tdt(input_file) if kind == 'RAW': sfreq = get_sampling_frequency(input_file) else: sfreq = 250 info = mne.create_info(ch_names=CHANNEL_NAMES, sfreq=sfreq, ch_types='eeg') data = mne.io.RawArray(np.transpose(df.values), info) processed = preprocess_raw_mne_file(data, apply_proj) logging.info(f'Plotting frequency components of {input_file} of ' f'kind={kind}, sfreq={sfreq}...') # data.plot_psd() # processed.plot_psd() # add some plotting parameter # decim_fit = 100 # we learn a purely spatial model, we don't need all samples # decim_show = 10 # we can make plotting faster n_fft = 2 ** 13 # let's use long windows to see low frequencies for i, channel in enumerate(CHANNEL_NAMES): plt.figure(figsize=(9, 6)) values = df[channel].values logging.info(f'Plotting file {input_file}...') plot_psd(values, Fs=sfreq, NFFT=n_fft, label='EEG', color='black') if kind == 'RAW': plot_psd(processed[i][0][0], Fs=sfreq, NFFT=n_fft, label='EEG-processed', color='orange') plt.legend() plt.xticks(np.log10([0.1, 1, 10, 100]), [0.1, 1, 10, 100]) plt.xlim(np.log10([0.1, 300])) plt.xlabel('log10(frequency) [Hz]') plt.ylabel('Power Spectral Density [dB]') plt.grid() plt.show() def examine_all_freq(kind='RAW', proj=False): input_folder = PROCESSED_ROOT if kind=='PROCESSED' else RAW_ROOT to_examine = os.listdir(input_folder) rd.shuffle(to_examine) for file_name in to_examine: interactive_plot_freq(os.path.join(input_folder, file_name), kind, proj) @click.command() @click.option('--kind', type=str, default='RAW') @click.option('--proj', type=bool, default=False) def main(kind, proj): logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) logger.info(f'Plotting PSD of EEG signals of kind {kind}...') examine_all_freq(kind, proj) if __name__ == '__main__': main()
''' LPD-Net Model: FN-SF-VLAD Feature Network + FN-Parallel structure (P) + Series-FC structure (SF) # Thanks to Mikaela Angelina Uy, modified from PointNetVLAD author: suo_ivy created: 10/26/18 ''' import os import sys import tensorflow as tf #Taken from Charles Qi's pointnet code MODELS_DIR = os.path.dirname(__file__) sys.path.append(MODELS_DIR) sys.path.append(os.path.join(MODELS_DIR, '../utils')) import tf_util #from transform_nets import input_transform_net, feature_transform_net, neural_feature_net #Adopted from Antoine Meich import loupe as lp def placeholder_inputs(batch_num_queries, num_pointclouds_per_query, num_point, input_dim=13): pointclouds_pl = tf.placeholder(tf.float32, shape=(batch_num_queries, num_pointclouds_per_query, num_point, input_dim)) return pointclouds_pl #Adopted from the original pointnet code def forward(point_cloud, is_training, bn_decay=None, params=None): # Network: DGCNN # INPUT: batch_num_queries X num_pointclouds_per_query X num_points_per_pointcloud X input_dim # OUTPUT: batch_num_queries X num_pointclouds_per_query X output_dim batch_num_queries = point_cloud.get_shape()[0].value num_pointclouds_per_query = point_cloud.get_shape()[1].value num_points = point_cloud.get_shape()[2].value CLUSTER_SIZE = params["CLUSTER_SIZE"] # default: 64 OUTPUT_DIM = params["FEATURE_OUTPUT_DIM"] # default: 256 k = params["KNN"] # default: 20 INPUT_DIM = params["INPUT_DIM"] # default: 13 point_cloud = tf.reshape(point_cloud, [batch_num_queries*num_pointclouds_per_query, num_points, INPUT_DIM]) # BxNxC #if INPUT_DIM != 13: # print("input dimension must be 13!!!") # exit() #pc, feature_cloud = tf.split(point_cloud, [3, 10], 2) # BxNx3 BxNx10 #with tf.variable_scope('single') as sc: # pc = tf.expand_dims(point_cloud, -1) # BxNxC -> BxNxCx1 # pfea = tf_util.conv2d(pc, 16, [1, INPUT_DIM], # padding='VALID', stride=[1,1], # bn=True, is_training=is_training, # scope='conv0_a', bn_decay=bn_decay) # BxNx1x16 # pfea = tf_util.conv2d(pfea, 32, [1, 1], # padding='VALID', stride=[1,1], # bn=True, is_training=is_training, # scope='conv0_b', bn_decay=bn_decay) # BxNx1x32 # pfea = tf.squeeze(pfea, [2]) # DGCNN index with tf.variable_scope('fastdgcnn') as sc: dpist = tf_util.pairwise_distance_mask(point_cloud, k=k) # BxNxN # ------------------------------------------------------------------------- x = tf_util.conv1d(point_cloud, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv1', bn_decay=bn_decay) # BxNx64 x1 = tf.matmul(dpist, x) # BxNxN X BxNx64 -> BxNx64 x1 = x1 / float(k) t1 = x1 - x t1 = tf_util.conv1d(t1, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv1_a', bn_decay=bn_decay) # BxNx64 t1 = tf_util.conv1d(t1, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv1_b', bn_decay=bn_decay) # BxNx64 x1 = t1 + x1 # ------------------------------------------------------------------------- x = tf_util.conv1d(x1, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv2', bn_decay=bn_decay) # BxNx64 x2 = tf.matmul(dpist, x) # BxNxN X BxNx64 -> BxNx64 x2 = x2 / float(k) t2 = x2 - x t2 = tf_util.conv1d(t2, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv2_a', bn_decay=bn_decay) # BxNx64 t2 = tf_util.conv1d(t2, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv2_b', bn_decay=bn_decay) # BxNx64 x2 = t2 + x2 # ------------------------------------------------------------------------- x = tf_util.conv1d(x2, 64, 1, 
padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv3', bn_decay=bn_decay) # BxNx64 x3 = tf.matmul(dpist, x) # BxNxN X BxNx64 -> BxNx64 x3 = x3 / float(k) t3 = x3 - x t3 = tf_util.conv1d(t3, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv3_a', bn_decay=bn_decay) # BxNx64 t3 = tf_util.conv1d(t3, 64, 1, bn=True, padding='VALID', stride=1, is_training=is_training, scope='conv3_b', bn_decay=bn_decay) # BxNx64 x3 = t3 + x3 # ------------------------------------------------------------------------- x = tf_util.conv1d(x3, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv4', bn_decay=bn_decay) # BxNx64 x4 = tf.matmul(dpist, x) # BxNxN X BxNx64 -> BxNx64 x4 = x4 / float(k) t4 = x4 - x t4 = tf_util.conv1d(t4, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv4_a', bn_decay=bn_decay) # BxNx64 t4 = tf_util.conv1d(t4, 64, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv4_b', bn_decay=bn_decay) # BxNx64 x4 = t4 + x4 # ------------------------------------------------------------------------- x = tf.concat([x1, x2, x3, x4], axis=-1) # BxNx64 * 4 -> BxNx256 x = tf_util.conv1d(x, 1024, 1, padding='VALID', stride=1, bn=True, is_training=is_training, scope='conv5', bn_decay=bn_decay) # BxNx1024 #x = tf.expand_dims(x, axis=2) # BxNx1024 -> BxNx1x1024 with tf.variable_scope('VLAD') as sc: NetVLAD = lp.G_VLAD(feature_size=1024, max_samples=num_points, cluster_size=CLUSTER_SIZE, output_dim=OUTPUT_DIM, groups=params["GROUPS"], gating=True, add_batch_norm=True, is_training=is_training) net = tf.reshape(x, [-1, 1024]) net = tf.nn.l2_normalize(net, 1) output = NetVLAD.forward(net) print(output) #normalize to have norm 1 output = tf.nn.l2_normalize(output,1) #output = tf.reshape(output,[batch_num_queries,num_pointclouds_per_query,OUTPUT_DIM]) output = tf.reshape(output,[batch_num_queries,num_pointclouds_per_query,OUTPUT_DIM], name="last_output") #return output return tf.nn.l2_normalize(tf.reshape(x, [-1, 1024]), 1), output def best_pos_distance(query, pos_vecs): with tf.name_scope('best_pos_distance') as scope: #batch = query.get_shape()[0] num_pos = pos_vecs.get_shape()[1] query_copies = tf.tile(query, [1,int(num_pos),1]) #shape num_pos x output_dim best_pos=tf.reduce_min(tf.reduce_sum(tf.squared_difference(pos_vecs,query_copies),2),1) #best_pos=tf.reduce_max(tf.reduce_sum(tf.squared_difference(pos_vecs,query_copies),2),1) return best_pos ##########Losses for PointNetVLAD########### #Returns average loss across the query tuples in a batch, loss in each is the average loss of the definite negatives against the best positive def triplet_loss(q_vec, pos_vecs, neg_vecs, margin): # ''', end_points, reg_weight=0.001): best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] query_copies = tf.tile(q_vec, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) m=tf.fill([int(batch), int(num_neg)],margin) triplet_loss=tf.reduce_mean(tf.reduce_sum(tf.maximum(tf.add(m,tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,query_copies),2))), tf.zeros([int(batch), int(num_neg)])),1)) return triplet_loss #Lazy variant def lazy_triplet_loss(q_vec, pos_vecs, neg_vecs, margin): best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] query_copies = tf.tile(q_vec, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) 
m=tf.fill([int(batch), int(num_neg)],margin) triplet_loss=tf.reduce_mean(tf.reduce_max(tf.maximum(tf.add(m,tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,query_copies),2))), tf.zeros([int(batch), int(num_neg)])),1)) return triplet_loss def softmargin_loss(q_vec, pos_vecs, neg_vecs): best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] query_copies = tf.tile(q_vec, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) ones=tf.fill([int(batch), int(num_neg)],1.0) soft_loss=tf.reduce_mean(tf.reduce_sum(tf.log(tf.exp(tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,query_copies),2)))+1.0),1)) return soft_loss def lazy_softmargin_loss(q_vec, pos_vecs, neg_vecs): best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] query_copies = tf.tile(q_vec, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) ones=tf.fill([int(batch), int(num_neg)],1.0) soft_loss=tf.reduce_mean(tf.reduce_max(tf.log(tf.exp(tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,query_copies),2)))+1.0),1)) return soft_loss def quadruplet_loss_sm(q_vec, pos_vecs, neg_vecs, other_neg, m2): soft_loss= softmargin_loss(q_vec, pos_vecs, neg_vecs) best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] other_neg_copies = tf.tile(other_neg, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) m2=tf.fill([int(batch), int(num_neg)],m2) second_loss=tf.reduce_mean(tf.reduce_sum(tf.maximum(tf.add(m2,tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,other_neg_copies),2))), tf.zeros([int(batch), int(num_neg)])),1)) total_loss= soft_loss+second_loss return total_loss def lazy_quadruplet_loss_sm(q_vec, pos_vecs, neg_vecs, other_neg, m2): soft_loss= lazy_softmargin_loss(q_vec, pos_vecs, neg_vecs) best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] other_neg_copies = tf.tile(other_neg, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) m2=tf.fill([int(batch), int(num_neg)],m2) second_loss=tf.reduce_mean(tf.reduce_max(tf.maximum(tf.add(m2,tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,other_neg_copies),2))), tf.zeros([int(batch), int(num_neg)])),1)) total_loss= soft_loss+second_loss return total_loss def quadruplet_loss(q_vec, pos_vecs, neg_vecs, other_neg, m1, m2): trip_loss= triplet_loss(q_vec, pos_vecs, neg_vecs, m1) best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] other_neg_copies = tf.tile(other_neg, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) m2=tf.fill([int(batch), int(num_neg)],m2) second_loss=tf.reduce_mean(tf.reduce_sum(tf.maximum(tf.add(m2,tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,other_neg_copies),2))), tf.zeros([int(batch), int(num_neg)])),1)) total_loss= trip_loss+second_loss return total_loss def lazy_quadruplet_loss(q_vec, pos_vecs, neg_vecs, other_neg, m1, m2): trip_loss= lazy_triplet_loss(q_vec, pos_vecs, neg_vecs, m1) best_pos=best_pos_distance(q_vec, pos_vecs) num_neg = neg_vecs.get_shape()[1] batch = q_vec.get_shape()[0] other_neg_copies = tf.tile(other_neg, [1, int(num_neg),1]) best_pos=tf.tile(tf.reshape(best_pos,(-1,1)),[1, int(num_neg)]) m2=tf.fill([int(batch), int(num_neg)],m2)
second_loss=tf.reduce_mean(tf.reduce_max(tf.maximum(tf.add(m2,tf.subtract(best_pos,tf.reduce_sum(tf.squared_difference(neg_vecs,other_neg_copies),2))), tf.zeros([int(batch), int(num_neg)])),1)) total_loss= trip_loss+second_loss return total_loss
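The quadruplet losses above are all built from the same hinge of a query's best positive distance against its negative distances. As a reference point, here is a hedged NumPy sketch of the lazy (hardest-negative) triplet term; shapes and names are illustrative and not part of the LPD-Net code.

# Hedged NumPy sketch of the lazy triplet loss defined above: a per-query
# hinge of the hardest negative against the best positive.
import numpy as np

def lazy_triplet_loss_np(q_vec, pos_vecs, neg_vecs, margin):
    """q_vec: (B, 1, D), pos_vecs: (B, P, D), neg_vecs: (B, N, D)."""
    best_pos = np.min(np.sum((pos_vecs - q_vec) ** 2, axis=2), axis=1)   # (B,)
    neg_dist = np.sum((neg_vecs - q_vec) ** 2, axis=2)                   # (B, N)
    hinge = np.maximum(margin + best_pos[:, None] - neg_dist, 0.0)       # (B, N)
    return np.mean(np.max(hinge, axis=1))  # hardest negative per query

B, P, N, D = 2, 2, 18, 256  # illustrative tuple sizes
rng = np.random.default_rng(0)
loss = lazy_triplet_loss_np(rng.normal(size=(B, 1, D)),
                            rng.normal(size=(B, P, D)),
                            rng.normal(size=(B, N, D)), margin=0.5)

The non-lazy variants replace the max over negatives with a sum, and the quadruplet variants add a second hinge against the "other negative" with margin m2, exactly as in the TensorFlow code above.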
from django.contrib.auth.models import User from rest_framework import serializers from mainstore.models import Catalog, Product # hyperlinkedmodelserializer #view_name='api:product-detail' class UserSerializer(serializers.HyperlinkedModelSerializer): url = serializers.HyperlinkedIdentityField(view_name='api:user-detail') products = serializers.HyperlinkedRelatedField(many=True, view_name='api:product-detail', read_only=True) class Meta: model = User fields = ('url', 'id', 'username', 'products') class CatalogSerializer(serializers.HyperlinkedModelSerializer): products = serializers.HyperlinkedRelatedField(many=True, view_name='api:product-detail', read_only=True) url = serializers.HyperlinkedIdentityField(view_name='api:catalog-detail') class Meta: model = Catalog fields = ('url', 'id', 'name', 'slug', 'products') # look class ProductSerializer(serializers.HyperlinkedModelSerializer): url = serializers.HyperlinkedIdentityField(view_name='api:product-detail') catalog_id = serializers.HyperlinkedRelatedField(queryset=Catalog.objects.all(), view_name='api:catalog-detail', read_only=False) catalog = serializers.HyperlinkedRelatedField(view_name='api:catalog-detail', read_only=True) owner = serializers.ReadOnlyField(source='owner.username') class Meta: model = Product fields = ( 'url', 'id', 'name', 'price', 'slug', 'catalog_id', 'catalog', 'quantity', 'available', 'is_new', 'color', 'size', 'owner') def create(self, validated_data): """ Create and return a new 'Product' instance, given the validated data. """ # get id only catalog_id = validated_data.pop('catalog_id').id print('catalog id is:', catalog_id) # if 'catalog' in validated_data.keys(): # catalog_data = validated_data.pop('catalog') # print("catalog_id :",catalog_id) # catalog=CatalogSerializer.create(CatalogSerializer(),validated_data=catalog_data) # # product=Product.objects.create(catalog=catalog,catalog_id=catalog_id,**validated_data) # else: product = Product.objects.create(catalog_id=catalog_id, **validated_data) return product def update(self, instance, validated_data): """ Update and return an existing 'Product' instance, given the validated data """ try: catalog_id = validated_data.pop('catalog_id').id except KeyError: raise instance.name = validated_data.get('name', instance.name) instance.price = validated_data.get('price', instance.price) instance.image = validated_data.get('image', instance.image) instance.slug = validated_data.get('slug', instance.slug) instance.color = validated_data.get('color', instance.color) instance.size = validated_data.get('size', instance.size) instance.available = validated_data.get('available', instance.available) instance.is_new = validated_data.get('is_new', instance.is_new) instance.quantity = validated_data.get('quantity', instance.quantity) instance.catalog_id = validated_data.get('catalog_id', instance.catalog_id) if str(catalog_id): pass # catalog=Catalog.objects.get(pk=catalog_id) # print("catalog for updating:",catalog) # catalog.url=catalog.get('url',catalog) # catalog.save() instance.save() return instance
var fs = require('fs') var path = require('path') var mkdirp = require('mkdirp') var mr = require('npm-registry-mock') var osenv = require('osenv') var rimraf = require('rimraf') var test = require('tap').test var common = require('../common-tap') var server var pkg = path.resolve(__dirname, 'prune') var cache = path.resolve(pkg, 'cache') var json = { name: 'prune-with-only-dev-deps', description: 'fixture', version: '0.0.1', main: 'index.js', devDependencies: { 'test-package-with-one-dep': '0.0.0', 'test-package': '0.0.0' } } var EXEC_OPTS = { cwd: pkg, npm_config_depth: 'Infinity' } test('setup', function (t) { cleanup() mkdirp.sync(cache) fs.writeFileSync( path.join(pkg, 'package.json'), JSON.stringify(json, null, 2) ) mr({ port: common.port }, function (er, s) { server = s t.end() }) }) test('npm install', function (t) { common.npm([ 'install', '--cache', cache, '--registry', common.registry, '--loglevel', 'silent', '--production', 'false' ], EXEC_OPTS, function (err, code, stdout, stderr) { t.ifErr(err, 'install finished successfully') t.notOk(code, 'exit ok') t.notOk(stderr, 'Should not get data on stderr: ' + stderr) t.end() }) }) function readdir (dir) { try { return fs.readdirSync(dir) } catch (ex) { if (ex.code === 'ENOENT') return [] throw ex } } test('verify installs', function (t) { var dirs = readdir(pkg + '/node_modules').sort() t.same(dirs, [ 'test-package', 'test-package-with-one-dep' ].sort()) t.end() }) test('npm prune', function (t) { common.npm([ 'prune', '--loglevel', 'silent', '--production', 'false' ], EXEC_OPTS, function (err, code, stdout, stderr) { t.ifErr(err, 'prune finished successfully') t.notOk(code, 'exit ok') t.notOk(stderr, 'Should not get data on stderr: ' + stderr) t.end() }) }) test('verify installs', function (t) { var dirs = readdir(pkg + '/node_modules').sort() t.same(dirs, [ 'test-package', 'test-package-with-one-dep' ]) t.end() }) test('npm prune', function (t) { common.npm([ 'prune', '--loglevel', 'silent', '--production', '--json' ], EXEC_OPTS, function (err, code, stdout, stderr) { t.ifErr(err, 'prune finished successfully') t.notOk(code, 'exit ok') t.like(JSON.parse(stdout), {removed: [{name: 'test-package'}, {name: 'test-package-with-one-dep'}]}) t.end() }) }) test('verify installs', function (t) { var dirs = readdir(pkg + '/node_modules').sort() t.same(dirs, []) t.end() }) test('cleanup', function (t) { server.close() cleanup() t.pass('cleaned up') t.end() }) function cleanup () { process.chdir(osenv.tmpdir()) rimraf.sync(pkg) }
const s3 = require("../config/aws"); module.exports = key => { var params = { Bucket: process.env.ResumeBucketName, Key: key }; s3.deleteObject(params, (err, data) => { if (err) { console.log(err); return; } console.log(`Resume ${key} deleted`); }); };
/* Copyright (C) 2002-2020 Free Software Foundation, Inc. This file is part of the GNU C Library. Contributed by Ulrich Drepper <drepper@redhat.com>, 2002. The GNU C Library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. The GNU C Library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the GNU C Library; if not, see <https://www.gnu.org/licenses/>. */ #include <pthread.h> #include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <sys/file.h> static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER; static int fd; static void * tf (void *arg) { if (flock (fd, LOCK_SH | LOCK_NB) != 0) { puts ("second flock failed"); exit (1); } pthread_mutex_unlock (&lock); return NULL; } static int do_test (void) { char tmp[] = "/tmp/tst-flock1-XXXXXX"; fd = mkstemp (tmp); if (fd == -1) { puts ("mkstemp failed"); exit (1); } unlink (tmp); write (fd, "foobar xyzzy", 12); if (flock (fd, LOCK_EX | LOCK_NB) != 0) { puts ("first flock failed"); exit (1); } pthread_mutex_lock (&lock); pthread_t th; if (pthread_create (&th, NULL, tf, NULL) != 0) { puts ("pthread_create failed"); exit (1); } pthread_mutex_lock (&lock); void *result; if (pthread_join (th, &result) != 0) { puts ("pthread_join failed"); exit (1); } close (fd); return result != NULL; } #define TEST_FUNCTION do_test () #include "../test-skeleton.c"
''' filename = urls.py author = LJH date = 2019/08/19 ''' from django.conf.urls import url from django.views.generic import TemplateView from . import views from django.views.static import serve from games.settings import MEDIA_ROOT urlpatterns = [ # personal profile url(r'^profile/$', views.ProfileView.as_view(), name='profile'), # media image route handling url(r'^media/(?P<path>.*)$', serve, {"document_root": MEDIA_ROOT}), # change password url(r'^change_passwd/$', views.ChangePasswdView.as_view(), name='change_passwd'), # password-recovery email route url(r'^password/forget/$', views.PasswordForget.as_view(), name="password_forget"), # reset password url(r'^password/reset/(\w+)/$', views.PasswordReset.as_view(), name="password_reset"), # contact me url(r'^password/callme', views.Callme, name="callme") ]
import os
import yaml
import hashlib
import importlib
import sys
import argparse
from time import sleep
from pathlib import Path
from pymongo import MongoClient
from feed import mainCaller
from database_connector import mongo_update
from front_end_scorer import pre_scorer

try:
    p = Path(__file__).parents[1]
    sys.path.append(str(p))
except IndexError:
    print("[!] Error: Use full path of the file")
    sys.exit(1)


def check():
    print("[+] Parsing settings.yaml to get feeds")
    fil = os.environ.get("CONFIG_FILE", default="settings.yaml")
    db = os.environ.get("MONGO_DB", default="misp_feed")
    client = MongoClient(
        "mongodb://root:password@localhost:27017/?authSource=admin"
    )
    all_dbs = client.list_database_names()
    if db in all_dbs:
        database = client[db]
        filename = str(Path(__file__).parents[1]) + "/feed_ingestor/" + fil
        old_file = database.file.find_one({"filename": filename})
        if str(old_file["hash"]) != str(
            hashlib.md5(open(str(filename), "rb").read()).hexdigest()
        ):
            feed_updater = mainCaller()
            # If this step fails we simply skip this run: check() is called
            # again a minute later, by which time the feed is normally indexed
            # in MISP as an event with all of its attributes and cached.
            feed_list, event_list = feed_updater.update_misp(
                filename=str(filename), include_event_tags=False
            )
            # Not reached if the call above produced no feed_list/event_list
            # (i.e. it errored out).
            feed_list, event_list = feed_updater.update_file(
                filename=str(filename)
            )
            # Likewise skipped when no feed_list/event_list is available;
            # erroring out means the event does not yet have all attributes.
            mongo_update.update_feed(
                feed_list=feed_list, event_list=event_list
            )
            old_file["hash"] = str(
                hashlib.md5(open(str(filename), "rb").read()).hexdigest()
            )
            database.file.replace_one(
                filter={"_id": old_file["_id"]}, replacement=old_file
            )
        else:
            feed_updater = mainCaller()
            feed_list, event_list = feed_updater.update_file(
                filename=str(filename), include_event_tags=False
            )
            mongo_update.update_feed(feed_list=feed_list, event_list=event_list)
            old_file["hash"] = str(
                hashlib.md5(open(str(filename), "rb").read()).hexdigest()
            )
            database.file.replace_one(
                filter={"_id": old_file["_id"]}, replacement=old_file
            )
    else:
        database = client[db]
        filename = str(Path(__file__).parents[1]) + "/feed_ingestor/" + fil
        database.file.insert_one(
            {
                "filename": filename,
                "hash": str(hashlib.md5(open(str(filename), "rb").read()).hexdigest()),
            }
        )
        feed_updater = mainCaller()
        feed_list, event_list = feed_updater.update_misp(
            filename=str(filename), include_event_tags=False
        )
        mongo_update.update_feed(feed_list=feed_list, event_list=event_list)
        feed_list = feed_updater.update_file(filename=str(filename))
        sleep(240)
        ip_list = feed_updater.update_attributes()
        # Pull all IPs and their properties into the current MongoDB collection.
        mongo_update.add_ip(ip_list)
    print("[+] Scoring engine started")
    print("[+] Loading complete")


def add_ip():
    print("[+] IP address being added to db")
    ip_updater = mainCaller()
    ip_list = ip_updater.update_attributes()
    mongo_update.add_ip(ip_list)
    print("[+] Scoring engine started")
    pre_scorer.score_attributes()
    print("[+] Loading complete")


if __name__ == '__main__':
    arg = argparse.ArgumentParser(description='IP reputation program')
    arg.add_argument('-s', const='start', required=False,
                     help='Required only for the first run', nargs='?')
    config = open(f"{p}/config.yaml")
    parsed_yaml_file = yaml.load(config, Loader=yaml.FullLoader)
    os.environ['MISP_URL'] = parsed_yaml_file['credentials']['MISP_URL']
    os.environ['MISP_KEY'] = parsed_yaml_file['credentials']['MISP_KEY']
    config.close()
    args = arg.parse_args()
    if args.s is not None:
        check()
    else:
        add_ip()
import asyncio import copy import json import logging import ssl import time from decimal import Decimal from typing import Any, Dict, List, Optional import aiohttp from hummingbot.client.config.fee_overrides_config_map import fee_overrides_config_map from hummingbot.client.config.global_config_map import global_config_map from hummingbot.client.settings import GATEAWAY_CA_CERT_PATH, GATEAWAY_CLIENT_CERT_PATH, GATEAWAY_CLIENT_KEY_PATH from hummingbot.connector.connector_base import ConnectorBase from hummingbot.connector.connector.balancer.balancer_in_flight_order import BalancerInFlightOrder from hummingbot.core.network_iterator import NetworkStatus from hummingbot.core.data_type.cancellation_result import CancellationResult from hummingbot.core.data_type.limit_order import LimitOrder from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount from hummingbot.core.event.events import ( BuyOrderCompletedEvent, BuyOrderCreatedEvent, MarketEvent, MarketOrderFailureEvent, OrderFilledEvent, OrderType, SellOrderCompletedEvent, SellOrderCreatedEvent, TradeType ) from hummingbot.core.utils import async_ttl_cache from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather from hummingbot.core.utils.ethereum import check_transaction_exceptions, fetch_trading_pairs from hummingbot.core.utils.tracking_nonce import get_tracking_nonce from hummingbot.logger import HummingbotLogger from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL s_logger = None s_decimal_0 = Decimal("0") s_decimal_NaN = Decimal("nan") logging.basicConfig(level=METRICS_LOG_LEVEL) class BalancerConnector(ConnectorBase): """ BalancerConnector connects with balancer gateway APIs and provides pricing, user account tracking and trading functionality. """ API_CALL_TIMEOUT = 10.0 POLL_INTERVAL = 1.0 UPDATE_BALANCE_INTERVAL = 30.0 @classmethod def logger(cls) -> HummingbotLogger: global s_logger if s_logger is None: s_logger = logging.getLogger(__name__) return s_logger def __init__(self, trading_pairs: List[str], wallet_private_key: str, ethereum_rpc_url: str, trading_required: bool = True ): """ :param trading_pairs: a list of trading pairs :param wallet_private_key: a private key for eth wallet :param ethereum_rpc_url: this is usually infura RPC URL :param trading_required: Whether actual trading is needed. 
""" super().__init__() self._trading_pairs = trading_pairs self._tokens = set() for trading_pair in trading_pairs: self._tokens.update(set(trading_pair.split("-"))) self._wallet_private_key = wallet_private_key self._ethereum_rpc_url = ethereum_rpc_url self._trading_required = trading_required self._ev_loop = asyncio.get_event_loop() self._shared_client = None self._last_poll_timestamp = 0.0 self._last_balance_poll_timestamp = time.time() self._last_est_gas_cost_reported = 0 self._in_flight_orders = {} self._allowances = {} self._status_polling_task = None self._auto_approve_task = None self._initiate_pool_task = None self._initiate_pool_status = None self._real_time_balance_update = False self._poll_notifier = None @property def name(self): return "balancer" @staticmethod async def fetch_trading_pairs() -> List[str]: return await fetch_trading_pairs() @property def limit_orders(self) -> List[LimitOrder]: return [ in_flight_order.to_limit_order() for in_flight_order in self._in_flight_orders.values() ] async def initiate_pool(self) -> str: """ Initiate connector and cache pools """ try: self.logger().info(f"Initializing Balancer connector and caching pools for {self._trading_pairs}.") resp = await self._api_request("get", "eth/balancer/start", {"pairs": json.dumps(self._trading_pairs)}) status = bool(str(resp["success"])) if bool(str(resp["success"])): self._initiate_pool_status = status except asyncio.CancelledError: raise except Exception as e: self.logger().network( f"Error initializing {self._trading_pairs} swap pools", exc_info=True, app_warning_msg=str(e) ) async def auto_approve(self): """ Automatically approves Balancer contract as a spender for token in trading pairs. It first checks if there are any already approved amount (allowance) """ self.logger().info("Checking for allowances...") self._allowances = await self.get_allowances() for token, amount in self._allowances.items(): if amount <= s_decimal_0: amount_approved = await self.approve_balancer_spender(token) if amount_approved > 0: self._allowances[token] = amount_approved await asyncio.sleep(2) else: break async def approve_balancer_spender(self, token_symbol: str) -> Decimal: """ Approves Balancer contract as a spender for a token. :param token_symbol: token to approve. """ resp = await self._api_request("post", "eth/approve", {"token": token_symbol, "connector": self.name}) amount_approved = Decimal(str(resp["amount"])) if amount_approved > 0: self.logger().info(f"Approved Balancer spender contract for {token_symbol}.") else: self.logger().info(f"Balancer spender contract approval failed on {token_symbol}.") return amount_approved async def get_allowances(self) -> Dict[str, Decimal]: """ Retrieves allowances for token in trading_pairs :return: A dictionary of token and its allowance (how much Balancer can spend). """ ret_val = {} resp = await self._api_request("post", "eth/allowances", {"tokenList": "[" + (",".join(['"' + t + '"' for t in self._tokens])) + "]", "connector": self.name}) for token, amount in resp["approvals"].items(): ret_val[token] = Decimal(str(amount)) return ret_val @async_ttl_cache(ttl=5, maxsize=10) async def get_quote_price(self, trading_pair: str, is_buy: bool, amount: Decimal) -> Optional[Decimal]: """ Retrieves a quote price. :param trading_pair: The market trading pair :param is_buy: True for an intention to buy, False for an intention to sell :param amount: The amount required (in base token unit) :return: The quote price. 
""" try: base, quote = trading_pair.split("-") side = "buy" if is_buy else "sell" resp = await self._api_request("post", "eth/balancer/price", {"base": base, "quote": quote, "amount": amount, "side": side.upper()}) required_items = ["price", "gasLimit", "gasPrice", "gasCost"] if any(item not in resp.keys() for item in required_items): if "info" in resp.keys(): self.logger().info(f"Unable to get price. {resp['info']}") else: self.logger().info(f"Missing data from price result. Incomplete return result for ({resp.keys()})") else: gas_limit = resp["gasLimit"] gas_price = resp["gasPrice"] gas_cost = resp["gasCost"] price = resp["price"] account_standing = { "allowances": self._allowances, "balances": self._account_balances, "base": base, "quote": quote, "amount": amount, "side": side, "gas_limit": gas_limit, "gas_price": gas_price, "gas_cost": gas_cost, "price": price, "swaps": len(resp["swaps"]) } exceptions = check_transaction_exceptions(account_standing) for index in range(len(exceptions)): self.logger().info(f"Warning! [{index+1}/{len(exceptions)}] {side} order - {exceptions[index]}") if price is not None and len(exceptions) == 0: fee_overrides_config_map["balancer_maker_fixed_fees"].value = [ TokenAmount("ETH", Decimal(str(gas_cost))) ] fee_overrides_config_map["balancer_taker_fixed_fees"].value = [ TokenAmount("ETH", Decimal(str(gas_cost))) ] return Decimal(str(price)) except asyncio.CancelledError: raise except Exception as e: self.logger().network( f"Error getting quote price for {trading_pair} {side} order for {amount} amount.", exc_info=True, app_warning_msg=str(e) ) async def get_order_price(self, trading_pair: str, is_buy: bool, amount: Decimal) -> Decimal: """ This is simply the quote price """ return await self.get_quote_price(trading_pair, is_buy, amount) def buy(self, trading_pair: str, amount: Decimal, order_type: OrderType, price: Decimal) -> str: """ Buys an amount of base token for a given price (or cheaper). :param trading_pair: The market trading pair :param amount: The order amount (in base token unit) :param order_type: Any order type is fine, not needed for this. :param price: The maximum price for the order. :return: A newly created order id (internal). """ return self.place_order(True, trading_pair, amount, price) def sell(self, trading_pair: str, amount: Decimal, order_type: OrderType, price: Decimal) -> str: """ Sells an amount of base token for a given price (or at a higher price). :param trading_pair: The market trading pair :param amount: The order amount (in base token unit) :param order_type: Any order type is fine, not needed for this. :param price: The minimum price for the order. :return: A newly created order id (internal). """ return self.place_order(False, trading_pair, amount, price) def place_order(self, is_buy: bool, trading_pair: str, amount: Decimal, price: Decimal) -> str: """ Places an order. :param is_buy: True for buy order :param trading_pair: The market trading pair :param amount: The order amount (in base token unit) :param price: The minimum price for the order. :return: A newly created order id (internal). 
""" side = TradeType.BUY if is_buy else TradeType.SELL order_id = f"{side.name.lower()}-{trading_pair}-{get_tracking_nonce()}" safe_ensure_future(self._create_order(side, order_id, trading_pair, amount, price)) return order_id async def _create_order(self, trade_type: TradeType, order_id: str, trading_pair: str, amount: Decimal, price: Decimal): """ Calls buy or sell API end point to place an order, starts tracking the order and triggers relevant order events. :param trade_type: BUY or SELL :param order_id: Internal order id (also called client_order_id) :param trading_pair: The market to place order :param amount: The order amount (in base token value) :param price: The order price """ amount = self.quantize_order_amount(trading_pair, amount) price = self.quantize_order_price(trading_pair, price) base, quote = trading_pair.split("-") api_params = {"base": base, "quote": quote, "side": trade_type.name.upper(), "amount": str(amount), "limitPrice": str(price), } try: order_result = await self._api_request("post", "eth/balancer/trade", api_params) hash = order_result.get("txHash") gas_price = order_result.get("gasPrice") gas_limit = order_result.get("gasLimit") gas_cost = order_result.get("gasCost") self.start_tracking_order(order_id, None, trading_pair, trade_type, price, amount, gas_price) tracked_order = self._in_flight_orders.get(order_id) # update onchain balance await self._update_balances() if tracked_order is not None: self.logger().info(f"Created {trade_type.name} order {order_id} txHash: {hash} " f"for {amount} {trading_pair}. Estimated Gas Cost: {gas_cost} ETH " f" (gas limit: {gas_limit}, gas price: {gas_price})") tracked_order.update_exchange_order_id(hash) tracked_order.gas_price = gas_price if hash is not None: tracked_order.fee_asset = "ETH" tracked_order.executed_amount_base = amount tracked_order.executed_amount_quote = amount * price event_tag = MarketEvent.BuyOrderCreated if trade_type is TradeType.BUY else MarketEvent.SellOrderCreated event_class = BuyOrderCreatedEvent if trade_type is TradeType.BUY else SellOrderCreatedEvent self.trigger_event(event_tag, event_class( self.current_timestamp, OrderType.LIMIT, trading_pair, amount, price, order_id, tracked_order.creation_timestamp, hash)) else: self.trigger_event(MarketEvent.OrderFailure, MarketOrderFailureEvent(self.current_timestamp, order_id, OrderType.LIMIT)) except asyncio.CancelledError: raise except Exception as e: self.stop_tracking_order(order_id) self.logger().network( f"Error submitting {trade_type.name} order to Balancer for " f"{amount} {trading_pair} " f"{price}.", exc_info=True, app_warning_msg=str(e) ) self.trigger_event(MarketEvent.OrderFailure, MarketOrderFailureEvent(self.current_timestamp, order_id, OrderType.LIMIT)) def start_tracking_order(self, order_id: str, exchange_order_id: str, trading_pair: str, trade_type: TradeType, price: Decimal, amount: Decimal, gas_price: Decimal): """ Starts tracking an order by simply adding it into _in_flight_orders dictionary. """ self._in_flight_orders[order_id] = BalancerInFlightOrder( client_order_id=order_id, exchange_order_id=exchange_order_id, trading_pair=trading_pair, order_type=OrderType.LIMIT, trade_type=trade_type, price=price, amount=amount, gas_price=gas_price, creation_timestamp=self.current_timestamp ) def stop_tracking_order(self, order_id: str): """ Stops tracking an order by simply removing it from _in_flight_orders dictionary. 
""" if order_id in self._in_flight_orders: del self._in_flight_orders[order_id] async def _update_order_status(self): """ Calls REST API to get status update for each in-flight order. """ if len(self._in_flight_orders) > 0: tracked_orders = list(self._in_flight_orders.values()) tasks = [] for tracked_order in tracked_orders: order_id = await tracked_order.get_exchange_order_id() tasks.append(self._api_request("post", "eth/poll", {"txHash": order_id})) update_results = await safe_gather(*tasks, return_exceptions=True) for update_result in update_results: self.logger().info(f"Polling for order status updates of {len(tasks)} orders.") if isinstance(update_result, Exception): raise update_result if "txHash" not in update_result: self.logger().info(f"_update_order_status txHash not in resp: {update_result}") continue if update_result["confirmed"] is True: if update_result["receipt"]["status"] == 1: gas_used = update_result["receipt"]["gasUsed"] gas_price = tracked_order.gas_price fee = Decimal(str(gas_used)) * Decimal(str(gas_price)) / Decimal(str(1e9)) self.trigger_event( MarketEvent.OrderFilled, OrderFilledEvent( self.current_timestamp, tracked_order.client_order_id, tracked_order.trading_pair, tracked_order.trade_type, tracked_order.order_type, Decimal(str(tracked_order.price)), Decimal(str(tracked_order.amount)), AddedToCostTradeFee( flat_fees=[TokenAmount(tracked_order.fee_asset, Decimal(str(fee)))] ), exchange_trade_id=order_id ) ) tracked_order.last_state = "FILLED" self.logger().info(f"The {tracked_order.trade_type.name} order " f"{tracked_order.client_order_id} has completed " f"according to order status API.") event_tag = MarketEvent.BuyOrderCompleted if tracked_order.trade_type is TradeType.BUY \ else MarketEvent.SellOrderCompleted event_class = BuyOrderCompletedEvent if tracked_order.trade_type is TradeType.BUY \ else SellOrderCompletedEvent self.trigger_event(event_tag, event_class(self.current_timestamp, tracked_order.client_order_id, tracked_order.base_asset, tracked_order.quote_asset, tracked_order.fee_asset, tracked_order.executed_amount_base, tracked_order.executed_amount_quote, float(fee), tracked_order.order_type)) self.stop_tracking_order(tracked_order.client_order_id) else: self.logger().info( f"The market order {tracked_order.client_order_id} has failed according to order status API. 
") self.trigger_event(MarketEvent.OrderFailure, MarketOrderFailureEvent( self.current_timestamp, tracked_order.client_order_id, tracked_order.order_type )) self.stop_tracking_order(tracked_order.client_order_id) def get_taker_order_type(self): return OrderType.LIMIT def get_order_price_quantum(self, trading_pair: str, price: Decimal) -> Decimal: return Decimal("1e-15") def get_order_size_quantum(self, trading_pair: str, order_size: Decimal) -> Decimal: return Decimal("1e-15") @property def ready(self): return all(self.status_dict.values()) def has_allowances(self) -> bool: """ Checks if all tokens have allowance (an amount approved) """ return len(self._allowances.values()) == len(self._tokens) and \ all(amount > s_decimal_0 for amount in self._allowances.values()) @property def status_dict(self) -> Dict[str, bool]: return { "account_balance": len(self._account_balances) > 0 if self._trading_required else True, "allowances": self.has_allowances() if self._trading_required else True } async def start_network(self): if self._trading_required: self._status_polling_task = safe_ensure_future(self._status_polling_loop()) self._initiate_pool_task = safe_ensure_future(self.initiate_pool()) self._auto_approve_task = safe_ensure_future(self.auto_approve()) async def stop_network(self): if self._status_polling_task is not None: self._status_polling_task.cancel() self._status_polling_task = None if self._auto_approve_task is not None: self._auto_approve_task.cancel() self._auto_approve_task = None if self._initiate_pool_task is not None: self._initiate_pool_task.cancel() self._initiate_pool_task = None async def check_network(self) -> NetworkStatus: try: response = await self._api_request("get", "api") if response["status"] != "ok": raise Exception(f"Error connecting to Gateway API. HTTP status is {response.status}.") except asyncio.CancelledError: raise except Exception: return NetworkStatus.NOT_CONNECTED return NetworkStatus.CONNECTED def tick(self, timestamp: float): """ Is called automatically by the clock for each clock's tick (1 second by default). It checks if status polling task is due for execution. """ if time.time() - self._last_poll_timestamp > self.POLL_INTERVAL: if self._poll_notifier is not None and not self._poll_notifier.is_set(): self._poll_notifier.set() async def _status_polling_loop(self): while True: try: self._poll_notifier = asyncio.Event() await self._poll_notifier.wait() await safe_gather( self._update_balances(on_interval = True), self._update_order_status(), ) self._last_poll_timestamp = self.current_timestamp except asyncio.CancelledError: raise except Exception as e: self.logger().error(str(e), exc_info=True) self.logger().network("Unexpected error while fetching account updates.", exc_info=True, app_warning_msg="Could not fetch balances from Gateway API.") await asyncio.sleep(0.5) async def _update_balances(self, on_interval = False): """ Calls Eth API to update total and available balances. 
""" last_tick = self._last_balance_poll_timestamp current_tick = self.current_timestamp if not on_interval or (current_tick - last_tick) > self.UPDATE_BALANCE_INTERVAL: self._last_balance_poll_timestamp = current_tick local_asset_names = set(self._account_balances.keys()) remote_asset_names = set() resp_json = await self._api_request("post", "eth/balances", {"tokenList": "[" + (",".join(['"' + t + '"' for t in self._tokens])) + "]"}) for token, bal in resp_json["balances"].items(): self._account_available_balances[token] = Decimal(str(bal)) self._account_balances[token] = Decimal(str(bal)) remote_asset_names.add(token) asset_names_to_remove = local_asset_names.difference(remote_asset_names) for asset_name in asset_names_to_remove: del self._account_available_balances[asset_name] del self._account_balances[asset_name] self._in_flight_orders_snapshot = {k: copy.copy(v) for k, v in self._in_flight_orders.items()} self._in_flight_orders_snapshot_timestamp = self.current_timestamp async def _http_client(self) -> aiohttp.ClientSession: """ :returns Shared client session instance """ if self._shared_client is None: ssl_ctx = ssl.create_default_context(cafile=GATEAWAY_CA_CERT_PATH) ssl_ctx.load_cert_chain(GATEAWAY_CLIENT_CERT_PATH, GATEAWAY_CLIENT_KEY_PATH) conn = aiohttp.TCPConnector(ssl_context=ssl_ctx) self._shared_client = aiohttp.ClientSession(connector=conn) return self._shared_client async def _api_request(self, method: str, path_url: str, params: Dict[str, Any] = {}) -> Dict[str, Any]: """ Sends an aiohttp request and waits for a response. :param method: The HTTP method, e.g. get or post :param path_url: The path url or the API end point :param params: A dictionary of required params for the end point :returns A response in json format. """ base_url = f"https://{global_config_map['gateway_api_host'].value}:" \ f"{global_config_map['gateway_api_port'].value}" url = f"{base_url}/{path_url}" client = await self._http_client() if method == "get": if len(params) > 0: response = await client.get(url, params=params) else: response = await client.get(url) elif method == "post": params["privateKey"] = self._wallet_private_key if params["privateKey"][:2] != "0x": params["privateKey"] = "0x" + params["privateKey"] response = await client.post(url, data=params) parsed_response = json.loads(await response.text()) if response.status != 200: err_msg = "" if "error" in parsed_response: err_msg = f" Message: {parsed_response['error']}" raise IOError(f"Error fetching data from {url}. HTTP status is {response.status}.{err_msg}") if "error" in parsed_response: raise Exception(f"Error: {parsed_response['error']} {parsed_response['message']}") return parsed_response async def cancel_all(self, timeout_seconds: float) -> List[CancellationResult]: return [] @property def in_flight_orders(self) -> Dict[str, BalancerInFlightOrder]: return self._in_flight_orders
import datetime from decimal import Decimal from unittest.mock import Mock from django.template import TemplateDoesNotExist from django.test import TestCase from haystack.fields import * from test_haystack.core.models import ( ManyToManyLeftSideModel, ManyToManyRightSideModel, MockModel, MockTag, OneToManyLeftSideModel, OneToManyRightSideModel, ) class SearchFieldTestCase(TestCase): def test_get_iterable_objects_with_none(self): self.assertEqual([], SearchField.get_iterable_objects(None)) def test_get_iterable_objects_with_single_non_iterable_object(self): obj = object() expected = [obj] self.assertEqual(expected, SearchField.get_iterable_objects(obj)) def test_get_iterable_objects_with_list_stays_the_same(self): objects = [object(), object()] self.assertIs(objects, SearchField.get_iterable_objects(objects)) def test_get_iterable_objects_with_django_manytomany_rel(self): left_model = ManyToManyLeftSideModel.objects.create() right_model_1 = ManyToManyRightSideModel.objects.create(name="Right side 1") right_model_2 = ManyToManyRightSideModel.objects.create() left_model.related_models.add(right_model_1) left_model.related_models.add(right_model_2) result = SearchField.get_iterable_objects(left_model.related_models) self.assertTrue(right_model_1 in result) self.assertTrue(right_model_2 in result) def test_get_iterable_objects_with_django_onetomany_rel(self): left_model = OneToManyLeftSideModel.objects.create() right_model_1 = OneToManyRightSideModel.objects.create(left_side=left_model) right_model_2 = OneToManyRightSideModel.objects.create(left_side=left_model) result = SearchField.get_iterable_objects(left_model.right_side) self.assertTrue(right_model_1 in result) self.assertTrue(right_model_2 in result) def test_resolve_attributes_lookup_with_field_that_points_to_none(self): related = Mock(spec=["none_field"], none_field=None) obj = Mock(spec=["related"], related=[related]) field = SearchField(null=False) self.assertRaises( SearchFieldError, field.resolve_attributes_lookup, [obj], ["related", "none_field"], ) def test_resolve_attributes_lookup_with_field_that_points_to_none_but_is_allowed_to_be_null( self, ): related = Mock(spec=["none_field"], none_field=None) obj = Mock(spec=["related"], related=[related]) field = SearchField(null=True) self.assertEqual( [None], field.resolve_attributes_lookup([obj], ["related", "none_field"]) ) def test_resolve_attributes_lookup_with_field_that_points_to_none_but_has_default( self, ): related = Mock(spec=["none_field"], none_field=None) obj = Mock(spec=["related"], related=[related]) field = SearchField(default="Default value") self.assertEqual( ["Default value"], field.resolve_attributes_lookup([obj], ["related", "none_field"]), ) def test_resolve_attributes_lookup_with_deep_relationship(self): related_lvl_2 = Mock(spec=["value"], value=1) related = Mock(spec=["related"], related=[related_lvl_2, related_lvl_2]) obj = Mock(spec=["related"], related=[related]) field = SearchField() self.assertEqual( [1, 1], field.resolve_attributes_lookup([obj], ["related", "related", "value"]), ) def test_resolve_attributes_lookup_with_deep_relationship_through_m2m(self): # obj.related2m: # - related1 # .deep1 # .value = 1 # - related2 # .deep2 # .value = 2 # - related3 # .deep3 # .value = 3 values = [1, 2, 3] deep1, deep2, deep3 = (Mock(spec=["value"], value=x) for x in values) related1, related2, related3 = ( Mock(spec=["related"], related=x) for x in (deep1, deep2, deep3) ) m2m_rel = Mock( spec=["__iter__"], __iter__=lambda self: iter([related1, related2, related3]), ) 
obj = Mock(spec=["related_m2m"], related_m2m=m2m_rel) field = SearchField() self.assertEqual( values, field.resolve_attributes_lookup([obj], ["related_m2m", "related", "value"]), ) def test_prepare_with_null_django_onetomany_rel(self): left_model = OneToManyLeftSideModel.objects.create() field = SearchField(model_attr="right_side__pk", null=True) result = field.prepare(left_model) self.assertEqual(None, result) class CharFieldTestCase(TestCase): def test_init(self): try: foo = CharField(model_attr="foo") except: self.fail() def test_prepare(self): mock = MockModel() mock.user = "daniel" author = CharField(model_attr="user") self.assertEqual(author.prepare(mock), "daniel") # Do a lookup through the relation. mock_tag = MockTag.objects.create(name="primary") mock = MockModel() mock.tag = mock_tag tag_name = CharField(model_attr="tag__name") self.assertEqual(tag_name.prepare(mock), "primary") # Use the default. mock = MockModel() author = CharField(model_attr="author", default="") self.assertEqual(author.prepare(mock), "") # Simulate failed lookups. mock_tag = MockTag.objects.create(name="primary") mock = MockModel() mock.tag = mock_tag tag_slug = CharField(model_attr="tag__slug") self.assertRaises(SearchFieldError, tag_slug.prepare, mock) # Simulate failed lookups and ensure we don't get a UnicodeDecodeError # in the error message. mock_tag = MockTag.objects.create(name="básico") mock = MockModel() mock.tag = mock_tag tag_slug = CharField(model_attr="tag__slug") self.assertRaises(SearchFieldError, tag_slug.prepare, mock) # Simulate default='foo'. mock = MockModel() default = CharField(default="foo") self.assertEqual(default.prepare(mock), "foo") # Simulate null=True. mock = MockModel() empty = CharField(null=True) self.assertEqual(empty.prepare(mock), None) mock = MockModel() mock.user = None author = CharField(model_attr="user", null=True) self.assertEqual(author.prepare(mock), None) class NgramFieldTestCase(TestCase): def test_init(self): try: foo = NgramField(model_attr="foo") except: self.fail() self.assertRaises(SearchFieldError, NgramField, faceted=True) def test_prepare(self): mock = MockModel() mock.user = "daniel" author = NgramField(model_attr="user") self.assertEqual(author.prepare(mock), "daniel") # Do a lookup through the relation. mock_tag = MockTag.objects.create(name="primary") mock = MockModel() mock.tag = mock_tag tag_name = NgramField(model_attr="tag__name") self.assertEqual(tag_name.prepare(mock), "primary") # Use the default. mock = MockModel() author = NgramField(model_attr="author", default="") self.assertEqual(author.prepare(mock), "") # Simulate failed lookups. mock_tag = MockTag.objects.create(name="primary") mock = MockModel() mock.tag = mock_tag tag_slug = NgramField(model_attr="tag__slug") self.assertRaises(SearchFieldError, tag_slug.prepare, mock) # Simulate default='foo'. mock = MockModel() default = NgramField(default="foo") self.assertEqual(default.prepare(mock), "foo") # Simulate null=True. 
mock = MockModel() empty = NgramField(null=True) self.assertEqual(empty.prepare(mock), None) mock = MockModel() mock.user = None author = NgramField(model_attr="user", null=True) self.assertEqual(author.prepare(mock), None) class EdgeNgramFieldTestCase(TestCase): def test_init(self): try: foo = EdgeNgramField(model_attr="foo") except: self.fail() self.assertRaises(SearchFieldError, EdgeNgramField, faceted=True) def test_prepare(self): mock = MockModel() mock.user = "daniel" author = EdgeNgramField(model_attr="user") self.assertEqual(author.prepare(mock), "daniel") # Do a lookup through the relation. mock_tag = MockTag.objects.create(name="primary") mock = MockModel() mock.tag = mock_tag tag_name = EdgeNgramField(model_attr="tag__name") self.assertEqual(tag_name.prepare(mock), "primary") # Use the default. mock = MockModel() author = EdgeNgramField(model_attr="author", default="") self.assertEqual(author.prepare(mock), "") # Simulate failed lookups. mock_tag = MockTag.objects.create(name="primary") mock = MockModel() mock.tag = mock_tag tag_slug = EdgeNgramField(model_attr="tag__slug") self.assertRaises(SearchFieldError, tag_slug.prepare, mock) # Simulate default='foo'. mock = MockModel() default = EdgeNgramField(default="foo") self.assertEqual(default.prepare(mock), "foo") # Simulate null=True. mock = MockModel() empty = EdgeNgramField(null=True) self.assertEqual(empty.prepare(mock), None) mock = MockModel() mock.user = None author = EdgeNgramField(model_attr="user", null=True) self.assertEqual(author.prepare(mock), None) class IntegerFieldTestCase(TestCase): def test_init(self): try: foo = IntegerField(model_attr="foo") except: self.fail() def test_prepare(self): mock = MockModel() mock.pk = 1 pk = IntegerField(model_attr="pk") self.assertEqual(pk.prepare(mock), 1) # Simulate failed lookups. mock_tag = MockTag.objects.create(name="primary") mock = MockModel() mock.tag = mock_tag tag_count = IntegerField(model_attr="tag__count") self.assertRaises(SearchFieldError, tag_count.prepare, mock) # Simulate default=1. mock = MockModel() default = IntegerField(default=1) self.assertEqual(default.prepare(mock), 1) # Simulate null=True. mock = MockModel() pk_none = IntegerField(model_attr="pk", null=True) self.assertEqual(pk_none.prepare(mock), None) class FloatFieldTestCase(TestCase): def test_init(self): try: foo = FloatField(model_attr="foo") except: self.fail() def test_prepare(self): mock = MockModel() mock.floaty = 12.5 floaty = FloatField(model_attr="floaty") self.assertEqual(floaty.prepare(mock), 12.5) # Simulate default=1.5. mock = MockModel() default = FloatField(default=1.5) self.assertEqual(default.prepare(mock), 1.5) # Simulate null=True. mock = MockModel() floaty_none = FloatField(null=True) self.assertEqual(floaty_none.prepare(mock), None) class DecimalFieldTestCase(TestCase): def test_init(self): try: foo = DecimalField(model_attr="foo") except: self.fail() def test_prepare(self): mock = MockModel() mock.floaty = Decimal("12.5") floaty = DecimalField(model_attr="floaty") self.assertEqual(floaty.prepare(mock), "12.5") # Simulate default=1.5. mock = MockModel() default = DecimalField(default="1.5") self.assertEqual(default.prepare(mock), "1.5") # Simulate null=True. 
mock = MockModel() floaty_none = DecimalField(null=True) self.assertEqual(floaty_none.prepare(mock), None) class BooleanFieldTestCase(TestCase): def test_init(self): try: foo = BooleanField(model_attr="foo") except: self.fail() def test_prepare(self): mock = MockModel() mock.active = True is_active = BooleanField(model_attr="active") self.assertEqual(is_active.prepare(mock), True) # Simulate default=True. mock = MockModel() default = BooleanField(default=True) self.assertEqual(default.prepare(mock), True) # Simulate null=True. mock = MockModel() booly_none = BooleanField(null=True) self.assertEqual(booly_none.prepare(mock), None) class DateFieldTestCase(TestCase): def test_init(self): try: foo = DateField(model_attr="foo") except: self.fail() def test_convert(self): pub_date = DateField() self.assertEqual(pub_date.convert("2016-02-16"), datetime.date(2016, 2, 16)) def test_prepare(self): mock = MockModel() mock.pub_date = datetime.date(2009, 2, 13) pub_date = DateField(model_attr="pub_date") self.assertEqual(pub_date.prepare(mock), datetime.date(2009, 2, 13)) # Simulate default=datetime.date(2000, 1, 1). mock = MockModel() default = DateField(default=datetime.date(2000, 1, 1)) self.assertEqual(default.prepare(mock), datetime.date(2000, 1, 1)) def test_prepare_from_string(self): mock = MockModel() mock.pub_date = datetime.date(2016, 2, 16) pub_date = DateField(model_attr="pub_date") self.assertEqual(pub_date.prepare(mock), datetime.date(2016, 2, 16)) class DateTimeFieldTestCase(TestCase): def test_init(self): try: foo = DateTimeField(model_attr="foo") except: self.fail() def test_convert(self): pub_date = DateTimeField() self.assertEqual( pub_date.convert("2016-02-16T10:02:03"), datetime.datetime(2016, 2, 16, 10, 2, 3), ) def test_prepare(self): mock = MockModel() mock.pub_date = datetime.datetime(2009, 2, 13, 10, 1, 0) pub_date = DateTimeField(model_attr="pub_date") self.assertEqual( pub_date.prepare(mock), datetime.datetime(2009, 2, 13, 10, 1, 0) ) # Simulate default=datetime.datetime(2009, 2, 13, 10, 01, 00). mock = MockModel() default = DateTimeField(default=datetime.datetime(2000, 1, 1, 0, 0, 0)) self.assertEqual(default.prepare(mock), datetime.datetime(2000, 1, 1, 0, 0, 0)) def test_prepare_from_string(self): mock = MockModel() mock.pub_date = "2016-02-16T10:01:02Z" pub_date = DateTimeField(model_attr="pub_date") self.assertEqual( pub_date.prepare(mock), datetime.datetime(2016, 2, 16, 10, 1, 2) ) class MultiValueFieldTestCase(TestCase): def test_init(self): try: foo = MultiValueField(model_attr="foo") except: self.fail() self.assertRaises(SearchFieldError, MultiValueField, use_template=True) def test_prepare(self): mock = MockModel() mock.sites = ["3", "4", "5"] sites = MultiValueField(model_attr="sites") self.assertEqual(sites.prepare(mock), ["3", "4", "5"]) # Simulate default=[1]. mock = MockModel() default = MultiValueField(default=[1]) self.assertEqual(default.prepare(mock), [1]) # Simulate null=True. 
mock = MockModel() multy_none = MultiValueField(null=True) self.assertEqual(multy_none.prepare(mock), None) def test_convert_with_single_string(self): field = MultiValueField() self.assertEqual(["String"], field.convert("String")) def test_convert_with_single_int(self): field = MultiValueField() self.assertEqual([1], field.convert(1)) def test_convert_with_list_of_strings(self): field = MultiValueField() self.assertEqual( ["String 1", "String 2"], field.convert(["String 1", "String 2"]) ) def test_convert_with_list_of_ints(self): field = MultiValueField() self.assertEqual([1, 2, 3], field.convert([1, 2, 3])) class CharFieldWithTemplateTestCase(TestCase): def test_init(self): try: foo = CharField(use_template=True) except: self.fail() try: foo = CharField(use_template=True, template_name="foo.txt") except: self.fail() foo = CharField(use_template=True, template_name="foo.txt") self.assertEqual(foo.template_name, "foo.txt") # Test the select_template usage. foo = CharField(use_template=True, template_name=["bar.txt", "foo.txt"]) self.assertEqual(foo.template_name, ["bar.txt", "foo.txt"]) def test_prepare(self): mock = MockModel() mock.pk = 1 mock.user = "daniel" template1 = CharField(use_template=True) self.assertRaises(SearchFieldError, template1.prepare, mock) template2 = CharField(use_template=True) template2.instance_name = "template_x" self.assertRaises(TemplateDoesNotExist, template2.prepare, mock) template3 = CharField(use_template=True) template3.instance_name = "template" self.assertEqual(template3.prepare(mock), "Indexed!\n1") template4 = CharField(use_template=True, template_name="search/indexes/foo.txt") template4.instance_name = "template" self.assertEqual(template4.prepare(mock), "FOO!\n") template5 = CharField( use_template=True, template_name=["foo.txt", "search/indexes/bar.txt"] ) template5.instance_name = "template" self.assertEqual(template5.prepare(mock), "BAR!\n") ############################################################################## # The following tests look like they don't do much, but it's important because # we need to verify that the faceted variants behave like the field they # emulate. The old-broke behavior was convert everything to string. ############################################################################## class FacetFieldTestCase(TestCase): def test_init(self): # You shouldn't use the FacetField itself. 
try: foo = FacetField(model_attr="foo") self.fail() except: pass try: foo_exact = FacetField(facet_for="bar") self.fail() except: pass class FacetCharFieldTestCase(TestCase): def test_init(self): try: foo = FacetCharField(model_attr="foo") foo_exact = FacetCharField(facet_for="bar") except: self.fail() self.assertEqual(foo.facet_for, None) self.assertEqual(foo_exact.null, True) self.assertEqual(foo_exact.facet_for, "bar") def test_prepare(self): mock = MockModel() mock.user = "daniel" author = FacetCharField(model_attr="user") self.assertEqual(author.prepare(mock), "daniel") class FacetIntegerFieldTestCase(TestCase): def test_init(self): try: foo = FacetIntegerField(model_attr="foo") foo_exact = FacetIntegerField(facet_for="bar") except: self.fail() self.assertEqual(foo.facet_for, None) self.assertEqual(foo_exact.null, True) self.assertEqual(foo_exact.facet_for, "bar") def test_prepare(self): mock = MockModel() mock.user = "daniel" mock.view_count = 13 view_count = FacetIntegerField(model_attr="view_count") self.assertEqual(view_count.prepare(mock), 13) class FacetFloatFieldTestCase(TestCase): def test_init(self): try: foo = FacetFloatField(model_attr="foo") foo_exact = FacetFloatField(facet_for="bar") except: self.fail() self.assertEqual(foo.facet_for, None) self.assertEqual(foo_exact.null, True) self.assertEqual(foo_exact.facet_for, "bar") def test_prepare(self): mock = MockModel() mock.user = "daniel" mock.price = 25.65 price = FacetFloatField(model_attr="price") self.assertEqual(price.prepare(mock), 25.65) class FacetBooleanFieldTestCase(TestCase): def test_init(self): try: foo = FacetBooleanField(model_attr="foo") foo_exact = FacetBooleanField(facet_for="bar") except: self.fail() self.assertEqual(foo.facet_for, None) self.assertEqual(foo_exact.null, True) self.assertEqual(foo_exact.facet_for, "bar") def test_prepare(self): mock = MockModel() mock.user = "daniel" mock.is_active = True is_active = FacetBooleanField(model_attr="is_active") self.assertEqual(is_active.prepare(mock), True) class FacetDateFieldTestCase(TestCase): def test_init(self): try: foo = FacetDateField(model_attr="foo") foo_exact = FacetDateField(facet_for="bar") except: self.fail() self.assertEqual(foo.facet_for, None) self.assertEqual(foo_exact.null, True) self.assertEqual(foo_exact.facet_for, "bar") def test_prepare(self): mock = MockModel() mock.user = "daniel" mock.created = datetime.date(2010, 10, 30) created = FacetDateField(model_attr="created") self.assertEqual(created.prepare(mock), datetime.date(2010, 10, 30)) class FacetDateTimeFieldTestCase(TestCase): def test_init(self): try: foo = FacetDateTimeField(model_attr="foo") foo_exact = FacetDateTimeField(facet_for="bar") except: self.fail() self.assertEqual(foo.facet_for, None) self.assertEqual(foo_exact.null, True) self.assertEqual(foo_exact.facet_for, "bar") def test_prepare(self): mock = MockModel() mock.user = "daniel" mock.created = datetime.datetime(2010, 10, 30, 3, 14, 25) created = FacetDateTimeField(model_attr="created") self.assertEqual( created.prepare(mock), datetime.datetime(2010, 10, 30, 3, 14, 25) ) class FacetMultiValueFieldTestCase(TestCase): def test_init(self): try: foo = FacetMultiValueField(model_attr="foo") foo_exact = FacetMultiValueField(facet_for="bar") except: self.fail() self.assertEqual(foo.facet_for, None) self.assertEqual(foo_exact.null, True) self.assertEqual(foo_exact.facet_for, "bar") def test_prepare(self): mock = MockModel() mock.user = "daniel" mock.sites = [1, 3, 4] sites = FacetMultiValueField(model_attr="sites") 
self.assertEqual(sites.prepare(mock), [1, 3, 4])
#ifndef RBX_CAPI_RUBY_IO_H #define RBX_CAPI_RUBY_IO_H // MRI includes this file #include "ruby/encoding.h" #endif
window._ = require('lodash'); /** * We'll load the axios HTTP library which allows us to easily issue requests * to our Laravel back-end. This library automatically handles sending the * CSRF token as a header based on the value of the "XSRF" token cookie. */ window.axios = require('axios'); window.axios.defaults.headers.common['X-Requested-With'] = 'XMLHttpRequest'; require('sweetalert') /** * Echo exposes an expressive API for subscribing to channels and listening * for events that are broadcast by Laravel. Echo and event broadcasting * allows your team to easily build robust real-time web applications. */ // import Echo from 'laravel-echo'; // window.Pusher = require('pusher-js'); // window.Echo = new Echo({ // broadcaster: 'pusher', // key: process.env.MIX_PUSHER_APP_KEY, // cluster: process.env.MIX_PUSHER_APP_CLUSTER, // forceTLS: true // });
from argparse import (
    ArgumentParser,
    Namespace,
    _SubParsersAction,
)
import pkg_resources
import sys

from trinity.config import (
    Eth1AppConfig,
    TrinityConfig,
)
from trinity.extensibility import (
    BaseMainProcessPlugin,
)
from trinity.plugins.builtin.attach.console import (
    console,
    db_shell,
)


def is_ipython_available() -> bool:
    try:
        pkg_resources.get_distribution('IPython')
    except pkg_resources.DistributionNotFound:
        return False
    else:
        return True


class AttachPlugin(BaseMainProcessPlugin):
    @property
    def name(self) -> str:
        return "Attach"

    @classmethod
    def configure_parser(cls, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None:
        attach_parser = subparser.add_parser(
            'attach',
            help='open a REPL attached to a currently running chain',
        )
        attach_parser.set_defaults(func=cls.run_console)

    @classmethod
    def run_console(cls, args: Namespace, trinity_config: TrinityConfig) -> None:
        try:
            console(trinity_config.jsonrpc_ipc_path, use_ipython=is_ipython_available())
        except FileNotFoundError as err:
            cls.get_logger().error(str(err))
            sys.exit(1)


class DbShellPlugin(BaseMainProcessPlugin):
    @property
    def name(self) -> str:
        return "DB Shell"

    @classmethod
    def configure_parser(cls, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None:
        attach_parser = subparser.add_parser(
            'db-shell',
            help='open a REPL to inspect the db',
        )
        attach_parser.set_defaults(func=cls.run_shell)

    @classmethod
    def run_shell(cls, args: Namespace, trinity_config: TrinityConfig) -> None:
        if trinity_config.has_app_config(Eth1AppConfig):
            config = trinity_config.get_app_config(Eth1AppConfig)
            db_shell(is_ipython_available(), config.database_dir, trinity_config)
        else:
            cls.get_logger().error(
                "DB Shell only supports the Ethereum 1 node at this time"
            )
# This is a map from an ISO 639 code or common name to its Wiktionary language name. # noqa: E501 # Note that the iso639 Python package that WikiPron uses can handle only ISO 639-1 and 639-2 codes. # noqa: E501 # ISO 639-3: https://iso639-3.sil.org/sites/iso639-3/files/downloads/iso-639-3.tab # noqa: E501 # Wiktionary: https://en.wiktionary.org/wiki/Category:Terms_with_IPA_pronunciation_by_language # noqa: E501 # TODO: Expand this as needed to cover additional languages. LANGUAGE_CODES = { # Greek. Would be "Greek, Modern (1453-)" in ISO 639. "el": "Greek", "ell": "Greek", "gre": "Greek", "greek": "Greek", "modern greek": "Greek", # Slovene. Would be "Slovenian" in ISO 639. "sl": "Slovene", "slv": "Slovene", "slovene": "Slovene", "slovenian": "Slovene", # Ancient Greek. Would be "Greek, Ancient (to 1453)" in ISO 639. "grc": "Ancient Greek", "ancient greek": "Ancient Greek", # Aramaic. Would be "Imperial Aramaic (700-300 BCE), Official Aramaic (700-300 BCE)" in ISO 639. # noqa: E501 "arc": "Aramaic", "aramaic": "Aramaic", # Cantonese. ISO 639-3 only. "yue": "Cantonese", "cantonese": "Cantonese", # Classical Nahuatl. ISO 639-3 only. "nci": "Classical Nahuatl", "nahuatl": "Classical Nahuatl", "classical nahuatl": "Classical Nahuatl", "aztec": "Classical Nahuatl", # Egyptian. Would be "Egyptian (Ancient)" in ISO 639. "egy": "Egyptian", "egyptian": "Egyptian", "ancient egyptian": "Egyptian", # Khmer. Would be "Central Khmer" by the iso639 package. "khm": "Khmer", "khmer": "Khmer", "central khmer": "Khmer", # Middle English. Would be "English, Middle (1100-1500)" in ISO 639. "enm": "Middle English", "middle english": "Middle English", # Norwegian Bokmål. Would be "Bokmål, Norwegian" by the iso639 package. "nob": "Norwegian Bokmål", "norwegian bokmål": "Norwegian Bokmål", # Old English. Would be "English, Old (ca. 450-1100)" in ISO 639. "ang": "Old English", "old english": "Old English", # Old Irish. Would be "Irish, Old (to 900)" in ISO 639. "sga": "Old Irish", "old irish": "Old Irish", # Serbo-Croatian. ISO 639-3 only. "hbs": "Serbo-Croatian", "serbo-croatian": "Serbo-Croatian", # Alemannic German. Would be "Alemannic, Alsatian, Swiss German" in ISO 639. # noqa: E501 "gsw": "Alemannic German", "alemannic german": "Alemannic German", "swiss german": "Alemannic German", "alsatian german": "Alemannic German", "alsatian": "Alemannic German", # Alutor. ISO 639-3 only. "alr": "Alutor", "alutor": "Alutor", "alyutor": "Alutor", # Carrier. ISO 639-3 only. "crx": "Carrier", "carrier": "Carrier", # Central Franconian. Not an ISO 639 language. "central franconian": "Central Franconian", # Dalmatian. ISO 639-3 only. "dlm": "Dalmatian", "dalmatian": "Dalmatian", "dalmatic": "Dalmatian", # Dongxiang. ISO 639-3 only. "sce": "Dongxiang", "dongxiang": "Dongxiang", # Egyptian Arabic. ISO 639-3 only. "arz": "Egyptian Arabic", "egyptian arabic": "Egyptian Arabic", # Gamilaraay. ISO 639-3 only. "kld": "Gamilaraay", "gamilaraay": "Gamilaraay", "kamilaroi": "Gamilaraay", # German Low German. Not an ISO 639 language. "german low german": "German Low German", # Gulf Arabic. ISO 639-3 only. "afb": "Gulf Arabic", "gulf arabic": "Gulf Arabic", # Hadza. ISO 639-3 only. "hts": "Hadza", "hadza": "Hadza", # Hijazi Arabic. ISO 639-3 only. "acw": "Hijazi Arabic", "hijazi arabic": "Hijazi Arabic", # Hunsrik. ISO 639-3 only. "hrx": "Hunsrik", "hunsrik": "Hunsrik", # Interlingua. Would be "Interlingua (International Auxiliary Language Association)" in ISO 639. 
# noqa: E501 "ina": "Interlingua", "interlingua": "Interlingua", # K'iche'. ISO 639-3 only. "quc": "K'iche'", "k'iche'": "K'iche'", "quiché": "K'iche'", # Libyan Arabic. ISO 639-3 only. "ayl": "Libyan Arabic", "libyan arabic": "Libyan Arabic", # Ligurian. ISO 639-3 only. "lij": "Ligurian", "ligurian": "Ligurian", # Limburgish. Would be "Limburgan, Limburger, Limburgish" in ISO 639. "lim": "Limburgish", "limburgish": "Limburgish", "limburgan": "Limburgish", "limburger": "Limburgish", "limburgic": "Limburgish", # Livonian. Would be "Liv" in ISO 639. "liv": "Livonian", "livonian": "Livonian", # Lü. ISO 639-3 only. "khb": "Lü", "lü": "Lü", # Mauritian Creole. Would be "Morisyen" in ISO 639. "mfe": "Mauritian Creole", "mauritian creole": "Mauritian Creole", "morisyen": "Mauritian Creole", "morisien": "Mauritian Creole", # Middle Dutch. Would be "Middle Dutch (ca. 1050-1350)" in ISO 639. "dum": "Middle Dutch", "middle dutch": "Middle Dutch", # Middle Low German. ISO 639-3 only. "gml": "Middle Low German", "middle low german": "Middle Low German", # Middle Welsh. ISO 639-3 only. "wlm": "Middle Welsh", "middle welsh": "Middle Welsh", # Min Nan. ISO 639-3 only. "nan": "Min Nan", "min nan": "Min Nan", # Mon. ISO 639-3 only. "mnw": "Mon", # North Frisian. Would be "Northern Frisian" in ISO 639. "frr": "North Frisian", "north frisian": "North Frisian", # Occitan. Would be "Occitan (post 1500)" in ISO 639. "oci": "Occitan", "occitan": "Occitan", # Old French. Would be "Old French (842-ca. 1400)" in ISO 639. "fro": "Old French", "old french": "Old French", # Old High German. Would be "Old High German (ca. 750-1050)" in ISO 639. "goh": "Old High German", "old high german": "Old High German", # Old Norse. Would be "Norse, Old" by the iso639 package. "non": "Old Norse", "old norse": "Old Norse", # Old Portuguese. Not an ISO 639 language. "old portuguese": "Old Portuguese", "roa-opt": "Old Portuguese", "opt": "Old Portuguese", # Old Saxon. ISO 639-3 only. "osx": "Old Saxon", "old saxon": "Old Saxon", # Old Spanish. ISO 639-3 only. "osp": "Old Spanish", "old spanish": "Old Spanish", # Old Tupi. Not an ISO 639 language. "tpw": "Old Tupi", "old tupi": "Old Tupi", "classical tupi": "Old Tupi", # Pashto. Would be "Pashto, Pushto" in ISO 639. "pus": "Pashto", "pashto": "Pashto", "pushto": "Pashto", # Piedmontese. ISO 639-3 only. "pms": "Piedmontese", "piedmontese": "Piedmontese", # Pipil. ISO 639-3 only. "ppl": "Pipil", "pipil": "Pipil", "nawat": "Pipil", "nicarao": "Pipil", # Pitjantjatjara. ISO 639-3 only. "pjt": "Pitjantjatjara", "pitjantjatjara": "Pitjantjatjara", # Punjabi. Would be "Panjabi, Punjabi" in ISO 639. "pan": "Punjabi", "panjabi": "Punjabi", "punjabi": "Punjabi", # Scanian. Not an ISO 639 language. "scanian": "Scanian", # Scottish Gaelic. Would be "Gaelic, Scottish Gaelic" in ISO 639. "gla": "Scottish Gaelic", "scottish gaelic": "Scottish Gaelic", "gaelic": "Scottish Gaelic", # Sylheti. ISO 639-3 only. "syl": "Sylheti", "sylheti": "Sylheti", # Taos. Would be "Northern Tiwa" in ISO 639. "twf": "Taos", "taos": "Taos", "northern tiwa": "Taos", # Tongan. Would be "Tonga (Tonga Islands)" in ISO 639. "ton": "Tongan", "tongan": "Tongan", "tonga": "Tongan", # Tzotzil. ISO 639-3 only. "tzo": "Tzotzil", "tzotzil": "Tzotzil", # Uyghur. Would be "Uighur, Uyghur" in ISO 639. "uig": "Uyghur", "uighur": "Uyghur", "uyghur": "Uyghur", # Wauja. Would be "Waurá" in ISO 639. "wau": "Wauja", "waurá": "Wauja", "wauja": "Wauja", # West Frisian. Would be "Western Frisian" in ISO 639. 
"fry": "West Frisian", "west frisian": "West Frisian", "western frisian": "West Frisian", # Western Apache. ISO 639-3 only. "apw": "Western Apache", "western apache": "Western Apache", # Westrobothnian. Not an ISO 639 language. "westrobothnian": "Westrobothnian", # White Hmong. Would be "Hmong Daw" in ISO 639. "mww": "White Hmong", "white hmong": "White Hmong", # Zazaki. Would be "Dimili, Dimli (macrolanguage), Kirdki, Kirmanjki (macrolanguage), Zaza, Zazaki" in ISO 639. # noqa: E501 "zza": "Zazaki", "zazaki": "Zazaki", "zaza": "Zazaki", "dimili": "Zazaki", "dimli": "Zazaki", "kirdki": "Zazaki", "kirmanjki": "Zazaki", # Okinawan. ISO 639-3 only. "ryu": "Okinawan", "okinawan": "Okinawan", "central okinawan": "Okinawan", # Ottoman Turkish. Would be "Ottoman Turkish (1500-1928)" in ISO 639. "ota": "Ottoman Turkish", "ottoman turkish": "Ottoman Turkish", # Brunei Malay. Would be "Brunei" in ISO 639. ISO 693-3 only. "kxd": "Brunei Malay", "brunei malay": "Brunei Malay", # Mecayapan Nahuatl. # Would be "Isthmus-Mecayapan Nahuatl" in ISO 639. ISO 693-3 only. "nhx": "Mecayapan Nahuatl", "mecayapan nahuatl": "Mecayapan Nahuatl", # Tetelcingo Nahuatl. ISO 639-3 only. "nhg": "Tetelcingo Nahuatl", "tetelcingo nahuatl": "Tetelcingo Nahuatl", # Bouyei. ISO 639-3 only. "pcc": "Bouyei", "bouyei": "Bouyei", # Lamboya. ISO 639-3 only. "lmy": "Laboya", "laboya": "Laboya", # Moroccan Arabic. ISO 639-3 only. "ary": "Moroccan Arabic", "moroccan arabic": "Moroccan Arabic", # Mandarin Chinese. ISO 639-3 only. "cmn": "Chinese", "chinese": "Chinese", # Abkhaz. Would be Abkhazian in ISO 639. "abk": "Abkhaz", "abkhaz": "Abkhaz", # Avar. Would be Avaric in ISO 639. "ava": "Avar", "avar": "Avar", # Buryat. Would be Buriat in ISO 639. "bua": "Buryat", "buryat": "Buryat", # Chukchi. Would be Chukot in ISO 639. "ckt": "Chukchi", "chukchi": "Chukchi", # Burushaski. ISO 639-3 only. "bsk": "Burushaski", "burushaski": "Burushaski", # Evenki. Not in iso639 lib. "evn": "Evenki", "evenki": "Evenki", # Southern Yukaghir. Not in iso639 lib. "yux": "Southern Yukaghir", "southern yukaghir": "Southern Yukaghir", # Tundra Nenets. Not in iso639 lib. "yrk": "Tundra Nenets", "tundra nenets": "Tundra Nenets", # Estonian. Not in iso639 lib. "ekk": "Estonian", "estonian": "Estonian", # Livvi. Not in iso639 lib. "olo": "Livvi", "livvi": "Livvi", # Kildin Sami. Not in iso639 lib. "sjd": "Kildin Sami", "kildin sami": "Kildin Sami", # Northern Yukaghir. Not in iso639 lib. "ykg": "Northern Yukaghir", "northern yukaghir": "Northern Yukaghir", # Nanai. Not in iso639 lib. "gld": "Nanai", "nanai": "Nanai", # Greenlandic. Would be Kalaallisut in ISO 639. "kal": "Greenlandic", "greenlandic": "Greenlandic", # Khanty. Not in iso639 lib. "kca": "Khanty", "khanty": "Khanty", # Ket. Not in iso639 lib. "ket": "Ket", # Komi-Permyak. Not in iso639 lib. "koi": "Komi-Permyak", "komi-permyak": "Komi-Permyak", # Komi-Zyrian. Not in iso639 lib. "kpv": "Komi-Zyrian", "komi-zyrian": "Komi-Zyrian", # Lak. Not in iso639 lib. "lbe": "Lak", "lak": "Lak", # Lezgi. Would be Lezghian in ISO 639. "lez": "Lezgi", "lezgi": "Lezgi", # Eastern Mari. Not in iso639 lib. "mhr": "Eastern Mari", "eastern mari": "Eastern Mari", # Mansi. Not in iso639 lib. "mns": "Mansi", "mansi": "Mansi", # Nganasan. Not in iso639 lib. "nio": "Nganasan", "nganasan": "Nganasan", # Nivkh. Not in iso639 lib. "niv": "Nivkh", "nivkh": "Nivkh", # Polabian. Note: The code is "sla" in ISO 639-2. "pox": "Polabian", "polabian": "Polabian", # Bahnar. Not in iso639 lib. 
"bdq": "Bahnar", "bahnar": "Bahnar", # Jeju or Jejueo. Not in iso639 lib. "jje": "Jeju", "jeju": "Jeju", "jejueo": "Jeju", # Lashi. Not in iso639 lib. "lsi": "Lashi", "lashi": "Lashi", # Miyako. Not in iso639 lib. "mvi": "Miyako", "miyako": "Miyako", # Pennsylvania German. Not in iso639 lib. "pdc": "Pennsylvania German", "pennsylvania german": "Pennsylvania German", # Namuyi. Not in iso639 lib. "nmy": "Namuyi", "namuyi": "Namuyi", # Tuvan. Would be "Tuvinian" in ISO 639. "tyv": "Tuvan", "tuvan": "Tuvan", # Bikol Central. Would be "Central Bikol" in ISO 639. "bcl": "Bikol Central", "bikol central": "Bikol Central", # Emilian. ISO 639-3 only. "egl": "Emilian", "emilian": "Emilian", # Ingrian. ISO 639-3 only. "izh": "Ingrian", "ingrian": "Ingrian", # Latgalian. ISO 639-3 only. "ltg": "Latgalian", "latgalian": "Latgalian", # San Pedro Amuzgos Amuzgo. ISO 639-3 only. "azg": "San Pedro Amuzgos Amuzgo", "san pedro amuzgos amuzgo": "San Pedro Amuzgos Amuzgo", # Kyrgyz. Would be "Kirghiz" in ISO 639. "kir": "Kyrgyz", "kyrgyz": "Kyrgyz", # Middle Irish. Would be "Irish, Middle (900-1200)" in ISO 639. "mga": "Middle Irish", "middle irish": "Middle Irish", # Middle Korean. ISO 639-3 only: "Korean, Middle (10th–16th centuries)". "okm": "Middle Korean", "middle korean": "Middle Korean", # Northern Kurdish. Looks ISO 639-3 only. "kmr": "Northern Kurdish", "northern kurdish": "Northern Kurdish", "kurmanji": "Northern Kurdish", "dng": "Dungan", # ISO 639-3. "ofs": "Old Frisian", # ISO 639-3. # Ilocano. "ilo" resolved to "Iloko" by iso639 lib. "ilo": "Ilocano", "iloko": "Ilocano", "ilocano": "Ilocano", # Jamaican Creole. ISO 639-3 only. "jam": "Jamaican Creole", "jamaican creole": "Jamaican Creole", # Limbu. ISO 639-3 only. "lif": "Limbu", "limbu": "Limbu", # Lombard. ISO 639-3 only. "lmo": "Lombard", "lombard": "Lombard", # Murui Huitoto. ISO 639-3 only. "huu": "Murui Huitoto", "murui huitoto": "Murui Huitoto", # Newar. Would be "Newari" or "Nepal Bhasa" in ISO 639. "new": "Newar", "newar": "Newar", "newari": "Newar", "nepal bhasa": "Newar", # Norman. ISO 639-3 only. "nrf": "Norman", "norman": "Norman", "jèrriais": "Norman", # Phalura. ISO 639-3 only. "phl": "Phalura", "phalura": "Phalura", # Plains Cree. ISO 639-3 only. "crk": "Plains Cree", "plains cree": "Plains Cree", # Pnar. ISO 639-3 only. "pbv": "Pnar", "pnar": "Pnar", # Saterland Frisian. ISO 639-3 only. "stq": "Saterland Frisian", "saterland frisian": "Saterland Frisian", "saterfriesisch": "Saterland Frisian", # South Levantine Arabic. ISO 639-3 only. "ajp": "South Levantine Arabic", "south levantine arabic": "South Levantine Arabic", # Western Lawa: ISO 639-3 only. "lcp": "Western Lawa", # Eastern Lawa: ISO 639-3 only. "lwl": "Eastern Lawa", # Nyah Kur: ISO 639-3 only. "cbn": "Nyah Kur", # Atong (Sino-Tibetan language, not to be confused with Niger-Congo Atong). "aot": "Atong (India)", # ISO 639-3 only. # Lolopo: ISO 639-3 only. "ycl": "Lolopo", # Yamphu: ISO 639-3 only. "ybi": "Yamphu", # Khumi Chin: ISO 639-3 only. "cnk": "Khumi Chin", # Zou: ISO 639-3. "zom": "Zou", # Wiyot: ISO 639-3 only. "wiy": "Wiyot", # Central Atlas Tamazight (Berber). ISO 639-9 only. "tzm": "Central Atlas Tamazight", # Chibcha: ISO 639-3 only. "chb": "Chibcha", }
// // Created by Artem Murashko on 25.04.2021. // #ifndef WENDEXTAXI_CARTYPE_H #define WENDEXTAXI_CARTYPE_H enum class CarType { economy = 1, comfort = 2, comfortPlus = 3, business = 4 }; #endif //WENDEXTAXI_CARTYPE_H
import $ from "jquery"; import devices from "core/devices"; import resizeCallbacks from "core/utils/resize_callbacks"; import dblclickEvent from "events/dblclick"; import fx from "animation/fx"; import Color from "color"; import AgendaAppointmentsStrategy from "ui/scheduler/rendering_strategies/ui.scheduler.appointments.strategy.agenda"; import { DataSource } from "data/data_source/data_source"; import CustomStore from "data/custom_store"; import subscribes from "ui/scheduler/ui.scheduler.subscribes"; import dataUtils from "core/element_data"; import { SchedulerTestWrapper } from "./helpers.js"; import "common.css!"; import "generic_light.css!"; import "ui/scheduler/ui.scheduler"; QUnit.testStart(function() { $("#qunit-fixture").html( '<div id="scheduler">\ <div data-options="dxTemplate: { name: \'template\' }">Task Template</div>\ </div>'); }); function getDeltaTz(schedulerTz) { var defaultTz = -10800000; return schedulerTz * 3600000 + defaultTz; } const createInstance = function(options) { const instance = $("#scheduler").dxScheduler($.extend(options, { height: 600 })).dxScheduler("instance"); return new SchedulerTestWrapper(instance); }; QUnit.module("Integration: Agenda", { beforeEach: function() { fx.off = true; this.createInstance = function(options) { this.instance = $("#scheduler").dxScheduler($.extend(options, { height: 600 })).dxScheduler("instance"); }; this.clock = sinon.useFakeTimers(); }, afterEach: function() { fx.off = false; this.clock.restore(); } }); QUnit.test("Scheduler should have a right agenda work space", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda" }); var $element = this.instance.$element(); assert.ok($element.find(".dx-scheduler-work-space").dxSchedulerAgenda("instance"), "Work space is agenda on init"); }); QUnit.test("Scheduler should have a right rendering strategy for agenda view", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda" }); var renderingStrategy = this.instance.getLayoutManager().getRenderingStrategyInstance(); assert.ok(renderingStrategy instanceof AgendaAppointmentsStrategy, "Strategy is OK"); }); QUnit.test("showAllDayPanel option shouldn't have any effect on agenda", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 22), showAllDayPanel: false, dataSource: [ { startDate: new Date(2016, 1, 22, 1), endDate: new Date(2016, 1, 24, 1, 30) } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 3, "Appointment count is OK"); }); QUnit.test("Appointments should not be resizable/draggable if current view is agenda", function(assert) { this.createInstance({ views: ["agenda", "day"], currentView: "agenda" }); var currentDevice = devices.current(), isMobile = currentDevice.phone || currentDevice.tablet; var appointments = this.instance.getAppointmentsInstance(); assert.notOk(appointments.option("allowResize"), "Appointment is not resizable"); assert.notOk(appointments.option("allowDrag"), "Appointment is not draggable"); this.instance.option("currentView", "day"); if(!isMobile) { assert.ok(appointments.option("allowResize"), "Appointment is resizable"); assert.ok(appointments.option("allowDrag"), "Appointment is draggable"); } }); QUnit.test("Appointments should not be resizable/draggable if current view is agenda and view is object", function(assert) { this.createInstance({ views: ["day", { type: "agenda", name: "My Agenda" }], currentView: "My Agenda" }); var currentDevice = 
devices.current(), isMobile = currentDevice.phone || currentDevice.tablet; var appointments = this.instance.getAppointmentsInstance(); assert.notOk(appointments.option("allowResize"), "Appointment is not resizable"); assert.notOk(appointments.option("allowDrag"), "Appointment is not draggable"); this.instance.option("currentView", "day"); if(!isMobile) { assert.ok(appointments.option("allowResize"), "Appointment is resizable"); assert.ok(appointments.option("allowDrag"), "Appointment is draggable"); } }); QUnit.test("Agenda should contain a right appointment quantity", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 28, 1, 30) } ] }); var appointmentCount = 0; this.instance.$element().find(".dx-scheduler-appointment").each(function() { var apptData = dataUtils.data($(this).get(0), "dxItemData"); if(!apptData.appointmentData) { assert.ok(apptData.startDate); assert.ok(apptData.endDate); } else { assert.ok(apptData.appointmentData.startDate); assert.ok(apptData.appointmentData.endDate); assert.ok(apptData.startDate); } appointmentCount++; }); assert.equal(appointmentCount, 7, "Appointment count is OK"); }); QUnit.test("Agenda appointments should have right sortedIndex", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 28, 1, 30) } ] }); var sortedIndex = 0; this.instance.$element().find(".dx-scheduler-appointment").each(function(index, appointment) { assert.equal(dataUtils.data($(appointment).get(0), "dxAppointmentSettings").sortedIndex, sortedIndex++); }); }); QUnit.test("Agenda should contain a right allDay appointment parts", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 24, 0), endDate: new Date(2016, 1, 25, 0), allDay: true } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 1, "Appointment count is OK"); }); QUnit.test("Agenda should contain a right quantity of long-appointments", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 22, 1), endDate: new Date(2016, 2, 4, 1, 30) } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 7, "Appointment count is OK"); }); QUnit.test("Long and recurrent appointment parts should not have a reduced-icon and reduced class", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), recurrenceRuleExpr: "rRule", dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 25, 1, 30), rRule: "FREQ=DAILY;INTERVAL=3" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.notOk($appointments.eq(0).hasClass("dx-scheduler-appointment-reduced"), "Appointment part hasn't a reduced-class"); 
assert.equal($appointments.eq(0).find(".dx-scheduler-appointment-reduced-icon").length, 0, "Appointment part hasn't a reduced-icon"); assert.notOk($appointments.eq(1).hasClass("dx-scheduler-appointment-reduced"), "Appointment part hasn't a reduced-class"); assert.equal($appointments.eq(1).find(".dx-scheduler-appointment-reduced-icon").length, 0, "Appointment part hasn't a reduced-icon"); assert.notOk($appointments.eq(4).hasClass("dx-scheduler-appointment-reduced"), "Appointment part hasn't a reduced-class"); assert.equal($appointments.eq(4).find(".dx-scheduler-appointment-reduced-icon").length, 0, "Appointment part hasn't a reduced-icon"); }); QUnit.test("Particular recurrence appt should have a correct data", function(assert) { this.createInstance({ views: ["agenda"], resources: [ { field: "ownerId", dataSource: [{ id: 1, color: "#ff0000" }, { id: 2, color: "#0000ff" }] } ], groups: ["ownerId"], currentView: "agenda", currentDate: new Date(2015, 2, 23), recurrenceEditMode: "occurrence", dataSource: [ { startDate: new Date(2015, 2, 22, 1), endDate: new Date(2015, 2, 22, 1, 30), text: "a", recurrenceRule: "FREQ=DAILY", ownerId: 1 } ] }); var apptIndex = 0; sinon.stub(this.instance, "showAppointmentPopup", function(appData, createNew, singleAppData) { var expectedDate = new Date(2015, 2, 23 + apptIndex); expectedDate.setHours(1); assert.equal(singleAppData.startDate.getTime(), expectedDate.getTime(), "Start date is OK"); }); this.instance.$element().find(".dx-scheduler-appointment").each(function() { var $appt = $(this); assert.equal($appt.find(".dx-scheduler-appointment-title").text(), "a", "Title is OK"); assert.equal(new Color($appt.css("backgroundColor")).toHex(), "#ff0000", "Appointment color is OK"); $appt.trigger("dxdblclick"); apptIndex++; }); }); QUnit.test("Particular recurrence appt data calculation", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2015, 0, 29), dataSource: [] }); var renderingStrategy = this.instance.getRenderingStrategyInstance(); var rows = [ [0, 1, 0, 2, 1, 1, 1], [3, 0, 1, 0, 1, 1, 1] ]; var expectedResults = [ new Date(2015, 0, 30), new Date(2015, 1, 1), new Date(2015, 1, 1), new Date(2015, 1, 2), new Date(2015, 1, 3), new Date(2015, 1, 4), new Date(2015, 0, 29), new Date(2015, 0, 29), new Date(2015, 0, 29), new Date(2015, 0, 31), new Date(2015, 1, 2), new Date(2015, 1, 3), new Date(2015, 1, 4) ]; for(var i = 0; i <= 12; i++) { assert.equal(renderingStrategy.getDateByIndex(i, rows, new Date(2015, 0, 29)).getTime(), expectedResults[i].getTime(), "Date is OK"); } }); QUnit.test("AllDay appointment should have specific content on agenda view", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30), allDay: true } ] }); var $contentDetails = this.instance.$element().find(".dx-scheduler-appointment-content-details"), $appointmentAllDayTitle = this.instance.$element().find(".dx-scheduler-appointment").eq(0).find(".dx-scheduler-appointment-content-allday"); assert.equal($contentDetails.get(0).firstChild, $appointmentAllDayTitle.get(0), "AllDay title is the first element of content"); assert.equal($appointmentAllDayTitle.length, 1, "Appointment has an allDay title"); assert.ok($appointmentAllDayTitle.is(":visible"), "AllDay title is visible"); }); QUnit.test("Appointment parts should have appointmentSettings field", function(assert) { this.createInstance({ 
views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 22, 1), endDate: new Date(2016, 2, 4, 1, 30) } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.ok(dataUtils.data($appointments.get(1), "dxItemData").settings, "Appointment part has special field for settings"); assert.equal(dataUtils.data($appointments.get(1), "dxItemData").settings.startDate.getTime(), new Date(2016, 1, 25, 0).getTime(), "Current date of appointment part is OK"); assert.deepEqual(dataUtils.data($appointments.get(0), "dxItemData").startDate, dataUtils.data($appointments.get(1), "dxItemData").startDate, "Appointments data is OK"); }); QUnit.test("Agenda should contain a right quantity of recurrence appointments", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30), recurrenceRule: "FREQ=DAILY" }, { startDate: new Date(2016, 1, 22, 1), endDate: new Date(2016, 1, 22, 1, 30), recurrenceRule: "FREQ=DAILY" }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 28, 1, 30) } ] }); var appointmentCount = 0; this.instance.$element().find(".dx-scheduler-appointment").each(function() { var apptData = dataUtils.data($(this)[0], "dxItemData"); if(!apptData.appointmentData) { assert.ok(apptData.startDate); assert.ok(apptData.endDate); } else { assert.ok(apptData.appointmentData.startDate); assert.ok(apptData.appointmentData.endDate); assert.ok(apptData.startDate); } appointmentCount++; }); assert.equal(appointmentCount, 20, "Appointment count is OK"); }); QUnit.test("Agenda should contain a right quantity of recurrence long appointments", function(assert) { this.createInstance({ views: ["agenda", "week"], currentView: "agenda", currentDate: new Date(2016, 1, 24).toString(), endDateExpr: "End", startDateExpr: "Start", recurrenceRuleExpr: "RecurrenceRule", dataSource: [ { Start: new Date(2016, 1, 22, 1).toString(), End: new Date(2016, 1, 23, 1, 30).toString(), RecurrenceRule: "FREQ=DAILY;INTERVAL=3", text: "appointment 1" } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 4, "Appointment count is OK"); this.instance.option({ currentDate: new Date(2015, 1, 23), dataSource: [ { Start: new Date(2015, 1, 23, 1), End: new Date(2015, 1, 24, 5), RecurrenceRule: "FREQ=DAILY;INTERVAL=3", text: "appointment 2" } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 5, "Appointment count is OK"); }); QUnit.test("Agenda should contain a right quantity of long appointments after changing currentView", function(assert) { this.createInstance({ views: ["agenda", "week"], currentView: "agenda", currentDate: new Date(2016, 1, 24).toString(), endDateExpr: "End", startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1), End: new Date(2016, 1, 26, 5), text: "appointment 1" } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 3, "Appointment count is OK"); this.instance.option("currentView", "week"); this.instance.option("currentView", "agenda"); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 3, "Appointment count is OK"); }); QUnit.test("Grouped agenda should contain a right appointment quantity", function(assert) { this.createInstance({ views: 
["agenda"], groups: ["ownerId", "roomId"], resources: [ { field: "ownerId", allowMultiple: true, dataSource: [{ id: 1 }, { id: 2 }] }, { field: "roomId", allowMultiple: true, dataSource: [{ id: 1 }, { id: 2 }] } ], currentView: "agenda", currentDate: new Date(2016, 1, 24).toString(), endDateExpr: "End", startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 25, 1).toString(), End: new Date(2016, 1, 25, 1, 30).toString(), ownerId: [1, 2], roomId: 1, text: "one" }, { Start: new Date(2016, 1, 26, 1).toString(), End: new Date(2016, 1, 26, 1, 30).toString(), ownerId: 1, roomId: [1, 2], text: "two" } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 4, "Appointment count is OK"); }); QUnit.test("Grouped agenda should contain a right long-appointment quantity", function(assert) { this.createInstance({ views: ["agenda"], groups: ["ownerId", "roomId"], resources: [ { field: "ownerId", allowMultiple: true, dataSource: [{ id: 1 }, { id: 2 }] }, { field: "roomId", allowMultiple: true, dataSource: [{ id: 1 }, { id: 2 }] } ], currentView: "agenda", currentDate: new Date(2016, 1, 24).toString(), endDateExpr: "End", startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1).toString(), End: new Date(2016, 1, 26, 1, 30).toString(), ownerId: [1, 2], roomId: 1, text: "one" } ] }); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 6, "Appointment count is OK"); }); QUnit.test("Grouped appointments should have a correct color", function(assert) { this.createInstance({ views: ["agenda"], groups: ["roomId", "ownerId"], resources: [ { field: "ownerId", dataSource: [{ id: 1, color: "#ff0000" }, { id: 2, color: "#0000ff" }], allowMultiple: true } ], currentView: "agenda", currentDate: new Date(2016, 1, 24).toString(), endDateExpr: "End", startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1).toString(), End: new Date(2016, 1, 25, 1, 30).toString(), ownerId: 1, text: "one" }, { Start: new Date(2016, 1, 24, 1).toString(), End: new Date(2016, 1, 25, 1, 30).toString(), ownerId: 2, text: "two" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal(new Color($appointments.eq(0).css("backgroundColor")).toHex(), "#ff0000", "Appointment color is OK"); assert.equal(new Color($appointments.eq(1).css("backgroundColor")).toHex(), "#ff0000", "Appointment color is OK"); assert.equal(new Color($appointments.eq(2).css("backgroundColor")).toHex(), "#0000ff", "Appointment color is OK"); assert.equal(new Color($appointments.eq(3).css("backgroundColor")).toHex(), "#0000ff", "Appointment color is OK"); }); QUnit.test("Grouped appointments should be rendered if resources aren't defined", function(assert) { this.createInstance({ views: ["agenda"], groups: ["roomId", "ownerId"], currentView: "agenda", currentDate: new Date(2016, 1, 24).toString(), endDateExpr: "End", startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1).toString(), End: new Date(2016, 1, 24, 1, 30).toString(), ownerId: 1, text: "one" }, { Start: new Date(2016, 1, 24, 1).toString(), End: new Date(2016, 1, 24, 1, 30).toString(), ownerId: 2, text: "two" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal($appointments.length, 2, "Appointments are rendered"); }); QUnit.test("Group row count should depend on existing appointment count", function(assert) { this.createInstance({ views: ["agenda"], groups: ["roomId", "ownerId"], resources: [ { field: "roomId", 
allowMultiple: true, dataSource: [ { id: 1 }, { id: 2 } ] }, { field: "ownerId", allowMultiple: true, dataSource: [ { id: 1 }, { id: 2 } ] } ], currentView: "agenda", currentDate: new Date(2015, 2, 4).toString(), height: 800, dataSource: [ { text: "Task 2", roomId: [1, 2, 3], ownerId: 1, startDate: new Date(2015, 2, 5, 8, 0).toString(), endDate: new Date(2015, 2, 7, 9, 0).toString() }, { text: "Task 3", roomId: [1, 2], ownerId: 1, startDate: new Date(2015, 2, 4, 1).toString(), endDate: new Date(2015, 2, 4, 2).toString() } ] }); var $groupTable = this.instance.$element().find(".dx-scheduler-group-table"), $rows = $groupTable.find(".dx-scheduler-group-row"); assert.equal($rows.length, 2, "Row count is OK"); assert.equal($rows.eq(0).find(".dx-scheduler-group-header").length, 2, "Cell count is OK"); assert.equal($rows.eq(1).find(".dx-scheduler-group-header").length, 2, "Cell count is OK"); }); QUnit.test("Group header height should depend on existing appointment count", function(assert) { this.createInstance({ views: ["agenda"], groups: ["roomId", "ownerId"], resources: [ { field: "roomId", allowMultiple: true, dataSource: [ { id: 1 }, { id: 2 } ] }, { field: "ownerId", allowMultiple: true, dataSource: [ { id: 1 }, { id: 2 } ] } ], currentView: "agenda", currentDate: new Date(2015, 2, 4).toString(), dataSource: [ { text: "Task 1", roomId: [1, 2], ownerId: 1, startDate: new Date(2015, 2, 5, 8, 0).toString(), endDate: new Date(2015, 2, 7, 9, 0).toString() } ] }); var $groupTable = this.instance.$element().find(".dx-scheduler-group-table"), $headers = $groupTable.find(".dx-scheduler-group-header-content"); assert.equal($headers.length, 4, "Header count is OK"); assert.roughEqual($headers.eq(1).outerHeight(), 240, 2, "Header height is OK"); assert.roughEqual($headers.eq(3).outerHeight(), 240, 2, "Header height is OK"); }); QUnit.test("Group header should be rendered in right place (T374948)", function(assert) { this.createInstance({ views: ["agenda"], groups: ['priorityId'], currentView: "agenda", startDayHour: 6, endDayHour: 24, height: 600 }); var instance = this.instance, priorityData = [ { text: "Low Priority", id: 1, color: "#1e90ff" }, { text: "High Priority", id: 2, color: "#ff9747" } ]; instance.option("currentDate", new Date(2015, 4, 25)); instance.option("dataSource", [ { text: "Website Re-Design Plan", priorityId: 2, startDate: new Date(2015, 4, 25, 9, 0), endDate: new Date(2015, 4, 25, 11, 30) }, { text: "Book Flights to San Fran for Sales Trip", priorityId: 2, startDate: new Date(2015, 4, 25, 12, 0), endDate: new Date(2015, 4, 25, 13, 0) }, { text: "Install New Router in Dev Room", priorityId: 1, startDate: new Date(2015, 4, 25, 14, 30), endDate: new Date(2015, 4, 25, 15, 30) }, ] ); instance.option("resources", [{ field: "priorityId", allowMultiple: false, dataSource: priorityData, label: "Priority" }]); var $groupTable = instance.$element().find(".dx-scheduler-group-table"), $container = instance.$element().find(".dx-scheduler-date-table-scrollable .dx-scrollable-content"); assert.equal($groupTable.length, 1, "Group table was rendered"); assert.equal($container.children().get(0), $groupTable.get(0), "Group table was rendered in right place"); }); QUnit.test("Row count should be correct if appt ends at 0h 0m 0sec(T378182)", function(assert) { this.createInstance({ dataSource: [{ clubId: 1, text: "One", startDate: "2016-06-15T19:00:00.000Z", endDate: "2016-06-15T21:00:00.000Z" }], resources: [ { field: "clubId", dataSource: [{ id: 1 }] } ], groups: ["clubId"], views: ["agenda"], 
currentView: "agenda", currentDate: new Date(2016, 5, 12) }); assert.equal(this.instance.$element().find(".dx-scheduler-date-table-row").length, 1, "Row count is OK"); }); QUnit.test("Agenda should contain a right appointment sorting", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 26, 1), endDate: new Date(2016, 1, 27, 1, 30), text: "e" }, { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 28, 1, 30), text: "d" }, { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30), text: "a" }, { Start: new Date(2016, 1, 25, 1), endDate: new Date(2016, 1, 25, 1, 30), text: "b" }, { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30), text: "c" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal(dataUtils.data($appointments.get(0), "dxItemData").text, "d"); // 24 assert.equal(dataUtils.data($appointments.get(1), "dxItemData").text, "a"); // 24 assert.equal(dataUtils.data($appointments.get(2), "dxItemData").text, "c"); // 24 assert.equal(dataUtils.data($appointments.get(3), "dxItemData").text, "d"); // 25 assert.equal(dataUtils.data($appointments.get(4), "dxItemData").text, "b"); // 25 assert.equal(dataUtils.data($appointments.get(5), "dxItemData").text, "d"); // 26 assert.equal(dataUtils.data($appointments.get(6), "dxItemData").text, "e"); // 26 assert.equal(dataUtils.data($appointments.get(7), "dxItemData").text, "e"); // 27 assert.equal(dataUtils.data($appointments.get(8), "dxItemData").text, "d"); // 27 assert.equal(dataUtils.data($appointments.get(9), "dxItemData").text, "d"); // 28 }); QUnit.test("Agenda should contain a right appointment sorting after adding of the new appointment", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 6), endDate: new Date(2016, 1, 24, 6, 30), text: "a" }, { Start: new Date(2016, 1, 27, 1), endDate: new Date(2016, 1, 27, 1, 30), text: "b" } ] }); this.instance.addAppointment({ Start: new Date(2016, 1, 25, 1), endDate: new Date(2016, 1, 25, 1, 30), text: "c" }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal(dataUtils.data($appointments.get(0), "dxItemData").text, "a"); assert.equal(dataUtils.data($appointments.get(1), "dxItemData").text, "c"); assert.equal(dataUtils.data($appointments.get(2), "dxItemData").text, "b"); }); QUnit.test("Agenda should contain a right appointment sorting after updating of the", function(assert) { var items = [ { Start: new Date(2016, 1, 24, 6), endDate: new Date(2016, 1, 24, 6, 30), text: "a" }, { Start: new Date(2016, 1, 27, 1), endDate: new Date(2016, 1, 27, 1, 30), text: "b" } ]; this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDateExpr: "Start", dataSource: items }); this.instance.updateAppointment(items[0], { Start: new Date(2016, 1, 24, 6), endDate: new Date(2016, 1, 24, 9, 30), text: "a" }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal(dataUtils.data($appointments.get(0), "dxItemData").text, "a"); assert.equal(dataUtils.data($appointments.get(1), "dxItemData").text, "b"); }); QUnit.test("Agenda should contain a right recurrence appointment sorting", function(assert) { this.createInstance({ views: ["agenda"], 
currentView: "agenda", currentDate: new Date(2016, 1, 24), startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30), text: "d" }, { Start: new Date(2016, 1, 22, 5), endDate: new Date(2016, 1, 22, 5, 30), text: "e", recurrenceRule: "FREQ=DAILY" }, { Start: new Date(2016, 1, 23, 2), endDate: new Date(2016, 1, 23, 2, 30), text: "f", recurrenceRule: "FREQ=DAILY" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal(dataUtils.data($appointments.get(0), "dxItemData").text, "d"); // 24 assert.equal(dataUtils.data($appointments.get(1), "dxItemData").text, "f"); // 24 assert.equal(dataUtils.data($appointments.get(2), "dxItemData").text, "e"); // 24 }); QUnit.test("Long & recurrence appts should be sorted correctly", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2015, 1, 23), dataSource: [ { startDate: new Date(2015, 1, 22, 1), endDate: new Date(2015, 1, 22, 1, 30), text: "a", recurrenceRule: "FREQ=DAILY" }, { startDate: new Date(2015, 1, 23, 3), endDate: new Date(2015, 1, 28, 3, 30), text: "long..." }, ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"), recurrenceApptsIndices = [0, 3, 5, 7, 9, 11, 12], longApptsIndices = [1, 2, 4, 6, 8, 10]; $appointments.each(function(index, appt) { var $appt = $(appt), positionInArray; if($appt.hasClass("dx-scheduler-appointment-recurrence")) { positionInArray = recurrenceApptsIndices.indexOf(index); assert.notOk($appt.hasClass("dx-scheduler-appointment-reduced"), "Recurrence appt doesn't have 'reduced' class"); } else { positionInArray = longApptsIndices.indexOf(index); } assert.ok(positionInArray > -1, "Appointment are rendered correctly"); }); }); QUnit.test("Appointments should have correct width & height", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24) }); var agenda = this.instance.getWorkSpace(), rowHeight = 77, $element = this.instance.$element(), timePanelWidth = $element.find(".dx-scheduler-time-panel").outerWidth(), expectedWidth = $element.find(".dx-scheduler-date-table").outerWidth() - timePanelWidth, agendaStub = sinon.stub(agenda, "_getRowHeight").returns(rowHeight); try { this.instance.option("dataSource", [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) } ]); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.roughEqual($appointments.eq(0).outerHeight(), 2.001, rowHeight, "Appointment height is OK"); assert.equal(parseInt($appointments.eq(0).css("marginBottom"), 10), 5, "Appointment offset is OK"); assert.roughEqual($appointments.eq(0).outerWidth(), 2.001, expectedWidth, "Appointment width is OK"); assert.roughEqual($appointments.eq(1).outerHeight(), 2.001, rowHeight, "Appointment height is OK"); assert.equal(parseInt($appointments.eq(1).css("marginBottom"), 10), 20, "Appointment offset is OK"); assert.roughEqual($appointments.eq(1).outerWidth(), 2.001, expectedWidth, "Appointment width is OK"); } finally { agendaStub.restore(); } }); QUnit.test("Grouped appointments should have a right offsets", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), groups: ["ownerId", "roomId"], resources: [ { field: "ownerId", dataSource: [{ id: 1 }, { id: 2 }], allowMultiple: true }, { field: 
"roomId", dataSource: [{ id: 1 }, { id: 2 }], allowMultiple: true } ], dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30), roomId: [1, 2], ownerId: [1, 2] }, { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30), roomId: [1, 2], ownerId: [1, 2] } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal(parseInt($appointments.eq(0).css("marginBottom"), 10), 5, "Appointment offset is OK"); assert.equal(parseInt($appointments.eq(1).css("marginBottom"), 10), 20, "Appointment offset is OK"); assert.equal(parseInt($appointments.eq(2).css("marginBottom"), 10), 5, "Appointment offset is OK"); assert.equal(parseInt($appointments.eq(3).css("marginBottom"), 10), 20, "Appointment offset is OK"); }); QUnit.test("Tooltip should appear by appointment click", function(assert) { const scheduler = createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) } ] }); scheduler.appointments.click(); assert.ok(scheduler.tooltip.isVisible(), "Tooltip is rendered"); }); QUnit.test("Agenda should be rerendered when data source is changed", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) } ] }); var $element = this.instance.$element(); assert.equal($element.find(".dx-scheduler-date-table-row").length, 1, "Date table rows are OK"); assert.equal($element.find(".dx-scheduler-time-panel-row").length, 1, "Time panel rows are OK"); this.instance.addAppointment({ startDate: new Date(2016, 1, 25, 1), endDate: new Date(2016, 1, 25, 1, 30) }); $element = this.instance.$element(); assert.equal($element.find(".dx-scheduler-date-table-row").length, 2, "Date table rows are OK"); assert.equal($element.find(".dx-scheduler-time-panel-row").length, 2, "Time panel rows are OK"); }); QUnit.test("Appointment count should be ok after dimensionChanged", function(assert) { this.createInstance({ currentDate: new Date(2016, 1, 11), currentView: "agenda", dataSource: [{ text: "a", allDay: true, startDate: new Date(2016, 1, 11, 10), endDate: new Date(2016, 1, 11, 15), recurrenceRule: "FREQ=DAILY" }] }); resizeCallbacks.fire(); assert.equal(this.instance._appointments.option("items").length, 7, "Appointments are OK before rendering"); }); QUnit.test("Appts should not be repainted when the 'editing' option is changed", function(assert) { this.createInstance({ currentDate: new Date(2016, 1, 11), currentView: "agenda", dataSource: [{ text: "a", allDay: true, startDate: new Date(2016, 1, 11, 10), endDate: new Date(2016, 1, 11, 15), recurrenceRule: "FREQ=DAILY" }] }); var apptsInstance = this.instance.getAppointmentsInstance(), repaintStub = sinon.stub(apptsInstance, "repaint"); this.instance.option("editing", { allowUpdating: false }); assert.equal(repaintStub.callCount, 0, "The 'repaint' method isn't called"); }); QUnit.test("No Data message should be rendered if agenda is empty", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [] }); var $element = this.instance.$element(), $message = $element.find(".dx-scheduler-agenda-nodata"); assert.equal($message.length, 1, "Message was rendered"); assert.equal($message.text(), "No data to display", "Message is correct"); }); 
QUnit.test("Custom No Data message should be rendered if agenda is empty", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [], noDataText: "No data" }); var $element = this.instance.$element(), $message = $element.find(".dx-scheduler-agenda-nodata"); assert.equal($message.length, 1, "Message was rendered"); assert.equal($message.text(), "No data", "Message is correct"); }); QUnit.test("No Data message should be rendered if agenda is empty, grouped agenda", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 26), dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 25, 1, 30), group: 1 } ], groups: ['group'], resources: [ { field: "group", allowMultiple: true, dataSource: [ { text: "Group1", id: 1 }, { text: "Group2", id: 2 } ] }] }); var $element = this.instance.$element(), $message = $element.find(".dx-scheduler-agenda-nodata"); assert.equal($message.length, 1, "Message was rendered"); assert.equal($message.text(), "No data to display", "Message is correct"); }); QUnit.test("No Data message should not be rendered if one group doesn't have appts, grouped agenda", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), height: 500, dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 2), groupID: 1 } ], groups: ['groupID'], resources: [ { field: "groupID", allowMultiple: true, dataSource: [ { text: "Group1", id: 1 }, { text: "Group2", id: 2 } ] }] }); var $element = this.instance.$element(), $message = $element.find(".dx-scheduler-agenda-nodata"), $apps = $element.find(".dx-scheduler-appointment"); assert.equal($message.length, 0, "Message is absent"); assert.equal($apps.length, 1, "Appointments was found"); }); QUnit.test("No Data message should be removed after dataSource changing", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), dataSource: [] }); this.instance.option("dataSource", [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 25, 1, 30) } ]); var $element = this.instance.$element(), $message = $element.find(".dx-scheduler-agenda-nodata"); assert.equal($message.length, 0, "Message was remover"); }); QUnit.test("The timeZone option should be processed correctly", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 4, 6), timeZone: "Asia/Ashkhabad", dataSource: [{ startDate: new Date(2016, 4, 7), startDateTimeZone: "Asia/Qyzylorda", endDate: new Date(2016, 4, 7, 0, 30), text: "a" }, { startDate: new Date(2016, 4, 7, 23), endDate: new Date(2016, 4, 7, 23, 59), text: "b" }] }); var $element = this.instance.$element(), $dateTableRows = $element.find(".dx-scheduler-date-table-row"), $timePanelRows = $element.find(".dx-scheduler-time-panel-row"); assert.equal($timePanelRows.length, 2, "Timepanel row count is OK"); assert.equal($dateTableRows.length, 2, "DateTable row count is OK"); }); QUnit.test("All-day appointment should not be duplicated with custom timezone", function(assert) { var tzOffsetStub = sinon.stub(subscribes, "getClientTimezoneOffset").returns(-10800000); try { this.clock.restore(); var timezoneDifference = getDeltaTz(5), getDate = function(date) { return new Date(date.getTime() - timezoneDifference); }; this.createInstance({ views: ["agenda"], currentView: "agenda", 
currentDate: new Date(2016, 4, 3), timeZone: "Asia/Ashkhabad", dataSource: [{ startDate: getDate(new Date(2016, 4, 4)), endDate: getDate(new Date(2016, 4, 5)) }] }); var $appts = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal($appts.length, 1, "Appt count is OK"); } finally { tzOffsetStub.restore(); } }); QUnit.test("All-day appointment should not be duplicated with custom timezone (T437288)", function(assert) { this.clock.restore(); this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2015, 4, 18), timeZone: "America/Los_Angeles", height: 300, dataSource: [{ startDate: "2015-05-25T00:00:00.000Z", endDate: "2015-05-26T00:00:00.000Z" }] }); var $appts = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal($appts.length, 1, "Appt count is OK"); }); QUnit.test("Recurring appointment and timepanel should be rendered correctly if DST makes sense(T444318)", function(assert) { // can be reproduced in PST timezone this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 10, 5), firstDayOfWeek: 1, height: 300, onAppointmentRendered: function(e) { var targetedAppointmentData = e.targetedAppointmentData; assert.equal(targetedAppointmentData.settings.startDate.getDate(), 10, "Appointment start date is OK"); assert.equal(targetedAppointmentData.settings.endDate.getDate(), 10, "Appointment end date is OK"); }, dataSource: [{ text: "test-rec", startDate: new Date(2016, 10, 3, 9, 0), endDate: new Date(2016, 10, 3, 9, 15), recurrenceRule: "FREQ=WEEKLY;INTERVAL=1" }] }); var $element = this.instance.$element(); var $appts = $element.find(".dx-scheduler-appointment"); var timePanelDate = $element.find(".dx-scheduler-agenda-date").text(); assert.equal($appts.length, 1, "Appt count is OK"); assert.equal(timePanelDate, "10 Thu", "Time panel date is OK"); }); QUnit.test("Recurring appointment and timepanel should be rendered correctly if DST makes sense(T444318), the second case", function(assert) { // can be reproduced in PST timezone this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 10, 6), firstDayOfWeek: 1, height: 300, dataSource: [{ text: "test-rec", startDate: new Date(2016, 10, 6, 1, 0), endDate: new Date(2016, 10, 6, 1, 15), recurrenceRule: "FREQ=WEEKLY;INTERVAL=1" }] }); var $element = this.instance.$element(); var $appts = $element.find(".dx-scheduler-appointment"); var $timePanelDateEl = $element.find(".dx-scheduler-agenda-date"); var timePanelDate = $timePanelDateEl.text(); assert.equal($appts.length, 1, "Appt count is OK"); assert.equal($timePanelDateEl.length, 1, "Timepanel cell count is OK"); assert.equal(timePanelDate, "6 Sun", "Time panel date is OK"); }); QUnit.test("dateCellTemplate should take cellElement with correct geometry (T453520)", function(assert) { this.createInstance({ currentView: "agenda", views: ["agenda"], height: 700, width: 700, currentDate: new Date(2016, 10, 28), dataSource: [{ startDate: new Date(2016, 10, 28, 1), endDate: new Date(2016, 10, 28, 2) }], dateCellTemplate: function(cellData, cellIndex, cellElement) { assert.equal($(cellElement).outerWidth(), 100, "Date cell width is OK"); assert.equal($(cellElement).outerHeight(), 80, "Date cell height is OK"); } }); }); QUnit.test("resourceCellTemplate should take cellElement with correct geometry (T453520)", function(assert) { this.createInstance({ currentView: "agenda", views: ["agenda"], height: 700, width: 700, groups: ["owner"], currentDate: new Date(2016, 10, 28), 
resources: [{ fieldExpr: "owner", dataSource: [{ id: 1, text: "a" }] }], dataSource: [{ startDate: new Date(2016, 10, 28, 1), endDate: new Date(2016, 10, 28, 2), owner: 1 }], resourceCellTemplate: function(cellData, cellIndex, cellElement) { assert.equal($(cellElement).outerWidth(), 80, "Resource cell width is OK"); assert.equal($(cellElement).outerHeight(), 80, "Resource cell height is OK"); } }); }); QUnit.test("Long appointment parts data should be correct", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDayHour: 8, endDayHour: 20, startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 27, 11, 30), text: "a" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal(dataUtils.data($appointments.get(0), "dxItemData").text, "a"); assert.equal(dataUtils.data($appointments.get(1), "dxItemData").text, "a"); assert.equal(dataUtils.data($appointments.get(2), "dxItemData").text, "a"); assert.equal(dataUtils.data($appointments.get(3), "dxItemData").text, "a"); assert.deepEqual(dataUtils.data($appointments.get(0), "dxItemData").Start, new Date(2016, 1, 24, 1)); // first part of long appointment has original startDate assert.deepEqual(dataUtils.data($appointments.get(1), "dxItemData").settings.Start, new Date(2016, 1, 25, 8)); assert.deepEqual(dataUtils.data($appointments.get(2), "dxItemData").settings.Start, new Date(2016, 1, 26, 8)); assert.deepEqual(dataUtils.data($appointments.get(3), "dxItemData").settings.Start, new Date(2016, 1, 27, 8)); assert.deepEqual(dataUtils.data($appointments.get(0), "dxItemData").endDate, new Date(2016, 1, 27, 11, 30)); // first part of long appointment has original endDate assert.deepEqual(dataUtils.data($appointments.get(1), "dxItemData").settings.endDate, new Date(2016, 1, 25, 20)); assert.deepEqual(dataUtils.data($appointments.get(2), "dxItemData").settings.endDate, new Date(2016, 1, 26, 20)); assert.deepEqual(dataUtils.data($appointments.get(3), "dxItemData").settings.endDate, new Date(2016, 1, 27, 11, 30)); }); QUnit.test("Long appointment parts targetedAppointmentData should be correct", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 25), firstDayOfWeek: 1, height: 300, onAppointmentRendered: function(e) { var targetedAppointmentData = e.targetedAppointmentData, originalAppointmentData = e.appointmentData; assert.deepEqual(targetedAppointmentData, originalAppointmentData, "Targeted appointment data is ok"); }, dataSource: [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 27, 11, 30), text: "a" } ] }); }); QUnit.test("Long appointment parts popup should have original data", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDayHour: 8, endDayHour: 20, startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 27, 1, 30), text: "a" } ] }); var $appointment = $(this.instance.$element()).find(".dx-scheduler-appointment").eq(1); $appointment.trigger(dblclickEvent.name); var detailsForm = this.instance.getAppointmentDetailsForm(), formData = detailsForm.option("formData"); assert.deepEqual(formData.Start, new Date(2016, 1, 24, 1), "start is correct"); assert.deepEqual(formData.endDate, new Date(2016, 1, 27, 1, 30), "end is correct"); assert.equal(formData.text, "a", "text is correct"); }); QUnit.test("Long 
appointment should be rendered correctly after changing view", function(assert) { this.createInstance({ views: ["agenda", "month"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDayHour: 8, endDayHour: 20, startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 27, 10), text: "a" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal($appointments.length, 4, "appointments are OK"); this.instance.option("currentView", "month"); var cellWidth = this.instance.$element().find(".dx-scheduler-date-table-cell").eq(0).outerWidth(); $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal($appointments.length, 1, "appointment is OK"); assert.roughEqual($appointments.eq(0).outerWidth(), cellWidth * 4, 2.5, "appointment size is OK"); }); QUnit.test("Timepanel rows count should be OK for long appointment", function(assert) { this.createInstance({ views: ["agenda", "month"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDayHour: 8, endDayHour: 20, startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 27, 10), text: "a" } ] }); var $element = this.instance.$element(); assert.equal($element.find(".dx-scheduler-time-panel-row").length, 4, "Time panel rows are OK"); }); QUnit.test("Timepanel rows count should be OK for long recurrence appointment", function(assert) { this.createInstance({ views: ["agenda"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDateExpr: "Start", recurrenceRuleExpr: "Recurrence", dataSource: [ { Start: new Date(2016, 1, 24, 22), endDate: new Date(2016, 1, 25, 10), text: "a", Recurrence: "FREQ=DAILY;COUNT=2" } ] }); var $element = this.instance.$element(); assert.equal($element.find(".dx-scheduler-time-panel-row").length, 3, "Time panel rows are OK"); }); QUnit.test("Long appointment should have a correct template", function(assert) { this.createInstance({ views: ["agenda", "month"], currentView: "agenda", currentDate: new Date(2016, 1, 24), startDayHour: 8, endDayHour: 20, startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 9, 30), endDate: new Date(2016, 1, 27, 10), text: "a" } ] }); var $appts = this.instance.$element().find(".dx-scheduler-appointment"), $firstContentDates = $appts.eq(0).find(".dx-scheduler-appointment-content-date"), $secondContentDates = $appts.eq(1).find(".dx-scheduler-appointment-content-date"), $lastContentDates = $appts.last().find(".dx-scheduler-appointment-content-date"); assert.equal($firstContentDates.first().text(), "9:30 AM", "Start date is correct"); assert.equal($firstContentDates.last().text(), "8:00 PM", "End date is correct"); assert.equal($secondContentDates.first().text(), "8:00 AM", "Start date is correct"); assert.equal($secondContentDates.last().text(), "8:00 PM", "End date is correct"); assert.equal($lastContentDates.first().text(), "8:00 AM", "Start date is correct"); assert.equal($lastContentDates.last().text(), "10:00 AM", "End date is correct"); }); QUnit.test("Agenda should contain a right appointment quantity after dataSource reloading", function(assert) { var data = [ { startDate: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 24, 1, 30) }, ]; var dataSource = new DataSource({ store: new CustomStore({ load: function() { var d = $.Deferred(); setTimeout(function() { d.resolve(data); }, 100); return d.promise(); } }) }); this.createInstance({ views: ["agenda"], currentView: "agenda", 
currentDate: new Date(2016, 1, 24), dataSource: dataSource }); this.clock.tick(100); dataSource.load(); this.clock.tick(100); assert.equal(this.instance.$element().find(".dx-scheduler-appointment").length, 1, "Appointment count is OK"); }); QUnit.test("Appointments should be rendered correctly if agenda view is set as object", function(assert) { this.createInstance({ views: [{ type: "day", name: "My day" }, { type: "agenda", name: "My agenda" }], currentView: "My agenda", currentDate: new Date(2016, 1, 24), startDayHour: 8, endDayHour: 20, startDateExpr: "Start", dataSource: [ { Start: new Date(2016, 1, 24, 1), endDate: new Date(2016, 1, 27, 10), text: "a" } ] }); var $appointments = this.instance.$element().find(".dx-scheduler-appointment"); assert.equal($appointments.length, 4, "appointments are OK"); assert.equal($appointments.first().position().top, 0, "appointment position is OK"); assert.equal($appointments.last().position().top, 240, "appointment position is OK"); });
import calendar import six from six.moves.urllib.parse import urlparse import re import struct import base64 import time from ipaddress import AddressValueError from ipaddress import IPv4Address from ipaddress import IPv6Address from saml2 import time_util XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance' XSI_NIL = '{%s}nil' % XSI_NAMESPACE # --------------------------------------------------------- class NotValid(Exception): pass class OutsideCardinality(Exception): pass class MustValueError(ValueError): pass class ShouldValueError(ValueError): pass class ResponseLifetimeExceed(Exception): pass class ToEarly(Exception): pass # --------------------- validators ------------------------------------- # NCNAME = re.compile(r"(?P<NCName>[a-zA-Z_](\w|[_.-])*)") def valid_ncname(name): match = NCNAME.match(name) #if not match: # hack for invalid authnRequest/ID from meteor saml lib # raise NotValid("NCName") return True def valid_id(oid): valid_ncname(oid) def valid_any_uri(item): """very simplistic, ...""" try: part = urlparse(item) except Exception: raise NotValid("AnyURI") if part[0] == "urn" and part[1] == "": # A urn return True # elif part[1] == "localhost" or part[1] == "127.0.0.1": # raise NotValid("AnyURI") return True def valid_date_time(item): try: time_util.str_to_time(item) except Exception: raise NotValid("dateTime") return True def valid_url(url): try: _ = urlparse(url) except Exception: raise NotValid("URL") # if part[1] == "localhost" or part[1] == "127.0.0.1": # raise NotValid("URL") return True def validate_on_or_after(not_on_or_after, slack): if not_on_or_after: now = time_util.utc_now() nooa = calendar.timegm(time_util.str_to_time(not_on_or_after)) if now > nooa + slack: now_str=time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(now)) raise ResponseLifetimeExceed( "Can't use response, too old (now=%s + slack=%d > " \ "not_on_or_after=%s" % (now_str, slack, not_on_or_after)) return nooa else: return False def validate_before(not_before, slack): if not_before: now = time_util.utc_now() nbefore = calendar.timegm(time_util.str_to_time(not_before)) if nbefore > now + slack: now_str = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(now)) raise ToEarly("Can't use response yet: (now=%s + slack=%d) " "<= notbefore=%s" % (now_str, slack, not_before)) return True def valid_address(address): """Validate IPv4/IPv6 addresses.""" if not (valid_ipv4(address) or valid_ipv6(address)): raise NotValid("address") return True def valid_ipv4(address): """Validate IPv4 addresses.""" try: IPv4Address(six.text_type(address)) except AddressValueError: return False return True def valid_ipv6(address): """Validate IPv6 addresses.""" is_enclosed_in_brackets = address.startswith("[") and address.endswith("]") address_raw = address[1:-1] if is_enclosed_in_brackets else address try: IPv6Address(six.text_type(address_raw)) except AddressValueError: return False return True def valid_boolean(val): vall = val.lower() if vall in ["true", "false", "0", "1"]: return True else: raise NotValid("boolean") def valid_duration(val): try: time_util.parse_duration(val) except Exception: raise NotValid("duration") return True def valid_string(val): """ Expects unicode Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] """ for char in val: try: char = ord(char) except TypeError: raise NotValid("string") if char == 0x09 or char == 0x0A or char == 0x0D: continue elif 0x20 <= char <= 0xD7FF: continue elif 0xE000 <= char <= 0xFFFD: continue elif 0x10000 <= char <= 0x10FFFF: continue else: 
raise NotValid("string") return True def valid_unsigned_short(val): try: struct.pack("H", int(val)) except struct.error: raise NotValid("unsigned short") except ValueError: raise NotValid("unsigned short") return True def valid_positive_integer(val): try: integer = int(val) except ValueError: raise NotValid("positive integer") if integer > 0: return True else: raise NotValid("positive integer") def valid_non_negative_integer(val): try: integer = int(val) except ValueError: raise NotValid("non negative integer") if integer < 0: raise NotValid("non negative integer") return True def valid_integer(val): try: int(val) except ValueError: raise NotValid("integer") return True def valid_base64(val): try: base64.b64decode(val) except Exception: raise NotValid("base64") return True def valid_qname(val): """ A qname is either NCName or NCName ':' NCName """ try: (prefix, localpart) = val.split(":") return valid_ncname(prefix) and valid_ncname(localpart) except ValueError: return valid_ncname(val) def valid_anytype(val): """ Goes through all known type validators :param val: The value to validate :return: True is value is valid otherwise an exception is raised """ for validator in VALIDATOR.values(): if validator == valid_anytype: # To hinder recursion continue try: if validator(val): return True except NotValid: pass if isinstance(val, type): return True raise NotValid("AnyType") # ----------------------------------------------------------------------------- VALIDATOR = { "ID": valid_id, "NCName": valid_ncname, "dateTime": valid_date_time, "anyURI": valid_any_uri, "nonNegativeInteger": valid_non_negative_integer, "PositiveInteger": valid_positive_integer, "boolean": valid_boolean, "unsignedShort": valid_unsigned_short, "duration": valid_duration, "base64Binary": valid_base64, "integer": valid_integer, "QName": valid_qname, "anyType": valid_anytype, "string": valid_string, } # ----------------------------------------------------------------------------- def validate_value_type(value, spec): """ c_value_type = {'base': 'string', 'enumeration': ['Permit', 'Deny', 'Indeterminate']} {'member': 'anyURI', 'base': 'list'} {'base': 'anyURI'} {'base': 'NCName'} {'base': 'string'} """ if "maxlen" in spec: return len(value) <= int(spec["maxlen"]) if spec["base"] == "string": if "enumeration" in spec: if value not in spec["enumeration"]: raise NotValid("value not in enumeration") else: return valid_string(value) elif spec["base"] == "list": # comma separated list of values for val in [v.strip() for v in value.split(",")]: valid(spec["member"], val) else: return valid(spec["base"], value) return True def valid(typ, value): try: return VALIDATOR[typ](value) except KeyError: try: (_namespace, typ) = typ.split(":") except ValueError: if typ == "": typ = "string" return VALIDATOR[typ](value) def _valid_instance(instance, val): try: val.verify() except NotValid as exc: raise NotValid("Class '%s' instance: %s" % ( instance.__class__.__name__, exc.args[0])) except OutsideCardinality as exc: raise NotValid( "Class '%s' instance cardinality error: %s" % ( instance.__class__.__name__, exc.args[0])) ERROR_TEXT = "Wrong type of value '%s' on attribute '%s' expected it to be %s" def valid_instance(instance): instclass = instance.__class__ class_name = instclass.__name__ # if instance.text: # _has_val = True # else: # _has_val = False if instclass.c_value_type and instance.text: try: validate_value_type(instance.text.strip(), instclass.c_value_type) except NotValid as exc: raise NotValid("Class '%s' instance: %s" % 
(class_name, exc.args[0])) for (name, typ, required) in instclass.c_attributes.values(): value = getattr(instance, name, '') if required and not value: txt = "Required value on property '%s' missing" % name raise MustValueError("Class '%s' instance: %s" % (class_name, txt)) if value: try: if isinstance(typ, type): if typ.c_value_type: spec = typ.c_value_type else: spec = {"base": "string"} # do I need a default validate_value_type(value, spec) else: valid(typ, value) except (NotValid, ValueError) as exc: txt = ERROR_TEXT % (value, name, exc.args[0]) raise NotValid("Class '%s' instance: %s" % (class_name, txt)) for (name, _spec) in instclass.c_children.values(): value = getattr(instance, name, '') try: _card = instclass.c_cardinality[name] try: _cmin = _card["min"] except KeyError: _cmin = None try: _cmax = _card["max"] except KeyError: _cmax = None except KeyError: _cmin = _cmax = _card = None if value: #_has_val = True if isinstance(value, list): _list = True vlen = len(value) else: _list = False vlen = 1 if _card: if _cmin is not None and _cmin > vlen: raise NotValid( "Class '%s' instance cardinality error: %s" % ( class_name, "less than min (%s<%s)" % (vlen, _cmin))) if _cmax is not None and vlen > _cmax: raise NotValid( "Class '%s' instance cardinality error: %s" % ( class_name, "more than max (%s>%s)" % (vlen, _cmax))) if _list: for val in value: # That it is the right class is handled elsewhere _valid_instance(instance, val) else: _valid_instance(instance, value) else: if _cmin: raise NotValid( "Class '%s' instance cardinality error: %s" % ( class_name, "too few values on %s" % name)) return True def valid_domain_name(dns_name): m = re.match( r"^[a-z0-9]+([-.]{1}[a-z0-9]+).[a-z]{2,5}(:[0-9]{1,5})?(\/.)?$", dns_name, re.I) if not m: raise ValueError("Not a proper domain name")
// // ui.h // kk // // Created by zhanghailong on 2018/10/29. // Copyright © 2018年 kkmofang.cn. All rights reserved. // #ifndef ui_ui_h #define ui_ui_h #include <core/kk.h> #include <core/event.h> #include <core/dispatch.h> #include <core/jit.h> #include <core/timer.h> #include <core/sqlite.h> namespace kk { namespace ui { typedef kk::Float Float; struct Point { Float x,y; operator kk::Any() { return kk::Any(new TObject<kk::String, kk::Float>({{"x",x},{"y",y}})); } }; struct Size { Float width,height; Size(); Size(Float width,Float height); operator kk::Any() { return kk::Any(new TObject<kk::String, kk::Float>({{"width",width},{"height",height}})); } }; struct Rect { Point origin; Size size; operator kk::Any() { return kk::Any(new TObject<kk::String, kk::Float>({{"x",origin.x},{"y",origin.y},{"width",size.width},{"height",size.height}})); } }; struct Edge { Float top,left,bottom,right; Edge(); Edge(Float top,Float right,Float bottom,Float left); Edge(kk::CString v); Edge(Any &v):Edge((kk::CString) v){}; operator kk::Any() { return kk::Any(new TObject<kk::String, kk::Float>({{"top",top},{"left",left},{"bottom",bottom},{"right",right}})); } }; struct Color { public: Color(); Color(Float r,Float g,Float b,Float a); Color(kk::CString v); Color(kk::Uint v); Color(Any &v):Color((kk::CString) v){}; Float r,g,b,a; operator kk::Any() { char data[64]; if(a == 1.0) { snprintf(data, sizeof(data), "#%02x%02x%02x",(kk::Uint)(r * 255),(kk::Uint)(g * 255),(kk::Uint)(b * 255)); } else { snprintf(data, sizeof(data), "rgba(%g,%g,%g,%g)",(r * 255),(g * 255),(b * 255),a); } return kk::Any((CString)data); } Color & operator=(Any &v) { Color c((kk::CString)v); r = c.r; g = c.g; b = c.b; a = c.a; return * this; } kk::Int intValue() { return ((kk::Int) (a * 0x0ff) << 24) | ((kk::Int) (r * 0x0ff) << 16) | ((kk::Int) (g * 0x0ff) << 8) | ((kk::Int) (b * 0x0ff)); } }; class Palette { public: Palette(std::initializer_list<std::pair<const kk::String,Color>> &&v); virtual void set(kk::CString name,Color v); virtual Color get(kk::CString name); static Palette Default; protected: std::map<kk::String,Color> _values; }; enum FontStyle { FontStyleNormal,FontStyleItalic }; enum FontWeight { FontWeightNormal,FontWeightBold }; struct Font { public: Font():size(14),style(FontStyleNormal),weight(FontWeightNormal){} Font(Float size):size(size),style(FontStyleNormal),weight(FontWeightNormal){} Font(kk::CString family,Float size,FontStyle style,FontWeight weight):family(family),size(size),style(style),weight(weight){} Font(kk::CString v); Font(kk::Any &v):Font((kk::CString) v){}; kk::String family; Float size; FontStyle style; FontWeight weight; operator kk::Any(){ std::vector<kk::String> items; if(family != "") { items.push_back(family); } if(weight == FontWeightBold) { items.push_back("bold"); } if(style == FontStyleItalic) { items.push_back("italic"); } char fmt[32]; snprintf(fmt, sizeof(fmt), "%gpx",size); items.push_back(fmt); return kk::Any(CStringJoin(items, " ")); } }; enum TextAlign { TextAlignStart, TextAlignEnd, TextAlignCenter, TextAlignLeft, TextAlignRight }; TextAlign TextAlignFromString(kk::CString string); kk::CString StringFromTextAlign(TextAlign v); enum TextBaseline { TextBaselineAlphabetic, TextBaselineTop, TextBaselineHanging, TextBaselineMiddle, TextBaselineIdeographic, TextBaselineBottom, }; TextBaseline TextBaselineFromString(kk::CString string); kk::CString StringFromTextBaseline(TextBaseline v); struct Transform { Float a, b; Float c, d; Float tx, ty; }; extern Transform TransformIdentity; Transform 
TransformTranslate(Transform t, Float tx, Float ty); Transform TransformScale(Transform t, Float sx, Float sy); Transform TransformRotate(Transform t, Float angle); Transform TransformFromString(kk::CString v); enum ImageState { ImageStateNone,ImageStateLoading,ImageStateError,ImageStateLoaded }; class Image : public kk::EventEmitter { public: virtual ImageState state() = 0; virtual kk::Uint width() = 0; virtual kk::Uint height() = 0; virtual kk::CString src() = 0; virtual void copyPixels(void * data) = 0; virtual Boolean isCopyPixels() = 0; Ker_CLASS(Image,EventEmitter,"Image") }; class Context; class Worker : public Object { public: Worker(Context * main,kk::CString path); virtual ~Worker(); virtual void postMessage(Any data); virtual void terminate(); virtual Context * context(); Ker_CLASS(Worker,Object,"Worker") static void Openlib(); protected: virtual void onBackgroundMessage(Any & data); virtual void onMessage(Any & data); kk::Weak<Context> _main; kk::DispatchQueue * _queue; Context * _context; }; class Canvas; class Context : public EventEmitter, public Container { public: Context(kk::CString basePath,kk::DispatchQueue * queue); virtual ~Context(); virtual kk::CString basePath(); virtual kk::DispatchQueue * queue(); virtual duk_context * jsContext(); virtual kk::String absolutePath(kk::CString path); virtual kk::String getTextContent(kk::CString path); virtual void set(kk::Object * object); virtual kk::Object * get(kk::Object * object); virtual void remove(kk::Object * object); virtual void exec(kk::CString path,TObject<String, Any> * librarys); virtual void exec(kk::CString path,JSObject * librarys); virtual kk::Strong<Worker> createWorker(kk::CString path); virtual kk::Strong<Canvas> createCanvas(); virtual kk::Strong<Image> createImage(kk::CString src); virtual kk::Strong<Sqlite> createSqlite(kk::CString path); virtual kk::Strong<Context> parent(); virtual void setParent(Context * v); static void Openlib(); Ker_CLASS(Context, EventEmitter, "UIContext"); protected: kk::Weak<Context> _parent; kk::String _basePath; kk::Strong<kk::DispatchQueue> _queue; duk_context * _jsContext; std::map<void *,kk::Strong<kk::Object>> _objects; }; kk::Strong<Image> ImageCreate(Context * context,kk::CString src); std::function<void(Context * ,Image *)> & getImageLoader(); void setImageLoader(std::function<void(Context * ,Image *)> && func); } } #endif /* ui_h */
from computer_players.base_computer_player import BaseComputerPlayer
from dominion_game_engine.hand import has_card_type, select_by_name


class TheBureaucrat(BaseComputerPlayer):
    def buy(self, supply, money, buys):
        """Always try to buy a Bureaucrat."""
        return self.buy_card(money, 'Bureaucrat')

    def play_action_cards(self, hand):
        """Play Bureaucrat whenever it is in hand."""
        if select_by_name(hand, ['Bureaucrat']):
            self.game_client.play_action_card('Bureaucrat')

    def respond(self, action, *args):
        """When asked to respond to a Cellar, discard Victory and Curse cards."""
        if action != 'Cellar':
            return
        discard_cards = [c.Name() for c in self.state.hand if c.Type in ('Victory', 'Curse')]
        return discard_cards
from fqfa.validator.validator import ( dna_bases_validator, dna_characters_validator, rna_bases_validator, amino_acids_validator, amino_acids_all_validator, ) from fqfa.validator.create import create_validator __all__ = [ "create_validator", "dna_bases_validator", "dna_characters_validator", "rna_bases_validator", "amino_acids_validator", "amino_acids_all_validator", ]
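The module above only re-exports the fqfa validator callables. Below is a minimal usage sketch; it assumes the validators behave like re.fullmatch-style callables (a match object for valid input, None otherwise) and that create_validator accepts a string of allowed characters — both are assumptions about fqfa's API, not guarantees from this file.

# Minimal usage sketch (assumptions noted above): validators are treated as
# fullmatch-style callables returning a match object or None.
from fqfa.validator import dna_bases_validator, create_validator


def is_valid_dna(seq: str) -> bool:
    """Return True if seq contains only unambiguous uppercase DNA bases."""
    return dna_bases_validator(seq) is not None


# Assumed signature: create_validator takes a string of allowed characters.
with_n_validator = create_validator("ACGTN")

print(is_valid_dna("ACGTAC"))                  # expected True
print(is_valid_dna("ACGU"))                    # expected False (RNA base)
print(with_n_validator("ACGNT") is not None)   # expected True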
# -*- coding: utf-8 -*- """ Trac WebAdmin plugin for administration of custom fields. License: BSD (c) 2005-2012 ::: www.CodeResort.com - BV Network AS (simon-code@bvnetwork.no) (c) 2007-2009 ::: www.Optaros.com (.....) """ from pkg_resources import resource_filename from trac.config import Option from trac.core import * from trac.web.chrome import ITemplateProvider, add_script, add_warning from trac.admin.api import IAdminPanelProvider from customfieldadmin.api import CustomFields, _ class CustomFieldAdminPage(Component): implements(ITemplateProvider, IAdminPanelProvider) def __init__(self): # Init CustomFields so translations work from first request # FIXME: It actually only works from SECOND request - Trac bug?! CustomFields(self.env) # IAdminPanelProvider methods def get_admin_panels(self, req): if 'TICKET_ADMIN' in req.perm('admin', 'ticket/customfields'): yield ('ticket', _("Ticket System"), 'customfields', _("Custom Fields")) def render_admin_panel(self, req, cat, page, customfield): req.perm('admin', 'ticket/customfields').require('TICKET_ADMIN') add_script(req, 'customfieldadmin/js/customfieldadmin.js') def _customfield_from_req(self, req): cfield = {'name': req.args.get('name','').encode('utf-8'), 'label': req.args.get('label','').encode('utf-8'), 'type': req.args.get('type','').encode('utf-8'), 'value': req.args.get('value','').encode('utf-8'), 'options': [x.strip().encode('utf-8') for x in \ req.args.get('options','').split("\n")], 'cols': req.args.get('cols','').encode('utf-8'), 'rows': req.args.get('rows','').encode('utf-8'), 'order': req.args.get('order', '').encode('utf-8'), 'format': req.args.get('format', '').encode('utf-8')} return cfield cf_api = CustomFields(self.env) cf_admin = {} # Return values for template rendering # Detail view? 
if customfield: cfield = None for a_cfield in cf_api.get_custom_fields(): if a_cfield['name'] == customfield: cfield = a_cfield break if not cfield: raise TracError(_("Custom field %(name)s does not exist.", name=customfield)) if req.method == 'POST': if req.args.get('save'): cfield.update(_customfield_from_req(self, req)) cf_api.update_custom_field(cfield) req.redirect(req.href.admin(cat, page)) elif req.args.get('cancel'): req.redirect(req.href.admin(cat, page)) if cfield.has_key('options'): optional_line = '' if cfield.get('optional', False): optional_line = "\n\n" cfield['options'] = optional_line + "\n".join(cfield['options']) cf_admin['cfield'] = cfield cf_admin['cf_display'] = 'detail' else: if req.method == 'POST': # Add Custom Field if req.args.get('add') and req.args.get('name'): cfield = _customfield_from_req(self, req) cf_api.update_custom_field(cfield, create=True) req.redirect(req.href.admin(cat, page)) # Remove Custom Field elif req.args.get('remove') and req.args.get('sel'): sel = req.args.get('sel') sel = isinstance(sel, list) and sel or [sel] if not sel: raise TracError(_("No custom field selected")) for name in sel: cfield = {'name': name} cf_api.delete_custom_field(cfield) req.redirect(req.href.admin(cat, page)) elif req.args.get('apply'): # Change order order = dict([(key[6:], req.args.get(key)) for key in req.args.keys() if key.startswith('order_')]) cfields = cf_api.get_custom_fields() for current_cfield in cfields: new_order = order.get(current_cfield['name'], 0) if new_order: current_cfield['order'] = new_order cf_api.update_custom_field(current_cfield) req.redirect(req.href.admin(cat, page)) cfields = [] orders_in_use = [] for item in cf_api.get_custom_fields(): item['href'] = req.href.admin(cat, page, item['name']) item['registry'] = ('ticket-custom', item['name']) in Option.registry cfields.append(item) orders_in_use.append(int(item.get('order'))) cf_admin['cfields'] = cfields cf_admin['cf_display'] = 'list' if sorted(orders_in_use) != range(1, len(cfields)+1): add_warning(req, _("Custom Fields are not correctly sorted. " \ "This may affect appearance when viewing tickets.")) return ('customfieldadmin.html', cf_admin) # ITemplateProvider methods def get_templates_dirs(self): return [resource_filename(__name__, 'templates')] def get_htdocs_dirs(self): return [('customfieldadmin', resource_filename(__name__, 'htdocs'))]
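The admin panel above is a thin UI over the CustomFields API calls it already makes (get_custom_fields, update_custom_field(..., create=True), delete_custom_field). The sketch below shows the same calls used directly; `env` is assumed to be an already-initialized Trac environment, the field name `priority_reason` is illustrative, and plain strings are used instead of the utf-8 encoding done in _customfield_from_req.

# Hedged sketch: driving the CustomFields API outside the web UI, mirroring
# the calls made by CustomFieldAdminPage above. `env` is assumed to be an
# initialized trac.env.Environment; 'priority_reason' is an example name.
from customfieldadmin.api import CustomFields


def add_priority_reason_field(env):
    cf_api = CustomFields(env)
    cfield = {'name': 'priority_reason',
              'label': 'Reason for priority',
              'type': 'select',
              'value': '',
              'options': ['customer request', 'regression', 'security'],
              'cols': '', 'rows': '', 'order': '', 'format': ''}
    cf_api.update_custom_field(cfield, create=True)
    return [f['name'] for f in cf_api.get_custom_fields()]


def remove_field(env, name):
    CustomFields(env).delete_custom_field({'name': name})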
# -*- coding: utf-8 -*- # Scrapy settings for news_crawler project # # For simplicity, this file contains only settings considered important or # commonly used. You can find more settings consulting the documentation: # # http://doc.scrapy.org/en/latest/topics/settings.html # http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html # http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html BOT_NAME = 'news_crawler' SPIDER_MODULES = ['news_crawler.spiders'] NEWSPIDER_MODULE = 'news_crawler.spiders' LOG_LEVEL = 'WARNING' DOWNLOAD_DELAY = 1 # Crawl responsibly by identifying yourself (and your website) on the user-agent # USER_AGENT = 'news_crawler (+http://www.yourdomain.com)' # Configure maximum concurrent requests performed by Scrapy (default: 16) # CONCURRENT_REQUESTS=32 # Configure a delay for requests for the same website (default: 0) # See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay # See also autothrottle settings and docs # DOWNLOAD_DELAY=3 # The download delay setting will honor only one of: # CONCURRENT_REQUESTS_PER_DOMAIN=16 # CONCURRENT_REQUESTS_PER_IP=16 # Disable cookies (enabled by default) # COOKIES_ENABLED=False # Disable Telnet Console (enabled by default) # TELNETCONSOLE_ENABLED=False # Override the default request headers: # DEFAULT_REQUEST_HEADERS = { # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', # 'Accept-Language': 'en', # } # Enable or disable spider middlewares # See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html # SPIDER_MIDDLEWARES = { # 'news_crawler.middlewares.MyCustomSpiderMiddleware': 543, # } # Enable or disable downloader middlewares # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html # DOWNLOADER_MIDDLEWARES = { # 'news_crawler.middlewares.MyCustomDownloaderMiddleware': 543, # } # Enable or disable extensions # See http://scrapy.readthedocs.org/en/latest/topics/extensions.html # EXTENSIONS = { # 'scrapy.telnet.TelnetConsole': None, # } # Configure item pipelines # See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html ITEM_PIPELINES = { 'news_crawler.pipelines.MongoPipeline': 300, } MONGO_URI = "mongo" MONGODB_PORT = 27017 MONGO_DATABASE = "articles_news" MONGODB_COLLECTION = "articles" # USER_AGENT = 'scrapy-redis (+https://github.com/rolando/scrapy-redis)' DUPEFILTER_CLASS = "scrapy_redis.dupefilter.RFPDupeFilter" SCHEDULER = "scrapy_redis.scheduler.Scheduler" SCHEDULER_PERSIST = True REDIS_HOST = 'redis' REDIS_PORT = 6379 # Enable and configure the AutoThrottle extension (disabled by default) # See http://doc.scrapy.org/en/latest/topics/autothrottle.html # NOTE: AutoThrottle will honour the standard settings for concurrency and delay AUTOTHROTTLE_ENABLED = True # The initial download delay AUTOTHROTTLE_START_DELAY = 5 # The maximum download delay to be set in case of high latencies AUTOTHROTTLE_MAX_DELAY = 60 # Enable showing throttling stats for every response received: AUTOTHROTTLE_DEBUG = True # Enable and configure HTTP caching (disabled by default) # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings HTTPCACHE_ENABLED = True HTTPCACHE_EXPIRATION_SECS = 0 # HTTPCACHE_DIR='httpcache' # HTTPCACHE_IGNORE_HTTP_CODES=[] HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.DbmCacheStorage'
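ITEM_PIPELINES above points at news_crawler.pipelines.MongoPipeline, which is not included here. The sketch below is a guess at what that pipeline could look like, following the standard Scrapy from_crawler / open_spider / close_spider / process_item pattern and reading the MONGO_* settings defined in this file; the project's real pipeline may differ.

# Hedged sketch of the MongoPipeline referenced by ITEM_PIPELINES above.
# The real news_crawler.pipelines module may differ; this only shows the
# conventional Scrapy pipeline shape using the settings defined above.
import pymongo


class MongoPipeline(object):
    def __init__(self, mongo_uri, mongo_port, mongo_db, collection):
        self.mongo_uri = mongo_uri
        self.mongo_port = mongo_port
        self.mongo_db = mongo_db
        self.collection = collection

    @classmethod
    def from_crawler(cls, crawler):
        return cls(
            mongo_uri=crawler.settings.get('MONGO_URI'),
            mongo_port=crawler.settings.getint('MONGODB_PORT', 27017),
            mongo_db=crawler.settings.get('MONGO_DATABASE'),
            collection=crawler.settings.get('MONGODB_COLLECTION'),
        )

    def open_spider(self, spider):
        self.client = pymongo.MongoClient(self.mongo_uri, self.mongo_port)
        self.db = self.client[self.mongo_db]

    def close_spider(self, spider):
        self.client.close()

    def process_item(self, item, spider):
        # Store each scraped article as a plain document.
        self.db[self.collection].insert_one(dict(item))
        return item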
import {Map} from 'immutable'; import {EditorLocation, Notification, UiState} from '../records'; const defaultState = new UiState(); function addNotification(state, type, severity, metadata = {}) { return state.setIn( ['notifications', type], new Notification({type, severity, metadata: new Map(metadata)}), ); } function dismissNotification(state, type) { return state.update( 'notifications', notifications => notifications.delete(type), ); } /* eslint-disable complexity */ export default function ui(stateIn, action) { let state = stateIn; if (state === undefined) { state = defaultState; } switch (action.type) { case 'CHANGE_CURRENT_PROJECT': return state. set('isEditingInstructions', false). update( 'openTopBarMenu', menu => menu === 'projectPicker' ? null : menu, ); case 'PROJECT_CREATED': return state.set('isEditingInstructions', false); case 'UPDATE_PROJECT_SOURCE': return state. set('isTyping', true). deleteIn(['notifications', 'snapshot-created']); case 'USER_DONE_TYPING': return state.set('isTyping', false); case 'FOCUS_LINE': return state.set( 'requestedFocusedLine', new EditorLocation({ component: action.payload.component, line: action.payload.line, column: action.payload.column, }), ); case 'CLEAR_CONSOLE_ENTRIES': return state.set( 'requestedFocusedLine', new EditorLocation({ component: 'console', line: 0, column: 0, }), ); case 'EDITOR_FOCUSED_REQUESTED_LINE': return state.set('requestedFocusedLine', null); case 'START_DRAG_COLUMN_DIVIDER': return state.set('isDraggingColumnDivider', true); case 'STOP_DRAG_COLUMN_DIVIDER': return state.set('isDraggingColumnDivider', false); case 'GIST_NOT_FOUND': return addNotification( state, 'gist-import-not-found', 'error', {gistId: action.payload.gistId}, ); case 'GIST_IMPORT_ERROR': return addNotification( state, 'gist-import-error', 'error', {gistId: action.payload.gistId}, ); case 'NOTIFICATION_TRIGGERED': return addNotification( state, action.payload.type, action.payload.severity, action.payload.metadata, ); case 'USER_DISMISSED_NOTIFICATION': return dismissNotification(state, action.payload.type); case 'UPDATE_NOTIFICATION_METADATA': return state.updateIn( ['notifications', action.payload.type, 'metadata'], metadata => metadata.merge(action.payload.metadata), ); case 'USER_LOGGED_OUT': case 'LINK_GITHUB_IDENTITY': return state.update( 'openTopBarMenu', menu => menu === 'currentUser' ? null : menu, ); case 'SNAPSHOT_CREATED': return addNotification( state, 'snapshot-created', 'notice', {snapshotKey: action.payload}, ); case 'APPLICATION_LOADED': if (action.payload.isExperimental) { return state.set('isExperimental', true); } return state; case 'SNAPSHOT_EXPORT_ERROR': return addNotification(state, 'snapshot-export-error', 'error'); case 'SNAPSHOT_IMPORT_ERROR': return addNotification(state, 'snapshot-import-error', 'error'); case 'SNAPSHOT_NOT_FOUND': return addNotification(state, 'snapshot-not-found', 'error'); case 'TOGGLE_EDITOR_TEXT_SIZE': return state.update( 'isTextSizeLarge', isTextSizeLarge => !isTextSizeLarge, ); case 'TOGGLE_TOP_BAR_MENU': return state.update( 'openTopBarMenu', menu => menu === action.payload ? null : action.payload, ); case 'CLOSE_TOP_BAR_MENU': return state.update( 'openTopBarMenu', menu => menu === action.payload ? 
null : menu, ); case 'PROJECT_EXPORT_NOT_DISPLAYED': return addNotification( state, 'project-export-complete', 'notice', action.payload, ); case 'PROJECT_EXPORT_ERROR': if (action.payload.name === 'EmptyGistError') { return addNotification( state, 'empty-gist', 'error', ); } return addNotification( state, `${action.payload.exportType}-export-error`, 'error', action.payload, ); case 'PROJECT_COMPILATION_FAILED': return addNotification( state, 'project-compilation-failed', 'error', ); case 'PROJECT_COMPILED': return dismissNotification(state, 'project-compilation-failed'); case 'START_EDITING_INSTRUCTIONS': return state.set('isEditingInstructions', true); case 'CANCEL_EDITING_INSTRUCTIONS': case 'UPDATE_PROJECT_INSTRUCTIONS': return state.set('isEditingInstructions', false); case 'SHOW_SAVE_INDICATOR': return state.set('saveIndicatorShown', true); case 'HIDE_SAVE_INDICATOR': return state.set('saveIndicatorShown', false); case 'IDENTITY_LINKED': { return addNotification( state, 'identity-linked', 'notice', {provider: action.payload.credential.providerId}, ); } case 'LINK_IDENTITY_FAILED': return addNotification(state, 'link-identity-failed', 'error'); case 'GAPI_CLIENT_UNAVAILABLE': return addNotification(state, 'gapi-client-unavailable', 'error'); default: return state; } }
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; /** * @summary Resource usage statistics for a Task. * */ class TaskStatistics { /** * Create a TaskStatistics. * @property {string} url The URL of the statistics. * @property {date} startTime The start time of the time range covered by the * statistics. * @property {date} lastUpdateTime The time at which the statistics were last * updated. All statistics are limited to the range between startTime and * lastUpdateTime. * @property {moment.duration} userCPUTime The total user mode CPU time * (summed across all cores and all Compute Nodes) consumed by the Task. * @property {moment.duration} kernelCPUTime The total kernel mode CPU time * (summed across all cores and all Compute Nodes) consumed by the Task. * @property {moment.duration} wallClockTime The total wall clock time of the * Task. The wall clock time is the elapsed time from when the Task started * running on a Compute Node to when it finished (or to the last time the * statistics were updated, if the Task had not finished by then). If the * Task was retried, this includes the wall clock time of all the Task * retries. * @property {number} readIOps The total number of disk read operations made * by the Task. * @property {number} writeIOps The total number of disk write operations * made by the Task. * @property {number} readIOGiB The total gibibytes read from disk by the * Task. * @property {number} writeIOGiB The total gibibytes written to disk by the * Task. * @property {moment.duration} waitTime The total wait time of the Task. The * wait time for a Task is defined as the elapsed time between the creation * of the Task and the start of Task execution. (If the Task is retried due * to failures, the wait time is the time to the most recent Task * execution.). */ constructor() { } /** * Defines the metadata of TaskStatistics * * @returns {object} metadata of TaskStatistics * */ mapper() { return { required: false, serializedName: 'TaskStatistics', type: { name: 'Composite', className: 'TaskStatistics', modelProperties: { url: { required: true, serializedName: 'url', type: { name: 'String' } }, startTime: { required: true, serializedName: 'startTime', type: { name: 'DateTime' } }, lastUpdateTime: { required: true, serializedName: 'lastUpdateTime', type: { name: 'DateTime' } }, userCPUTime: { required: true, serializedName: 'userCPUTime', type: { name: 'TimeSpan' } }, kernelCPUTime: { required: true, serializedName: 'kernelCPUTime', type: { name: 'TimeSpan' } }, wallClockTime: { required: true, serializedName: 'wallClockTime', type: { name: 'TimeSpan' } }, readIOps: { required: true, serializedName: 'readIOps', type: { name: 'Number' } }, writeIOps: { required: true, serializedName: 'writeIOps', type: { name: 'Number' } }, readIOGiB: { required: true, serializedName: 'readIOGiB', type: { name: 'Number' } }, writeIOGiB: { required: true, serializedName: 'writeIOGiB', type: { name: 'Number' } }, waitTime: { required: true, serializedName: 'waitTime', type: { name: 'TimeSpan' } } } } }; } } module.exports = TaskStatistics;
# Lesson 19 - 04-12-2019
# Lists with for loops and functions
# How do you eat a giant? One piece at a time.
# While doing this exercise, pay attention to the following.
# Using the cadastro.txt file, which holds these fields: customer code, name, age, sex, e-mail and phone.
# 1 - Write a function that builds and returns a list of dictionaries with the customers' data.
# 2 - Using the list from exercise 1, split adults from minors and save each group to its own .txt file.
#     This function should also return a list with the dictionaries of the adults.
# 3 - Write a function that counts how many women and how many men are in the list. Save each group to a different file.
# 4 - Write a registration lookup function. It must receive the customer code and print the customer's
#     data to the screen with an f-string, using the list from exercise 1.
#     (See the hedged sketch after this file's code for an illustrative implementation.)
# 4.1 - The lookup must also print a phrase for each of these cases:
#       Women up to 16: "Hi {nome}! Want to try our Gloss-flavored Tikito? It's delicious!"
#       Women over 16 up to 18: "Hi {nome}! Try our joy-flavored soda! Your crush will love it!"
#       Women over 18: "Hi {nome}! Have you tried our tequila-based drink? Low alcohol content with twice the flavor!!!"
#       Men up to 16: "Hi {nome}! Want to try our Meleka-flavored Tikito? It's delicious!"
#       Men over 16 up to 18: "Hi {nome}! Try our car-racing-flavored soda! Your sweetheart will love it!"
#       Men over 18: "Hi {nome}! Have you tried our beer? High alcohol content with twice the bitterness!!!"
# Remember: the phrase really must be printed, because the marketing team will send it out by e-mail.

# 1:
def ler():
    arquivo = open('C:\\Users\\900157\\Documents\\GitHub\\TrabalhosPython\\Aula19\\exercicios\\cadastro.txt', 'r')
    lista = []
    for linha in arquivo:
        linha_limpa = linha.strip()
        linha_lista = linha_limpa.split(';')
        id_user = linha_lista[0]
        nome = linha_lista[1]
        idade = linha_lista[2]
        sexo = linha_lista[3]
        email = linha_lista[4]
        telefone = linha_lista[5]
        dicionario = {'ID': id_user, 'Nome': nome, 'Idade': idade, 'Sexo': sexo, 'Email': email, 'Telefone': telefone}
        lista.append(dicionario)
    arquivo.close()
    return lista


# 2:
lista = ler()
maiores = open('C:\\Users\\900157\\Documents\\GitHub\\TrabalhosPython\\Aula19\\exercicios\\maiores.txt', 'a')
menores = open('C:\\Users\\900157\\Documents\\GitHub\\TrabalhosPython\\Aula19\\exercicios\\menores.txt', 'a')
for dicionario in lista:
    if int(dicionario['Idade']) >= 18:
        maiores.write(f'{dicionario["ID"]};{dicionario["Nome"]};{dicionario["Idade"]};{dicionario["Sexo"]};{dicionario["Email"]};{dicionario["Telefone"]}\n')
    else:
        menores.write(f'{dicionario["ID"]};{dicionario["Nome"]};{dicionario["Idade"]};{dicionario["Sexo"]};{dicionario["Email"]};{dicionario["Telefone"]}\n')
maiores.close()
menores.close()


# 3:
def contador(lista):
    homens = open('C:\\Users\\900157\\Documents\\GitHub\\TrabalhosPython\\Aula19\\exercicios\\homens.txt', 'a')
    mulheres = open('C:\\Users\\900157\\Documents\\GitHub\\TrabalhosPython\\Aula19\\exercicios\\mulheres.txt', 'a')
    contarhomens = 0
    contarmulheres = 0
    for dicionario in lista:
        if dicionario['Sexo'] == "m":
            homens.write(f'{dicionario["ID"]};{dicionario["Nome"]};{dicionario["Idade"]};{dicionario["Sexo"]};{dicionario["Email"]};{dicionario["Telefone"]}\n')
            contarhomens += 1
        else:
            mulheres.write(f'{dicionario["ID"]};{dicionario["Nome"]};{dicionario["Idade"]};{dicionario["Sexo"]};{dicionario["Email"]};{dicionario["Telefone"]}\n')
            contarmulheres += 1
    homens.close()
    mulheres.close()
    contador = f'Homens: {contarhomens} Mulheres: {contarmulheres}'
    return contador


print(contador(lista))
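Exercises 1-3 are implemented above, but exercise 4 (the registration lookup that prints the marketing phrase) is not. The sketch below is one possible implementation; the function name consultar_cadastro, the English wording of the phrases, and the assumption that the sex field uses 'f' for women (mirroring the 'm' check in exercise 3) are illustrative, not part of the original file.

# Hedged sketch of exercise 4: look up a customer by ID in the list from
# ler() and print their data plus the marketing phrase for their age/sex
# bracket. Function name, phrase wording, and the 'f' sex flag are assumptions.
def consultar_cadastro(codigo):
    lista = ler()
    for cliente in lista:
        if cliente['ID'] == str(codigo):
            print(f"ID: {cliente['ID']} | Name: {cliente['Nome']} | Age: {cliente['Idade']} | "
                  f"Sex: {cliente['Sexo']} | Email: {cliente['Email']} | Phone: {cliente['Telefone']}")
            nome = cliente['Nome']
            idade = int(cliente['Idade'])
            if cliente['Sexo'] == 'f':
                if idade <= 16:
                    print(f"Hi {nome}! Want to try our Gloss-flavored Tikito? It's delicious!")
                elif idade <= 18:
                    print(f"Hi {nome}! Try our joy-flavored soda! Your crush will love it!")
                else:
                    print(f"Hi {nome}! Have you tried our tequila-based drink? Low alcohol content with twice the flavor!!!")
            else:
                if idade <= 16:
                    print(f"Hi {nome}! Want to try our Meleka-flavored Tikito? It's delicious!")
                elif idade <= 18:
                    print(f"Hi {nome}! Try our car-racing-flavored soda! Your sweetheart will love it!")
                else:
                    print(f"Hi {nome}! Have you tried our beer? High alcohol content with twice the bitterness!!!")
            return
    print(f"Customer {codigo} not found.")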
// @ts-check const Cuboid = require("./cuboid"); describe("obj.getResultsAfterSubtraction", () => { it("works as expected", () => { const cuboid = new Cuboid(10, 50, 20, 60, 30, 70); expect(cuboid.getResultsAfterSubtraction(new Cuboid(34, 44, 35, 45, 36, 46))).toEqual([ { startX: 10, endX: 33, startY: 20, endY: 60, startZ: 30, endZ: 70 }, { startX: 34, endX: 44, startY: 20, endY: 34, startZ: 30, endZ: 70 }, { startX: 34, endX: 44, startY: 35, endY: 45, startZ: 30, endZ: 35 }, { startX: 34, endX: 44, startY: 35, endY: 45, startZ: 47, endZ: 70 }, { startX: 34, endX: 44, startY: 46, endY: 60, startZ: 30, endZ: 70 }, { startX: 45, endX: 50, startY: 20, endY: 60, startZ: 30, endZ: 70 }, ]); expect(cuboid.getResultsAfterSubtraction(new Cuboid(0, 5, 10, 15, 20, 25))).toEqual(null); expect(cuboid.getResultsAfterSubtraction(cuboid)).toEqual([]); }); }); describe("obj.listPoints", () => { it("works as expected", () => { const cuboid = new Cuboid(10, 12, 11, 13, 12, 14); expect(cuboid.listPoints()).toEqual([ "10,11,12", "10,11,13", "10,11,14", "10,12,12", "10,12,13", "10,12,14", "10,13,12", "10,13,13", "10,13,14", "11,11,12", "11,11,13", "11,11,14", "11,12,12", "11,12,13", "11,12,14", "11,13,12", "11,13,13", "11,13,14", "12,11,12", "12,11,13", "12,11,14", "12,12,12", "12,12,13", "12,12,14", "12,13,12", "12,13,13", "12,13,14", ]); }); }); describe("obj.getSize", () => { it("works as expected", () => { const cuboid = new Cuboid(10, 12, 11, 13, 12, 14); expect(cuboid.getSize()).toEqual(27); }); });
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. /** @typedef {!{ rect: !Protocol.DOM.Rect, snapshot: !SDK.PaintProfilerSnapshot }} */ SDK.SnapshotWithRect; /** * @interface */ SDK.Layer = function() {}; SDK.Layer.prototype = { /** * @return {string} */ id() {}, /** * @return {?string} */ parentId() {}, /** * @return {?SDK.Layer} */ parent() {}, /** * @return {boolean} */ isRoot() {}, /** * @return {!Array.<!SDK.Layer>} */ children() {}, /** * @param {!SDK.Layer} child */ addChild(child) {}, /** * @return {?SDK.DOMNode} */ node() {}, /** * @return {?SDK.DOMNode} */ nodeForSelfOrAncestor() {}, /** * @return {number} */ offsetX() {}, /** * @return {number} */ offsetY() {}, /** * @return {number} */ width() {}, /** * @return {number} */ height() {}, /** * @return {?Array.<number>} */ transform() {}, /** * @return {!Array.<number>} */ quad() {}, /** * @return {!Array.<number>} */ anchorPoint() {}, /** * @return {boolean} */ invisible() {}, /** * @return {number} */ paintCount() {}, /** * @return {?Protocol.DOM.Rect} */ lastPaintRect() {}, /** * @return {!Array.<!Protocol.LayerTree.ScrollRect>} */ scrollRects() {}, /** * @return {number} */ gpuMemoryUsage() {}, /** * @param {function(!Array.<string>)} callback */ requestCompositingReasons(callback) {}, /** * @return {boolean} */ drawsContent() {}, /** * @return {!Array<!Promise<?SDK.SnapshotWithRect>>} */ snapshots() {} }; SDK.Layer.ScrollRectType = { NonFastScrollable: 'NonFastScrollable', TouchEventHandler: 'TouchEventHandler', WheelEventHandler: 'WheelEventHandler', RepaintsOnScroll: 'RepaintsOnScroll' }; /** * @unrestricted */ SDK.LayerTreeBase = class { /** * @param {?SDK.Target} target */ constructor(target) { this._target = target; this._domModel = target ? 
target.model(SDK.DOMModel) : null; this._layersById = {}; this._root = null; this._contentRoot = null; /** @type {!Map<number, ?SDK.DOMNode>} */ this._backendNodeIdToNode = new Map(); } /** * @return {?SDK.Target} */ target() { return this._target; } /** * @return {?SDK.Layer} */ root() { return this._root; } /** * @param {?SDK.Layer} root * @protected */ setRoot(root) { this._root = root; } /** * @return {?SDK.Layer} */ contentRoot() { return this._contentRoot; } /** * @param {?SDK.Layer} contentRoot * @protected */ setContentRoot(contentRoot) { this._contentRoot = contentRoot; } /** * @param {function(!SDK.Layer)} callback * @param {?SDK.Layer=} root * @return {boolean} */ forEachLayer(callback, root) { if (!root) { root = this.root(); if (!root) return false; } return callback(root) || root.children().some(this.forEachLayer.bind(this, callback)); } /** * @param {string} id * @return {?SDK.Layer} */ layerById(id) { return this._layersById[id] || null; } /** * @param {!Set<number>} requestedNodeIds * @param {function()} callback */ resolveBackendNodeIds(requestedNodeIds, callback) { if (!requestedNodeIds.size || !this._domModel) { callback(); return; } if (this._domModel) this._domModel.pushNodesByBackendIdsToFrontend(requestedNodeIds, populateBackendNodeMap.bind(this)); /** * @this {SDK.LayerTreeBase} * @param {?Map<number, ?SDK.DOMNode>} nodesMap */ function populateBackendNodeMap(nodesMap) { if (nodesMap) { for (var nodeId of nodesMap.keysArray()) this._backendNodeIdToNode.set(nodeId, nodesMap.get(nodeId) || null); } callback(); } } /** * @return {!Map<number, ?SDK.DOMNode>} */ backendNodeIdToNode() { return this._backendNodeIdToNode; } /** * @param {!{width: number, height: number}} viewportSize */ setViewportSize(viewportSize) { this._viewportSize = viewportSize; } /** * @return {!{width: number, height: number}|undefined} */ viewportSize() { return this._viewportSize; } /** * @param {number} id * @return {?SDK.DOMNode} */ _nodeForId(id) { return this._domModel ? this._domModel.nodeForId(id) : null; } };
/** @jsx jsx */ import { jsx } from "theme-ui" import { Link } from "gatsby" import Layout from "../components/layout" import SEO from "../components/seo" import { Flex } from "theme-ui" import Form from "react-bootstrap/Form" import { RecipeButton } from "../components/recipeButton" const LoginPage = () => { return ( <Layout> <SEO title="Login" /> <div sx={{ width: ["90%", "70%"], maxWidth: "container", margin: "0 auto", mt: "4", }} > <div sx={{ display: "flex", flexWrap: "nowrap", justifyContent: "space-evenly", alignItems: "baseline", width: "80%", mt: 5, mx: "auto", }} > <h3 sx={{ width: "50%", textAlign: "center", borderRight: "1px solid orange", }} > Login </h3> <h3 sx={{ width: "50%", textAlign: "center", borderLeft: "1px solid orange", "&:hover": { color: "#FFC551", cursor: "pointer", }, }} > Register </h3> </div> <Form sx={{ width: ["90%", "90%", "90%", "60%"], mt: "4", mx: "auto" }}> <Form.Group controlId="formBasicEmail"> <Form.Label>Email address</Form.Label> <Form.Control required type="email" placeholder="Enter email" /> </Form.Group> <Form.Group controlId="formBasicPassword"> <Form.Label>Password</Form.Label> <Form.Control required type="password" placeholder="Password" /> </Form.Group> <Form.Text className="text-muted" sx={{ my: "2" }}> <span sx={{ my: "2", "&:hover": { color: "#FFC551", cursor: "pointer", }, }} > Forgot your password? </span> </Form.Text> <RecipeButton sx={{ mx: "40%" }}>Submit</RecipeButton> </Form> </div> </Layout> ) } export default LoginPage
import pytest import cogctl.cli.bundle.versions as bundle # TODO: Eww, this import pytestmark = pytest.mark.usefixtures("mocks") def test_versions_with_no_args_lists_everything(cogctl): result = cogctl(bundle.versions, []) assert result.exit_code == 0 assert result.output == """\ BUNDLE VERSION STATUS disabled_bundle 0.0.4 Disabled disabled_bundle 0.0.5 Disabled disabled_bundle 0.0.6 Disabled enabled_bundle 0.0.1 Disabled enabled_bundle 0.0.2 Enabled enabled_bundle 0.0.3 Disabled has_incompatible_versions 0.0.7 Incompatible has_incompatible_versions 0.0.8 Incompatible has_incompatible_versions 0.0.9 Enabled """ def test_versions_list_all_incompatible_versions(cogctl): result = cogctl(bundle.versions, ["--incompatible"]) assert result.exit_code == 0 assert result.output == """\ BUNDLE VERSION STATUS has_incompatible_versions 0.0.7 Incompatible has_incompatible_versions 0.0.8 Incompatible """ def test_versions_for_specific_bundle(cogctl): result = cogctl(bundle.versions, ["enabled_bundle"]) assert result.exit_code == 0 assert result.output == """\ BUNDLE VERSION STATUS enabled_bundle 0.0.1 Disabled enabled_bundle 0.0.2 Enabled enabled_bundle 0.0.3 Disabled """ def test_versions_list_incompatible_for_bundle_with_no_incompatible_versions(cogctl): # noqa: E501 result = cogctl(bundle.versions, ["enabled_bundle", "--incompatible"]) assert result.exit_code == 0 assert result.output == """\ BUNDLE VERSION STATUS """ def test_versions_list_incompatible_for_bundle_with_incompatible_versions(cogctl): # noqa: E501 result = cogctl(bundle.versions, ["has_incompatible_versions", "--incompatible"]) assert result.exit_code == 0 assert result.output == """\ BUNDLE VERSION STATUS has_incompatible_versions 0.0.7 Incompatible has_incompatible_versions 0.0.8 Incompatible """ def test_versions_for_nonexistent_bundle(cogctl): result = cogctl(bundle.versions, ["not_a_bundle"]) assert result.exit_code == 2 assert result.output == """\ Usage: versions [OPTIONS] NAME Error: Invalid value for "name": Bundle 'not_a_bundle' not found """
#!/usr/bin/env python3 # Copyright (c) 2017-2018 The BitRub Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the -uacomment option.""" import re from test_framework.test_framework import BitRubTestFramework from test_framework.test_node import ErrorMatch from test_framework.util import assert_equal class UacommentTest(BitRubTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True def run_test(self): self.log.info("test multiple -uacomment") test_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-12:-1] assert_equal(test_uacomment, "(testnode0)") self.restart_node(0, ["-uacomment=foo"]) foo_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-17:-1] assert_equal(foo_uacomment, "(testnode0; foo)") self.log.info("test -uacomment max length") self.stop_node(0) expected = r"Error: Total length of network version string \([0-9]+\) exceeds maximum length \(256\). Reduce the number or size of uacomments." self.nodes[0].assert_start_raises_init_error(["-uacomment=" + 'a' * 256], expected, match=ErrorMatch.FULL_REGEX) self.log.info("test -uacomment unsafe characters") for unsafe_char in ['/', ':', '(', ')', '₿', '🏃']: expected = r"Error: User Agent comment \(" + re.escape(unsafe_char) + r"\) contains unsafe characters." self.nodes[0].assert_start_raises_init_error(["-uacomment=" + unsafe_char], expected, match=ErrorMatch.FULL_REGEX) if __name__ == '__main__': UacommentTest().main()
import { getSetting, updateSetting } from '@/api/login' import storage from '@/utils/storage' const state = { info: storage.get('app_info') } const mutations = { SET_INFO: (state, data) => { state.info = data storage.set('app_info', data) } } const actions = { settingDetail({ commit }) { return new Promise((resolve, reject) => { getSetting().then(response => { const { data } = response commit('SET_INFO', data) resolve(data) }).catch(error => { reject(error) }) }) }, updateSetting({ commit }, info) { return new Promise((resolve, reject) => { updateSetting(info).then(response => { const { data } = response commit('SET_INFO', data) resolve(response) }).catch(error => { reject(error) }) }) } } export default { namespaced: true, state, mutations, actions }
var MyCalendar = function() {
    this.arr = []
};

/**
 * Insert [start, end) if it does not overlap any existing booking,
 * keeping this.arr sorted by start time.
 * @param {number} start
 * @param {number} end
 * @return {boolean}
 */
MyCalendar.prototype.book = function(start, end) {
    let arr = this.arr
    if (!arr.length) {
        return Boolean(arr.push([start, end]))
    }
    // New interval fits before the first booking.
    if (arr[0][0] >= end) {
        this.arr.unshift([start, end])
        return true
    }
    // Try to slot the interval between two existing bookings.
    for (var i = 0; i < arr.length; i++) {
        if (arr[i][0] >= end) {
            if (arr[i - 1] && arr[i - 1][1] <= start) {
                this.arr = [...arr.slice(0, i), [start, end], ...arr.slice(i, arr.length)]
                return true
            }
        }
    }
    // New interval fits after the last booking.
    if (arr[arr.length - 1][1] <= start) {
        this.arr.push([start, end])
        return true
    }
    return false
};

/**
 * Your MyCalendar object will be instantiated and called as such:
 * var obj = new MyCalendar()
 * var param_1 = obj.book(start,end)
 */

// Alternative brute-force implementation; renamed so it does not redeclare
// the MyCalendar identifier defined above (which would be a SyntaxError).
class MyCalendarSimple {
    constructor() {
        this.booked = [];
    }

    book(start, end) {
        for (let i = 0; i < this.booked.length; i++) {
            const [bookedStart, bookedEnd] = this.booked[i];
            if (start < bookedEnd && end > bookedStart) return false;
        }
        this.booked.push([start, end]);
        return true;
    }
}
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- import pytest ; pytest #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports import logging from copy import copy import json from mock import patch # External imports # Bokeh imports from bokeh.io.doc import curdoc from bokeh.models import ColumnDataSource from bokeh.document.events import (ColumnsPatchedEvent, ColumnsStreamedEvent, ModelChangedEvent, RootAddedEvent, RootRemovedEvent, SessionCallbackAdded, SessionCallbackRemoved, TitleChangedEvent) from bokeh.protocol.messages.patch_doc import process_document_events from bokeh.util.logconfig import basicConfig # Module under test import bokeh.document.document as document #----------------------------------------------------------------------------- # Setup #----------------------------------------------------------------------------- from _util_document import AnotherModelInTestDocument, SomeModelInTestDocument, ModelThatOverridesName, ModelWithSpecInTestDocument #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- class TestDocumentHold(object): @pytest.mark.parametrize('policy', document.HoldPolicy) @pytest.mark.unit def test_hold(self, policy): d = document.Document() assert d._hold == None assert d._held_events == [] d.hold(policy) assert d._hold == policy def test_hold_bad_policy(self): d = document.Document() with pytest.raises(ValueError): d.hold("junk") @pytest.mark.parametrize('first,second', [('combine', 'collect'), ('collect', 'combine')]) @pytest.mark.unit def test_rehold(self, first, second, caplog): d = document.Document() with caplog.at_level(logging.WARN): d.hold(first) assert caplog.text == "" assert len(caplog.records) == 0 d.hold(first) assert caplog.text == "" assert len(caplog.records) == 0 d.hold(second) assert caplog.text.strip().endswith("hold already active with '%s', ignoring '%s'" % (first, second)) assert len(caplog.records) == 1 d.unhold() d.hold(second) assert len(caplog.records) == 1 @pytest.mark.parametrize('policy', document.HoldPolicy) @pytest.mark.unit def test_unhold(self, policy): d = document.Document() assert d._hold == None assert d._held_events == [] d.hold(policy) assert d._hold == policy d.unhold() assert d._hold == None @patch("bokeh.document.document.Document._trigger_on_change") def test_unhold_triggers_events(self, mock_trigger): d = document.Document() d.hold('collect') d._held_events = [1,2,3] d.unhold() assert mock_trigger.call_count == 3 assert mock_trigger.call_args[0] == (3,) assert mock_trigger.call_args[1] == {} extra = [] class Test_Document_delete_modules(object): def test_basic(self): d = document.Document() assert not d.roots class FakeMod(object): __name__ = 'junkjunkjunk' mod = FakeMod() import sys assert 'junkjunkjunk' not in sys.modules sys.modules['junkjunkjunk'] = mod d._modules.append(mod) assert 'junkjunkjunk' in 
sys.modules d.delete_modules() assert 'junkjunkjunk' not in sys.modules assert d._modules is None def test_extra_referrer_error(self, caplog): d = document.Document() assert not d.roots class FakeMod(object): __name__ = 'junkjunkjunk' mod = FakeMod() import sys assert 'junkjunkjunk' not in sys.modules sys.modules['junkjunkjunk'] = mod d._modules.append(mod) assert 'junkjunkjunk' in sys.modules # add an extra referrer for delete_modules to complain about extra.append(mod) import gc # get_referrers behavior changed in Python 3.7, see https://github.com/bokeh/bokeh/issues/8221 assert len(gc.get_referrers(mod)) in (3,4) with caplog.at_level(logging.ERROR): d.delete_modules() assert "Module %r has extra unexpected referrers! This could indicate a serious memory leak. Extra referrers:" % mod in caplog.text assert len(caplog.records) == 1 assert 'junkjunkjunk' not in sys.modules assert d._modules is None class TestDocument(object): def test_empty(self): d = document.Document() assert not d.roots def test_default_template_vars(self): d = document.Document() assert not d.roots assert d.template_variables == {} def test_add_roots(self): d = document.Document() assert not d.roots d.add_root(AnotherModelInTestDocument()) assert len(d.roots) == 1 assert next(iter(d.roots)).document == d def test_roots_preserves_insertion_order(self): d = document.Document() assert not d.roots roots = [ AnotherModelInTestDocument(), AnotherModelInTestDocument(), AnotherModelInTestDocument(), ] for r in roots: d.add_root(r) assert len(d.roots) == 3 assert type(d.roots) is list roots_iter = iter(d.roots) assert next(roots_iter) is roots[0] assert next(roots_iter) is roots[1] assert next(roots_iter) is roots[2] def test_set_title(self): d = document.Document() assert d.title == document.DEFAULT_TITLE d.title = "Foo" assert d.title == "Foo" def test_all_models(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 m = SomeModelInTestDocument() m2 = AnotherModelInTestDocument() m.child = m2 d.add_root(m) assert len(d.roots) == 1 assert len(d._all_models) == 2 m.child = None assert len(d._all_models) == 1 m.child = m2 assert len(d._all_models) == 2 d.remove_root(m) assert len(d._all_models) == 0 def test_get_model_by_id(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 m = SomeModelInTestDocument() m2 = AnotherModelInTestDocument() m.child = m2 d.add_root(m) assert len(d.roots) == 1 assert len(d._all_models) == 2 assert d.get_model_by_id(m.id) == m assert d.get_model_by_id(m2.id) == m2 assert d.get_model_by_id("not a valid ID") is None def test_get_model_by_name(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 m = SomeModelInTestDocument(name="foo") m2 = AnotherModelInTestDocument(name="bar") m.child = m2 d.add_root(m) assert len(d.roots) == 1 assert len(d._all_models) == 2 assert len(d._all_models_by_name._dict) == 2 assert d.get_model_by_name(m.name) == m assert d.get_model_by_name(m2.name) == m2 assert d.get_model_by_name("not a valid name") is None def test_get_model_by_changed_name(self): d = document.Document() m = SomeModelInTestDocument(name="foo") d.add_root(m) assert d.get_model_by_name("foo") == m m.name = "bar" assert d.get_model_by_name("foo") == None assert d.get_model_by_name("bar") == m def test_get_model_by_changed_from_none_name(self): d = document.Document() m = SomeModelInTestDocument(name=None) d.add_root(m) assert d.get_model_by_name("bar") == None m.name = "bar" assert d.get_model_by_name("bar") == m def 
test_get_model_by_changed_to_none_name(self): d = document.Document() m = SomeModelInTestDocument(name="bar") d.add_root(m) assert d.get_model_by_name("bar") == m m.name = None assert d.get_model_by_name("bar") == None def test_can_get_name_overriding_model_by_name(self): d = document.Document() m = ModelThatOverridesName(name="foo") d.add_root(m) assert d.get_model_by_name("foo") == m m.name = "bar" assert d.get_model_by_name("bar") == m def test_cannot_get_model_with_duplicate_name(self): d = document.Document() m = SomeModelInTestDocument(name="foo") m2 = SomeModelInTestDocument(name="foo") d.add_root(m) d.add_root(m2) got_error = False try: d.get_model_by_name("foo") except ValueError as e: got_error = True assert 'Found more than one' in repr(e) assert got_error d.remove_root(m) assert d.get_model_by_name("foo") == m2 def test_select(self): # we aren't trying to replace test_query here, only test # our wrappers around it, so no need to try every kind of # query d = document.Document() root1 = SomeModelInTestDocument(foo=42, name='a') child1 = SomeModelInTestDocument(foo=43, name='b') root2 = SomeModelInTestDocument(foo=44, name='c') root3 = SomeModelInTestDocument(foo=44, name='d') child3 = SomeModelInTestDocument(foo=45, name='c') root1.child = child1 root3.child = child3 d.add_root(root1) d.add_root(root2) d.add_root(root3) # select() assert set([root1]) == set(d.select(dict(foo=42))) assert set([root1]) == set(d.select(dict(name='a'))) assert set([root2, child3]) == set(d.select(dict(name='c'))) assert set() == set(d.select(dict(name='nope'))) # select() on object assert set() == set(root3.select(dict(name='a'))) assert set([child3]) == set(root3.select(dict(name='c'))) # select_one() assert root3 == d.select_one(dict(name='d')) assert None == d.select_one(dict(name='nope')) got_error = False try: d.select_one(dict(name='c')) except ValueError as e: got_error = True assert 'Found more than one' in repr(e) assert got_error # select_one() on object assert None == root3.select_one(dict(name='a')) assert child3 == root3.select_one(dict(name='c')) # set_select() d.set_select(dict(foo=44), dict(name='c')) assert set([root2, child3, root3]) == set(d.select(dict(name='c'))) # set_select() on object root3.set_select(dict(name='c'), dict(foo=57)) assert set([child3, root3]) == set(d.select(dict(foo=57))) assert set([child3, root3]) == set(root3.select(dict(foo=57))) def test_is_single_string_selector(self): d = document.Document() # this is an implementation detail but just ensuring it works assert d._is_single_string_selector(dict(foo='c'), 'foo') assert d._is_single_string_selector(dict(foo=u'c'), 'foo') assert not d._is_single_string_selector(dict(foo='c', bar='d'), 'foo') assert not d._is_single_string_selector(dict(foo=42), 'foo') def test_all_models_with_multiple_references(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 root1 = SomeModelInTestDocument() root2 = SomeModelInTestDocument() child1 = AnotherModelInTestDocument() root1.child = child1 root2.child = child1 d.add_root(root1) d.add_root(root2) assert len(d.roots) == 2 assert len(d._all_models) == 3 root1.child = None assert len(d._all_models) == 3 root2.child = None assert len(d._all_models) == 2 root1.child = child1 assert len(d._all_models) == 3 root2.child = child1 assert len(d._all_models) == 3 d.remove_root(root1) assert len(d._all_models) == 2 d.remove_root(root2) assert len(d._all_models) == 0 def test_all_models_with_cycles(self): d = document.Document() assert not d.roots assert 
len(d._all_models) == 0 root1 = SomeModelInTestDocument() root2 = SomeModelInTestDocument() child1 = SomeModelInTestDocument() root1.child = child1 root2.child = child1 child1.child = root1 print("adding root1") d.add_root(root1) print("adding root2") d.add_root(root2) assert len(d.roots) == 2 assert len(d._all_models) == 3 print("clearing child of root1") root1.child = None assert len(d._all_models) == 3 print("clearing child of root2") root2.child = None assert len(d._all_models) == 2 print("putting child1 back in root1") root1.child = child1 assert len(d._all_models) == 3 print("Removing root1") d.remove_root(root1) assert len(d._all_models) == 1 print("Removing root2") d.remove_root(root2) assert len(d._all_models) == 0 def test_change_notification(self): d = document.Document() assert not d.roots m = AnotherModelInTestDocument() d.add_root(m) assert len(d.roots) == 1 assert m.bar == 1 assert curdoc() is not d events = [] curdoc_from_listener = [] def listener(event): curdoc_from_listener.append(curdoc()) events.append(event) d.on_change(listener) m.bar = 42 assert events event = events[0] assert isinstance(event, ModelChangedEvent) assert event.document == d assert event.model == m assert event.attr == 'bar' assert event.old == 1 assert event.new == 42 assert len(curdoc_from_listener) == 1 assert curdoc_from_listener[0] is d def test_stream_notification(self): d = document.Document() assert not d.roots m = ColumnDataSource(data=dict(a=[10], b=[20])) d.add_root(m) assert len(d.roots) == 1 assert curdoc() is not d events = [] curdoc_from_listener = [] def listener(event): curdoc_from_listener.append(curdoc()) events.append(event) d.on_change(listener) m.stream(dict(a=[11, 12], b=[21, 22]), 200) assert events event = events[0] assert isinstance(event, ModelChangedEvent) assert isinstance(event.hint, ColumnsStreamedEvent) assert event.document == d assert event.model == m assert event.hint.column_source == m assert event.hint.data == dict(a=[11, 12], b=[21, 22]) assert event.hint.rollover == 200 assert event.attr == 'data' # old == new because stream events update in-place assert event.old == dict(a=[10, 11, 12], b=[20, 21, 22]) assert event.new == dict(a=[10, 11, 12], b=[20, 21, 22]) assert len(curdoc_from_listener) == 1 assert curdoc_from_listener[0] is d def test_patch_notification(self): d = document.Document() assert not d.roots m = ColumnDataSource(data=dict(a=[10,11], b=[20,21])) d.add_root(m) assert len(d.roots) == 1 assert curdoc() is not d events = [] curdoc_from_listener = [] def listener(event): curdoc_from_listener.append(curdoc()) events.append(event) d.on_change(listener) m.patch(dict(a=[(0, 1)], b=[(0,0), (1,1)])) assert events event = events[0] assert isinstance(event, ModelChangedEvent) assert isinstance(event.hint, ColumnsPatchedEvent) assert event.document == d assert event.model == m assert event.hint.column_source == m assert event.hint.patches == dict(a=[(0, 1)], b=[(0,0), (1,1)]) assert event.attr == 'data' # old == new because stream events update in-place assert event.old == dict(a=[1, 11], b=[0, 1]) assert event.new == dict(a=[1, 11], b=[0, 1]) assert len(curdoc_from_listener) == 1 assert curdoc_from_listener[0] is d def test_change_notification_removal(self): d = document.Document() assert not d.roots m = AnotherModelInTestDocument() d.add_root(m) assert len(d.roots) == 1 assert m.bar == 1 events = [] def listener(event): events.append(event) d.on_change(listener) m.bar = 42 assert len(events) == 1 assert events[0].new == 42 d.remove_on_change(listener) m.bar = 
43 assert len(events) == 1 def test_notification_of_roots(self): d = document.Document() assert not d.roots events = [] def listener(event): events.append(event) d.on_change(listener) m = AnotherModelInTestDocument(bar=1) d.add_root(m) assert len(d.roots) == 1 assert len(events) == 1 assert isinstance(events[0], RootAddedEvent) assert events[0].model == m m2 = AnotherModelInTestDocument(bar=2) d.add_root(m2) assert len(d.roots) == 2 assert len(events) == 2 assert isinstance(events[1], RootAddedEvent) assert events[1].model == m2 d.remove_root(m) assert len(d.roots) == 1 assert len(events) == 3 assert isinstance(events[2], RootRemovedEvent) assert events[2].model == m d.remove_root(m2) assert len(d.roots) == 0 assert len(events) == 4 assert isinstance(events[3], RootRemovedEvent) assert events[3].model == m2 def test_notification_of_title(self): d = document.Document() assert not d.roots assert d.title == document.DEFAULT_TITLE events = [] def listener(event): events.append(event) d.on_change(listener) d.title = "Foo" assert d.title == "Foo" assert len(events) == 1 assert isinstance(events[0], TitleChangedEvent) assert events[0].document is d assert events[0].title == "Foo" def test_add_remove_periodic_callback(self): d = document.Document() events = [] def listener(event): events.append(event) d.on_change(listener) assert len(d.session_callbacks) == 0 assert not events def cb(): pass callback_obj = d.add_periodic_callback(cb, 1) assert len(d.session_callbacks) == len(events) == 1 assert isinstance(events[0], SessionCallbackAdded) assert callback_obj == d.session_callbacks[0] == events[0].callback assert callback_obj.period == 1 d.remove_periodic_callback(callback_obj) assert len(d.session_callbacks) == 0 assert len(events) == 2 assert isinstance(events[0], SessionCallbackAdded) assert isinstance(events[1], SessionCallbackRemoved) def test_add_remove_timeout_callback(self): d = document.Document() events = [] def listener(event): events.append(event) d.on_change(listener) assert len(d.session_callbacks) == 0 assert not events def cb(): pass callback_obj = d.add_timeout_callback(cb, 1) assert len(d.session_callbacks) == len(events) == 1 assert isinstance(events[0], SessionCallbackAdded) assert callback_obj == d.session_callbacks[0] == events[0].callback assert callback_obj.timeout == 1 d.remove_timeout_callback(callback_obj) assert len(d.session_callbacks) == 0 assert len(events) == 2 assert isinstance(events[0], SessionCallbackAdded) assert isinstance(events[1], SessionCallbackRemoved) def test_add_partial_callback(self): from functools import partial d = document.Document() events = [] def listener(event): events.append(event) d.on_change(listener) assert len(d.session_callbacks) == 0 assert not events def _cb(): pass cb = partial(_cb) callback_obj = d.add_timeout_callback(cb, 1) assert len(d.session_callbacks) == len(events) == 1 assert isinstance(events[0], SessionCallbackAdded) assert callback_obj == d.session_callbacks[0] == events[0].callback assert callback_obj.timeout == 1 def test_add_remove_next_tick_callback(self): d = document.Document() events = [] def listener(event): events.append(event) d.on_change(listener) assert len(d.session_callbacks) == 0 assert not events def cb(): pass callback_obj = d.add_next_tick_callback(cb) assert len(d.session_callbacks) == len(events) == 1 assert isinstance(events[0], SessionCallbackAdded) assert callback_obj == d.session_callbacks[0] == events[0].callback d.remove_next_tick_callback(callback_obj) assert len(d.session_callbacks) == 0 assert 
len(events) == 2 assert isinstance(events[0], SessionCallbackAdded) assert isinstance(events[1], SessionCallbackRemoved) def test_periodic_callback_gets_curdoc(self): d = document.Document() assert curdoc() is not d curdoc_from_cb = [] def cb(): curdoc_from_cb.append(curdoc()) callback_obj = d.add_periodic_callback(cb, 1) callback_obj.callback() assert len(curdoc_from_cb) == 1 assert curdoc_from_cb[0] is d def test_timeout_callback_gets_curdoc(self): d = document.Document() assert curdoc() is not d curdoc_from_cb = [] def cb(): curdoc_from_cb.append(curdoc()) callback_obj = d.add_timeout_callback(cb, 1) callback_obj.callback() assert len(curdoc_from_cb) == 1 assert curdoc_from_cb[0] is d def test_next_tick_callback_gets_curdoc(self): d = document.Document() assert curdoc() is not d curdoc_from_cb = [] def cb(): curdoc_from_cb.append(curdoc()) callback_obj = d.add_next_tick_callback(cb) callback_obj.callback() assert len(curdoc_from_cb) == 1 assert curdoc_from_cb[0] is d def test_model_callback_gets_curdoc(self): d = document.Document() m = AnotherModelInTestDocument(bar=42) d.add_root(m) assert curdoc() is not d curdoc_from_cb = [] def cb(attr, old, new): curdoc_from_cb.append(curdoc()) m.on_change('bar', cb) m.bar = 43 assert len(curdoc_from_cb) == 1 assert curdoc_from_cb[0] is d def test_clear(self): d = document.Document() assert not d.roots assert d.title == document.DEFAULT_TITLE d.add_root(AnotherModelInTestDocument()) d.add_root(AnotherModelInTestDocument()) d.title = "Foo" assert len(d.roots) == 2 assert d.title == "Foo" d.clear() assert not d.roots assert not d._all_models assert d.title == "Foo" # do not reset title def test_serialization_one_model(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 root1 = SomeModelInTestDocument() d.add_root(root1) d.title = "Foo" json = d.to_json_string() copy = document.Document.from_json_string(json) assert len(copy.roots) == 1 assert copy.title == "Foo" def test_serialization_more_models(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 root1 = SomeModelInTestDocument(foo=42) root2 = SomeModelInTestDocument(foo=43) child1 = SomeModelInTestDocument(foo=44) root1.child = child1 root2.child = child1 d.add_root(root1) d.add_root(root2) assert len(d.roots) == 2 json = d.to_json_string() copy = document.Document.from_json_string(json) assert len(copy.roots) == 2 foos = [] for r in copy.roots: foos.append(r.foo) foos.sort() assert [42,43] == foos some_root = next(iter(copy.roots)) assert some_root.child.foo == 44 def test_serialization_has_version(self): from bokeh import __version__ d = document.Document() json = d.to_json() assert json['version'] == __version__ def test_patch_integer_property(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 root1 = SomeModelInTestDocument(foo=42) root2 = SomeModelInTestDocument(foo=43) child1 = SomeModelInTestDocument(foo=44) root1.child = child1 root2.child = child1 d.add_root(root1) d.add_root(root2) assert len(d.roots) == 2 event1 = ModelChangedEvent(d, root1, 'foo', root1.foo, 57, 57) patch1, buffers = process_document_events([event1]) d.apply_json_patch_string(patch1) assert root1.foo == 57 event2 = ModelChangedEvent(d, child1, 'foo', child1.foo, 67, 67) patch2, buffers = process_document_events([event2]) d.apply_json_patch_string(patch2) assert child1.foo == 67 def test_patch_spec_property(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 root1 = ModelWithSpecInTestDocument(foo=42) 
d.add_root(root1) assert len(d.roots) == 1 def patch_test(new_value): serializable_new = root1.lookup('foo').property.to_serializable(root1, 'foo', new_value) event1 = ModelChangedEvent(d, root1, 'foo', root1.foo, new_value, serializable_new) patch1, buffers = process_document_events([event1]) d.apply_json_patch_string(patch1) if isinstance(new_value, dict): expected = copy(new_value) if 'units' not in expected: expected['units'] = root1.foo_units assert expected == root1.lookup('foo').serializable_value(root1) else: assert new_value == root1.foo patch_test(57) assert 'data' == root1.foo_units patch_test(dict(value=58)) assert 'data' == root1.foo_units patch_test(dict(value=58, units='screen')) assert 'screen' == root1.foo_units patch_test(dict(value=59, units='screen')) assert 'screen' == root1.foo_units patch_test(dict(value=59, units='data')) assert 'data' == root1.foo_units patch_test(dict(value=60, units='data')) assert 'data' == root1.foo_units patch_test(dict(value=60, units='data')) assert 'data' == root1.foo_units patch_test(61) assert 'data' == root1.foo_units root1.foo = "a_string" # so "woot" gets set as a string patch_test("woot") assert 'data' == root1.foo_units patch_test(dict(field="woot2")) assert 'data' == root1.foo_units patch_test(dict(field="woot2", units='screen')) assert 'screen' == root1.foo_units patch_test(dict(field="woot3")) assert 'screen' == root1.foo_units patch_test(dict(value=70)) assert 'screen' == root1.foo_units root1.foo = 123 # so 71 gets set as a number patch_test(71) assert 'screen' == root1.foo_units def test_patch_reference_property(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 root1 = SomeModelInTestDocument(foo=42) root2 = SomeModelInTestDocument(foo=43) child1 = SomeModelInTestDocument(foo=44) child2 = SomeModelInTestDocument(foo=45) child3 = SomeModelInTestDocument(foo=46, child=child2) root1.child = child1 root2.child = child1 d.add_root(root1) d.add_root(root2) assert len(d.roots) == 2 assert child1.id in d._all_models assert child2.id not in d._all_models assert child3.id not in d._all_models event1 = ModelChangedEvent(d, root1, 'child', root1.child, child3, child3) patch1, buffers = process_document_events([event1]) d.apply_json_patch_string(patch1) assert root1.child.id == child3.id assert root1.child.child.id == child2.id assert child1.id in d._all_models assert child2.id in d._all_models assert child3.id in d._all_models # put it back how it was before event2 = ModelChangedEvent(d, root1, 'child', root1.child, child1, child1) patch2, buffers = process_document_events([event2]) d.apply_json_patch_string(patch2) assert root1.child.id == child1.id assert root1.child.child is None assert child1.id in d._all_models assert child2.id not in d._all_models assert child3.id not in d._all_models def test_patch_two_properties_at_once(self): d = document.Document() assert not d.roots assert len(d._all_models) == 0 root1 = SomeModelInTestDocument(foo=42) child1 = SomeModelInTestDocument(foo=43) root1.child = child1 d.add_root(root1) assert len(d.roots) == 1 assert root1.child == child1 assert root1.foo == 42 assert root1.child.foo == 43 child2 = SomeModelInTestDocument(foo=44) event1 = ModelChangedEvent(d, root1, 'foo', root1.foo, 57, 57) event2 = ModelChangedEvent(d, root1, 'child', root1.child, child2, child2) patch1, buffers = process_document_events([event1, event2]) d.apply_json_patch_string(patch1) assert root1.foo == 57 assert root1.child.foo == 44 # a more realistic set of models instead of fake models def 
test_scatter(self): from bokeh.io.doc import set_curdoc from bokeh.plotting import figure import numpy as np d = document.Document() set_curdoc(d) assert not d.roots assert len(d._all_models) == 0 p1 = figure(tools=[]) N = 10 x = np.linspace(0, 4 * np.pi, N) y = np.sin(x) p1.scatter(x, y, color="#FF00FF", nonselection_fill_color="#FFFF00", nonselection_fill_alpha=1) # figure does not automatically add itself to the document d.add_root(p1) assert len(d.roots) == 1 def test_event_handles_new_callbacks_in_event_callback(self): from bokeh.models import Button d = document.Document() button1 = Button(label="1") button2 = Button(label="2") def clicked_1(): button2.on_click(clicked_2) d.add_root(button2) def clicked_2(): pass button1.on_click(clicked_1) d.add_root(button1) event_json = json.dumps({"event_name":"button_click","event_values":{"model_id":button1.id}}) try: d.apply_json_event(event_json) except RuntimeError: pytest.fail("apply_json_event probably did not copy models before modifying") # TODO test serialize/deserialize with list-and-dict-valued properties # TODO test replace_with_json #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #----------------------------------------------------------------------------- # needed for caplog tests to function basicConfig()
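The three *_gets_curdoc tests above all pin down the same contract: while a document callback runs, curdoc() returns the document the callback was registered on. A minimal, hypothetical Bokeh server app leaning on that contract might look like the sketch below; the data source, update period, and app structure are illustrative only and are not taken from the test suite.

# Sketch of the curdoc()-inside-callback contract exercised by the tests above.
# Intended to run under `bokeh serve app.py`; names like `source` are assumptions.
from bokeh.io import curdoc
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure

source = ColumnDataSource(data=dict(x=[0], y=[0]))
fig = figure()
fig.scatter('x', 'y', source=source)

def update():
    # Inside a registered callback, curdoc() is the document it was added to.
    assert curdoc() is doc
    source.stream(dict(x=[len(source.data['x'])], y=[0]))

doc = curdoc()
doc.add_root(fig)
doc.add_periodic_callback(update, 1000)  # period in milliseconds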
import logging
from typing import Dict, List, Optional

from bitchia.consensus.block_record import BlockRecord
from bitchia.consensus.blockchain_interface import BlockchainInterface
from bitchia.types.blockchain_format.sized_bytes import bytes32
from bitchia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from bitchia.types.header_block import HeaderBlock
from bitchia.types.weight_proof import SubEpochChallengeSegment, SubEpochSegments
from bitchia.util.ints import uint32


class BlockCache(BlockchainInterface):
    """In-memory BlockchainInterface implementation backed by plain dicts."""

    def __init__(
        self,
        blocks: Dict[bytes32, BlockRecord],
        headers: Optional[Dict[bytes32, HeaderBlock]] = None,
        height_to_hash: Optional[Dict[uint32, bytes32]] = None,
        sub_epoch_summaries: Optional[Dict[uint32, SubEpochSummary]] = None,
    ):
        if sub_epoch_summaries is None:
            sub_epoch_summaries = {}
        if height_to_hash is None:
            height_to_hash = {}
        if headers is None:
            headers = {}
        self._block_records = blocks
        self._headers = headers
        self._height_to_hash = height_to_hash
        self._sub_epoch_summaries = sub_epoch_summaries
        self._sub_epoch_segments: Dict[uint32, SubEpochSegments] = {}
        self.log = logging.getLogger(__name__)

    def block_record(self, header_hash: bytes32) -> BlockRecord:
        return self._block_records[header_hash]

    def height_to_block_record(self, height: uint32, check_db: bool = False) -> BlockRecord:
        # Raises KeyError if the height is not present in the cache.
        header_hash = self.height_to_hash(height)
        return self.block_record(header_hash)

    def get_ses_heights(self) -> List[uint32]:
        return sorted(self._sub_epoch_summaries.keys())

    def get_ses(self, height: uint32) -> SubEpochSummary:
        return self._sub_epoch_summaries[height]

    def height_to_hash(self, height: uint32) -> Optional[bytes32]:
        if height not in self._height_to_hash:
            self.log.warning(f"could not find height in cache {height}")
            return None
        return self._height_to_hash[height]

    def contains_block(self, header_hash: bytes32) -> bool:
        return header_hash in self._block_records

    def contains_height(self, height: uint32) -> bool:
        return height in self._height_to_hash

    async def get_block_records_in_range(self, start: int, stop: int) -> Dict[bytes32, BlockRecord]:
        # Range arguments are ignored; the full cache is returned.
        return self._block_records

    async def get_block_records_at(self, heights: List[uint32]) -> List[BlockRecord]:
        block_records: List[BlockRecord] = []
        for height in heights:
            block_records.append(self.height_to_block_record(height))
        return block_records

    async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]:
        return self._block_records[header_hash]

    def remove_block_record(self, header_hash: bytes32):
        del self._block_records[header_hash]

    def add_block_record(self, block: BlockRecord):
        self._block_records[block.header_hash] = block

    async def get_header_blocks_in_range(
        self, start: int, stop: int, tx_filter: bool = True
    ) -> Dict[bytes32, HeaderBlock]:
        return self._headers

    async def persist_sub_epoch_challenge_segments(
        self, sub_epoch_summary_height: uint32, segments: List[SubEpochChallengeSegment]
    ):
        self._sub_epoch_segments[sub_epoch_summary_height] = SubEpochSegments(segments)

    async def get_sub_epoch_challenge_segments(
        self,
        sub_epoch_summary_height: uint32,
    ) -> Optional[List[SubEpochChallengeSegment]]:
        segments = self._sub_epoch_segments.get(sub_epoch_summary_height)
        if segments is None:
            return None
        return segments.challenge_segments
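A brief usage sketch, not part of the bitchia codebase: BlockCache only wraps the dicts it is given, so an empty cache simply reports misses. Constructing real BlockRecord objects is out of scope here, so the example sticks to the lookup methods defined above.

# Minimal sketch, assuming the BlockCache class above is in scope; the empty dict
# stands in for real BlockRecord data, which is not constructed here.
from bitchia.util.ints import uint32

cache = BlockCache(blocks={})  # headers / height_to_hash default to empty dicts
assert cache.contains_height(uint32(0)) is False
assert cache.height_to_hash(uint32(0)) is None  # also logs a "could not find height" warning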
(window.webpackJsonp=window.webpackJsonp||[]).push([[27],{324:function(t,a,s){"use strict";s.r(a);var n=s(2),e=Object(n.a)({},function(){var t=this,a=t.$createElement,s=t._self._c||a;return s("ContentSlotsDistributor",{attrs:{"slot-key":t.$parent.slotKey}},[s("h1",{attrs:{id:"grouped-table"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#grouped-table","aria-hidden":"true"}},[t._v("#")]),t._v(" Grouped Table")]),t._v(" "),s("p",[t._v("To create grouped rows, you need two things.")]),t._v(" "),s("h4",{attrs:{id:"_1-add-group-options-to-table-component"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_1-add-group-options-to-table-component","aria-hidden":"true"}},[t._v("#")]),t._v(" 1. Add group-options to table component")]),t._v(" "),s("div",{staticClass:"language-html extra-class"},[s("pre",{pre:!0,attrs:{class:"language-html"}},[s("code",[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("vue-good-table")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":columns")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("columns"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":rows")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("rows"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":group-options")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("{\n enabled: true\n }"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("vue-good-table")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),s("h4",{attrs:{id:"_2-make-sure-the-rows-are-formatted-correctly-grouped-rows-need-to-be-nested-within-header-rows-containing-data-rows-in-their-children-property-for-example"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_2-make-sure-the-rows-are-formatted-correctly-grouped-rows-need-to-be-nested-within-header-rows-containing-data-rows-in-their-children-property-for-example","aria-hidden":"true"}},[t._v("#")]),t._v(" 2. Make sure the rows are formatted correctly. Grouped rows need to be nested within header rows containing data rows in their children property. 
For example:")]),t._v(" "),s("div",{staticClass:"language-js extra-class"},[s("pre",{pre:!0,attrs:{class:"language-js"}},[s("code",[t._v("rows"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("[")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n mode"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'span'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token comment"}},[t._v("// span means this header will span all columns")]),t._v("\n label"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'Mammal'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token comment"}},[t._v("// this is the label that'll be used for the header")]),t._v("\n html"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token boolean"}},[t._v("false")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token comment"}},[t._v("// if this is true, label will be rendered as html")]),t._v("\n children"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("[")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" name"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'Elephant'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" diet"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'herbivore'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" count"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token number"}},[t._v("5")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" name"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'Cat'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" diet"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'carnivore'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" count"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token number"}},[t._v("28")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("]")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("]")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n")])])]),s("grouped-table",{attrs:{options:{enabled:!0}}}),t._v(" 
"),s("h4",{attrs:{id:"_3-sometimes-you-might-want-a-summary-row-instead-of-a-header-row-for-example-if-you-want-to-show-total-count-for-your-group"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_3-sometimes-you-might-want-a-summary-row-instead-of-a-header-row-for-example-if-you-want-to-show-total-count-for-your-group","aria-hidden":"true"}},[t._v("#")]),t._v(" 3. Sometimes, you might want a summary row instead of a header row. For example, if you want to show total count for your group")]),t._v(" "),s("div",{staticClass:"language-javascript extra-class"},[s("pre",{pre:!0,attrs:{class:"language-javascript"}},[s("code",[t._v("rows"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("[")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n name"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'Mammals Total'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token comment"}},[t._v("// this is the label that'll be used for the header")]),t._v("\n diet"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("undefined")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n count"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("''")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token comment"}},[t._v("// total count will be displayed here")]),t._v("\n children"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("[")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" name"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'Elephant'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" diet"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'herbivore'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" count"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token number"}},[t._v("5")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" name"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'Cat'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" diet"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'carnivore'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" count"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token number"}},[t._v("28")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token 
punctuation"}},[t._v("]")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("]")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n")])])]),s("h4",{attrs:{id:"_4-if-you-want-the-header-summary-row-to-show-up-at-the-bottom-of-the-group-you-can-specify-that-in-the-group-options-property-of-the-table"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_4-if-you-want-the-header-summary-row-to-show-up-at-the-bottom-of-the-group-you-can-specify-that-in-the-group-options-property-of-the-table","aria-hidden":"true"}},[t._v("#")]),t._v(" 4. If you want the header/summary row to show up at the bottom of the group, you can specify that in the group-options property of the table.")]),t._v(" "),s("div",{staticClass:"language-html extra-class"},[s("pre",{pre:!0,attrs:{class:"language-html"}},[s("code",[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("vue-good-table")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":columns")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("columns"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":rows")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("rows"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":group-options")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("{\n enabled: true,\n headerPosition: 'bottom'\n }"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("vue-good-table")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),s("grouped-table",{attrs:{options:{enabled:!0,headerPosition:"bottom"}}}),t._v(" "),s("h4",{attrs:{id:"_5-what-if-you-wanted-to-add-a-total-count-in-summary-rows"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_5-what-if-you-wanted-to-add-a-total-count-in-summary-rows","aria-hidden":"true"}},[t._v("#")]),t._v(" 5. What if you wanted to add a total count in summary rows?")]),t._v(" "),s("p",[t._v("In your column definition add a property, "),s("code",[t._v("headerField")]),t._v(". This is just like "),s("code",[t._v("field")]),t._v(" property but for summary/header rows only. 
So lets say we wanted to add a "),s("strong",[t._v("sum function")]),t._v(" to this field.")]),t._v(" "),s("div",{staticClass:"language-js extra-class"},[s("pre",{pre:!0,attrs:{class:"language-js"}},[s("code",[s("span",{pre:!0,attrs:{class:"token comment"}},[t._v("// in columns")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n label"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'Count'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n field"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'count'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n headerField"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("sumCount"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n type"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token string"}},[t._v("'number'")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n\n"),s("span",{pre:!0,attrs:{class:"token comment"}},[t._v("// in methods we define sumCount")]),t._v("\nmethods"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token function-variable function"}},[t._v("sumCount")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{pre:!0,attrs:{class:"token parameter"}},[t._v("rowObj")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n console"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),s("span",{pre:!0,attrs:{class:"token function"}},[t._v("log")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("rowObj"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" sum "),s("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token number"}},[t._v("0")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("for")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" i "),s("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token number"}},[t._v("0")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v(" i "),s("span",{pre:!0,attrs:{class:"token operator"}},[t._v("<")]),t._v(" rowObj"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("children"),s("span",{pre:!0,attrs:{class:"token 
punctuation"}},[t._v(".")]),t._v("length"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v(" i"),s("span",{pre:!0,attrs:{class:"token operator"}},[t._v("++")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n sum "),s("span",{pre:!0,attrs:{class:"token operator"}},[t._v("+=")]),t._v(" rowObj"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("children"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("[")]),t._v("i"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("]")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("count"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("return")]),t._v(" sum"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v("\n\n")])])]),s("h2",{attrs:{id:"customizing-header-row"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#customizing-header-row","aria-hidden":"true"}},[t._v("#")]),t._v(" Customizing Header Row")]),t._v(" "),s("p",[t._v("If you want more control over what the header row looks like, you can use slots the same way you "),s("router-link",{attrs:{to:"/guide/advanced/#custom-row-template"}},[t._v("customize rows")]),t._v(". For example if you want to add a button in the header row or something, this would be the way to do it.")],1),t._v(" "),s("h3",{attrs:{id:"when-mode-is-span"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#when-mode-is-span","aria-hidden":"true"}},[t._v("#")]),t._v(" When mode is 'span'")]),t._v(" "),s("p",[t._v("In this case, the header row spans across all columns")]),t._v(" "),s("div",{staticClass:"language-vue extra-class"},[s("pre",{pre:!0,attrs:{class:"language-vue"}},[s("code",[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("vue-good-table")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":columns")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("columns"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":rows")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("rows"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":group-options")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("{\n enabled: true,\n headerPosition: 'top'\n }"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token 
punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("template")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("slot")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("table-header-row"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v(" "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("slot-scope")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("props"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("span")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("class")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("my-fancy-class"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n {{ props.row.label }}\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("span")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("template")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("vue-good-table")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),s("grouped-custom-span",{attrs:{options:{enabled:!0,headerPosition:"top"}}}),t._v(" "),s("h3",{attrs:{id:"when-mode-is-not-span"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#when-mode-is-not-span","aria-hidden":"true"}},[t._v("#")]),t._v(" When mode is not 'span'")]),t._v(" "),s("p",[t._v("In this case header row expects a value for each column")]),t._v(" "),s("div",{staticClass:"language-vue extra-class"},[s("pre",{pre:!0,attrs:{class:"language-vue"}},[s("code",[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("vue-good-table")]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":columns")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("columns"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":rows")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token 
punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("rows"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v(":group-options")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("{\n enabled: true,\n headerPosition: 'top'\n }"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("template")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("slot")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("table-header-row"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v(" "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("slot-scope")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("props"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("span")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("v-if")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("props.column.field == 'action'"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("button")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("class")]),s("span",{pre:!0,attrs:{class:"token attr-value"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("fancy-btn"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("Action"),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("button")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("span")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("span")]),t._v(" "),s("span",{pre:!0,attrs:{class:"token 
attr-name"}},[t._v("v-else")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n {{ props.formattedRow[props.column.field] }}\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("span")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("template")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("vue-good-table")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),s("grouped-custom",{attrs:{options:{enabled:!0,headerPosition:"top"}}}),t._v(" "),s("div",{staticClass:"tip custom-block"},[s("ul",[s("li",[t._v("The original row object can be accessed via "),s("code",[t._v("props.row")])]),t._v(" "),s("li",[t._v("The column object can be accessed via "),s("code",[t._v("props.column")])]),t._v(" "),s("li",[t._v("You can access the formatted row data (for example - formatted date) via "),s("code",[t._v("props.formattedRow")])])])]),t._v(" "),s("h2",{attrs:{id:"collapsable-rows"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#collapsable-rows","aria-hidden":"true"}},[t._v("#")]),t._v(" Collapsable Rows")]),t._v(" "),s("p",[t._v('To allow the row to collapse and expand you can use the groupOption "collapsable". You can either pass in a boolean or a number.\nIf '),s("code",[t._v("collapsable")]),t._v(" is set to "),s("code",[t._v("true")]),t._v(" then it will default to making the first column collapsable. 
Alternatively, you can specify the column index number.\nIf you only add new rows to your table at the end, then the expanded or collapsed state of your rows will be maintained.\nHowever if you need to insert rows before the last one, you can pass in "),s("code",[t._v("rowKey")]),t._v(" inside of "),s("code",[t._v("groupOptions")]),t._v(" with a unique identifier for your rows.\nThe expanded and collapsed state will then be maintained.")]),t._v(" "),s("div",{staticClass:"language-html extra-class"},[s("pre",{pre:!0,attrs:{class:"language-html"}},[s("code",[t._v('<vue-good-table\n ref="myCustomTable"\n :columns="columns"\n :rows="rows"\n :group-options="{\n enabled: true,\n rowKey="id"\n collapsable: true // or column index\n }"\n>\n'),s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token tag"}},[s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("vue-good-table")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),s("p",[t._v("To expand/collapse all you can use the method called "),s("code",[t._v("expandAll")]),t._v(" or "),s("code",[t._v("collapseAll")]),t._v(".")]),t._v(" "),s("div",{staticClass:"language-js extra-class"},[s("pre",{pre:!0,attrs:{class:"language-js"}},[s("code",[s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("myCustomTable"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),s("span",{pre:!0,attrs:{class:"token function"}},[t._v("expandAll")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n"),s("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("myCustomTable"),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),s("span",{pre:!0,attrs:{class:"token function"}},[t._v("collapseAll")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),s("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n")])])]),s("ul",[s("li",[s("strong",[t._v("Live Demo:")]),t._v(" https://jsfiddle.net/nb6fcqs7")])])],1)},[],!1,null,null,null);a.default=e.exports}}]);
module.exports = require('./lib/fontface-loader');
ismir2020featsets = {} ########################################################################## ismir2020featsets['ismir2020_all_lyr_gt'] = { 'scaledegreefirst', 'scaledegreesecond', 'scaledegreethird', 'scaledegreefourth', 'scaledegreefifth', 'diatonicpitchfirst', 'diatonicpitchsecond', 'diatonicpitchthird', 'diatonicpitchfourth', 'diatonicpitchfifth', 'midipitchfirst', 'midipitchsecond', 'midipitchthird', 'midipitchfourth', 'midipitchfifth', 'intervalfirst', 'intervalsecond', 'intervalthird', 'intervalfourth', 'intervalfifth', 'VosCenterGravityfirst', 'VosCenterGravitysecond', 'VosCenterGravitythird', 'VosCenterGravityfourth', 'VosCenterGravityfifth', 'VosHarmonyfirst', 'VosHarmonysecond', 'VosHarmonythird', 'VosHarmonyfourth', 'VosHarmonyfifth', 'informationcontentfirst', 'informationcontentsecond', 'informationcontentthird', 'informationcontentfourth', 'informationcontentfifth', 'contourfirst', 'contoursecond', 'contourthird', 'contourfourth', 'contourfifth', 'registraldirectionchange', 'largetosmall', 'contourreversal', 'isascending', 'isdescending', 'ambitus', 'containsleap', 'meternumerator', 'meterdenominator', 'nextisrestfirst', 'nextisrestsecond', 'nextisrestthird', 'nextisrestfourth', 'nextisrestfifth', 'beatstrengthfirst', 'beatstrengthsecond', 'beatstrengththird', 'beatstrengthfourth', 'beatstrengthfifth', 'IOIbeatfractionfirst', 'IOIbeatfractionsecond', 'IOIbeatfractionthird', 'IOIbeatfractionfourth', 'IOIbeatfractionfifth', 'durationcummulation', 'onthebeatfirst', 'onthebeatsecond', 'onthebeatthird', 'onthebeatfourth', 'onthebeatfifth', 'completesmeasurephrase', 'completesmeasuresong', 'completesbeatphrase', 'completesbeatsong', 'grouperfirst', 'groupersecond', 'grouperthird', 'grouperfourth', 'grouperfifth', 'noteoffset', 'beatoffset', 'beatduration', 'beatcount', 'gprsumfirst', 'gprsumsecond', 'gprsumthird', 'gprsumfourth', 'gprsumfifth', 'pitchproximityfirst', 'pitchproximitysecond', 'pitchproximitythird', 'pitchproximityfourth', 'pitchproximityfifth', 'pitchreversalfirst', 'pitchreversalsecond', 'pitchreversalthird', 'pitchreversalfourth', 'pitchreversalfifth', 'lbdmfirst', 'lbdmsecond', 'lbdmthird', 'lbdmfourth', 'lbdmfifth', 'wordstressfirst', 'wordstresssecond', 'wordstressthird', 'wordstressfourth', 'wordstressfifth', 'rhymesfirst', 'rhymessecond', 'rhymesthird', 'rhymesfourth', 'rhymesfifth', 'rhyme_noteoffset', 'rhyme_beatoffset', 'noncontentwordfirst', 'noncontentwordsecond', 'noncontentwordthird', 'noncontentwordfourth', 'noncontentwordfifth', 'wordendfirst', 'wordendsecond', 'wordendthird', 'wordendfourth', 'wordendfifth', 'melismastatefirst', 'melismastatesecond', 'melismastatethird', 'melismastatefourth', 'melismastatefifth', 'intervalsizefirstsecond', 'intervalsizesecondthird', 'intervalsizethirdfourth', 'intervalsizefourthfifth', 'intervaldirfirstsecond', 'intervaldirsecondthird', 'intervaldirthirdfourth', 'intervaldirfourthfifth', 'diatonicpitchfirstsecond', 'diatonicpitchsecondthird', 'diatonicpitchthirdfourth', 'diatonicpitchfourthfifth', 'VosHarmonyfirstsecond', 'VosHarmonysecondthird', 'VosHarmonythirdfourth', 'VosHarmonyfourthfifth', 'beatstrengthfirstsecond', 'beatstrengthsecondthird', 'beatstrengththirdfourth', 'beatstrengthfourthfifth', 'IOIbeatfractionfirstsecond', 'IOIbeatfractionsecondthird', 'IOIbeatfractionthirdfourth', 'IOIbeatfractionfourthfifth', 'wordstressfirstsecond', 'wordstresssecondthird', 'wordstressthirdfourth', 'wordstressfourthfifth', 'informationcontentfirstsecond', 'informationcontentsecondthird', 
'informationcontentthirdfourth', 'informationcontentfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_all_gt'] = { 'scaledegreefirst', 'scaledegreesecond', 'scaledegreethird', 'scaledegreefourth', 'scaledegreefifth', 'diatonicpitchfirst', 'diatonicpitchsecond', 'diatonicpitchthird', 'diatonicpitchfourth', 'diatonicpitchfifth', 'midipitchfirst', 'midipitchsecond', 'midipitchthird', 'midipitchfourth', 'midipitchfifth', 'intervalfirst', 'intervalsecond', 'intervalthird', 'intervalfourth', 'intervalfifth', 'VosCenterGravityfirst', 'VosCenterGravitysecond', 'VosCenterGravitythird', 'VosCenterGravityfourth', 'VosCenterGravityfifth', 'VosHarmonyfirst', 'VosHarmonysecond', 'VosHarmonythird', 'VosHarmonyfourth', 'VosHarmonyfifth', 'informationcontentfirst', 'informationcontentsecond', 'informationcontentthird', 'informationcontentfourth', 'informationcontentfifth', 'contourfirst', 'contoursecond', 'contourthird', 'contourfourth', 'contourfifth', 'registraldirectionchange', 'largetosmall', 'contourreversal', 'isascending', 'isdescending', 'ambitus', 'containsleap', 'meternumerator', 'meterdenominator', 'nextisrestfirst', 'nextisrestsecond', 'nextisrestthird', 'nextisrestfourth', 'nextisrestfifth', 'beatstrengthfirst', 'beatstrengthsecond', 'beatstrengththird', 'beatstrengthfourth', 'beatstrengthfifth', 'IOIbeatfractionfirst', 'IOIbeatfractionsecond', 'IOIbeatfractionthird', 'IOIbeatfractionfourth', 'IOIbeatfractionfifth', 'durationcummulation', 'onthebeatfirst', 'onthebeatsecond', 'onthebeatthird', 'onthebeatfourth', 'onthebeatfifth', 'completesmeasurephrase', 'completesmeasuresong', 'completesbeatphrase', 'completesbeatsong', 'grouperfirst', 'groupersecond', 'grouperthird', 'grouperfourth', 'grouperfifth', 'noteoffset', 'beatoffset', 'beatduration', 'beatcount', 'gprsumfirst', 'gprsumsecond', 'gprsumthird', 'gprsumfourth', 'gprsumfifth', 'pitchproximityfirst', 'pitchproximitysecond', 'pitchproximitythird', 'pitchproximityfourth', 'pitchproximityfifth', 'pitchreversalfirst', 'pitchreversalsecond', 'pitchreversalthird', 'pitchreversalfourth', 'pitchreversalfifth', 'lbdmfirst', 'lbdmsecond', 'lbdmthird', 'lbdmfourth', 'lbdmfifth', 'intervalsizefirstsecond', 'intervalsizesecondthird', 'intervalsizethirdfourth', 'intervalsizefourthfifth', 'intervaldirfirstsecond', 'intervaldirsecondthird', 'intervaldirthirdfourth', 'intervaldirfourthfifth', 'diatonicpitchfirstsecond', 'diatonicpitchsecondthird', 'diatonicpitchthirdfourth', 'diatonicpitchfourthfifth', 'VosHarmonyfirstsecond', 'VosHarmonysecondthird', 'VosHarmonythirdfourth', 'VosHarmonyfourthfifth', 'beatstrengthfirstsecond', 'beatstrengthsecondthird', 'beatstrengththirdfourth', 'beatstrengthfourthfifth', 'IOIbeatfractionfirstsecond', 'IOIbeatfractionsecondthird', 'IOIbeatfractionthirdfourth', 'IOIbeatfractionfourthfifth', 'informationcontentfirstsecond', 'informationcontentsecondthird', 'informationcontentthirdfourth', 'informationcontentfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_all_lyr'] = { 'scaledegreefirst', 'scaledegreesecond', 'scaledegreethird', 'scaledegreefourth', 'scaledegreefifth', 'diatonicpitchfirst', 'diatonicpitchsecond', 'diatonicpitchthird', 'diatonicpitchfourth', 'diatonicpitchfifth', 'midipitchfirst', 'midipitchsecond', 'midipitchthird', 'midipitchfourth', 'midipitchfifth', 'intervalfirst', 'intervalsecond', 'intervalthird', 'intervalfourth', 'intervalfifth', 'VosCenterGravityfirst', 
'VosCenterGravitysecond', 'VosCenterGravitythird', 'VosCenterGravityfourth', 'VosCenterGravityfifth', 'VosHarmonyfirst', 'VosHarmonysecond', 'VosHarmonythird', 'VosHarmonyfourth', 'VosHarmonyfifth', 'informationcontentfirst', 'informationcontentsecond', 'informationcontentthird', 'informationcontentfourth', 'informationcontentfifth', 'contourfirst', 'contoursecond', 'contourthird', 'contourfourth', 'contourfifth', 'registraldirectionchange', 'largetosmall', 'contourreversal', 'isascending', 'isdescending', 'ambitus', 'containsleap', 'meternumerator', 'meterdenominator', 'beatstrengthfirst', 'beatstrengthsecond', 'beatstrengththird', 'beatstrengthfourth', 'beatstrengthfifth', 'IOIbeatfractionfirst', 'IOIbeatfractionsecond', 'IOIbeatfractionthird', 'IOIbeatfractionfourth', 'IOIbeatfractionfifth', 'durationcummulation', 'onthebeatfirst', 'onthebeatsecond', 'onthebeatthird', 'onthebeatfourth', 'onthebeatfifth', 'completesmeasuresong', 'completesbeatsong', 'grouperfirst', 'groupersecond', 'grouperthird', 'grouperfourth', 'grouperfifth', 'beatduration', 'beatcount', 'gprsumfirst', 'gprsumsecond', 'gprsumthird', 'gprsumfourth', 'gprsumfifth', 'pitchproximityfirst', 'pitchproximitysecond', 'pitchproximitythird', 'pitchproximityfourth', 'pitchproximityfifth', 'pitchreversalfirst', 'pitchreversalsecond', 'pitchreversalthird', 'pitchreversalfourth', 'pitchreversalfifth', 'lbdmfirst', 'lbdmsecond', 'lbdmthird', 'lbdmfourth', 'lbdmfifth', 'wordstressfirst', 'wordstresssecond', 'wordstressthird', 'wordstressfourth', 'wordstressfifth', 'rhymesfirst', 'rhymessecond', 'rhymesthird', 'rhymesfourth', 'rhymesfifth', 'rhyme_noteoffset', 'rhyme_beatoffset', 'noncontentwordfirst', 'noncontentwordsecond', 'noncontentwordthird', 'noncontentwordfourth', 'noncontentwordfifth', 'wordendfirst', 'wordendsecond', 'wordendthird', 'wordendfourth', 'wordendfifth', 'melismastatefirst', 'melismastatesecond', 'melismastatethird', 'melismastatefourth', 'melismastatefifth', 'intervalsizefirstsecond', 'intervalsizesecondthird', 'intervalsizethirdfourth', 'intervalsizefourthfifth', 'intervaldirfirstsecond', 'intervaldirsecondthird', 'intervaldirthirdfourth', 'intervaldirfourthfifth', 'diatonicpitchfirstsecond', 'diatonicpitchsecondthird', 'diatonicpitchthirdfourth', 'diatonicpitchfourthfifth', 'VosHarmonyfirstsecond', 'VosHarmonysecondthird', 'VosHarmonythirdfourth', 'VosHarmonyfourthfifth', 'beatstrengthfirstsecond', 'beatstrengthsecondthird', 'beatstrengththirdfourth', 'beatstrengthfourthfifth', 'IOIbeatfractionfirstsecond', 'IOIbeatfractionsecondthird', 'IOIbeatfractionthirdfourth', 'IOIbeatfractionfourthfifth', 'wordstressfirstsecond', 'wordstresssecondthird', 'wordstressthirdfourth', 'wordstressfourthfifth', 'informationcontentfirstsecond', 'informationcontentsecondthird', 'informationcontentthirdfourth', 'informationcontentfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_all'] = { 'scaledegreefirst', 'scaledegreesecond', 'scaledegreethird', 'scaledegreefourth', 'scaledegreefifth', 'diatonicpitchfirst', 'diatonicpitchsecond', 'diatonicpitchthird', 'diatonicpitchfourth', 'diatonicpitchfifth', 'midipitchfirst', 'midipitchsecond', 'midipitchthird', 'midipitchfourth', 'midipitchfifth', 'intervalfirst', 'intervalsecond', 'intervalthird', 'intervalfourth', 'intervalfifth', 'VosCenterGravityfirst', 'VosCenterGravitysecond', 'VosCenterGravitythird', 'VosCenterGravityfourth', 'VosCenterGravityfifth', 'VosHarmonyfirst', 'VosHarmonysecond', 'VosHarmonythird', 
'VosHarmonyfourth', 'VosHarmonyfifth', 'informationcontentfirst', 'informationcontentsecond', 'informationcontentthird', 'informationcontentfourth', 'informationcontentfifth', 'contourfirst', 'contoursecond', 'contourthird', 'contourfourth', 'contourfifth', 'registraldirectionchange', 'largetosmall', 'contourreversal', 'isascending', 'isdescending', 'ambitus', 'containsleap', 'meternumerator', 'meterdenominator', 'beatstrengthfirst', 'beatstrengthsecond', 'beatstrengththird', 'beatstrengthfourth', 'beatstrengthfifth', 'IOIbeatfractionfirst', 'IOIbeatfractionsecond', 'IOIbeatfractionthird', 'IOIbeatfractionfourth', 'IOIbeatfractionfifth', 'durationcummulation', 'onthebeatfirst', 'onthebeatsecond', 'onthebeatthird', 'onthebeatfourth', 'onthebeatfifth', 'completesmeasuresong', 'completesbeatsong', 'grouperfirst', 'groupersecond', 'grouperthird', 'grouperfourth', 'grouperfifth', 'beatduration', 'beatcount', 'gprsumfirst', 'gprsumsecond', 'gprsumthird', 'gprsumfourth', 'gprsumfifth', 'pitchproximityfirst', 'pitchproximitysecond', 'pitchproximitythird', 'pitchproximityfourth', 'pitchproximityfifth', 'pitchreversalfirst', 'pitchreversalsecond', 'pitchreversalthird', 'pitchreversalfourth', 'pitchreversalfifth', 'lbdmfirst', 'lbdmsecond', 'lbdmthird', 'lbdmfourth', 'lbdmfifth', 'intervalsizefirstsecond', 'intervalsizesecondthird', 'intervalsizethirdfourth', 'intervalsizefourthfifth', 'intervaldirfirstsecond', 'intervaldirsecondthird', 'intervaldirthirdfourth', 'intervaldirfourthfifth', 'diatonicpitchfirstsecond', 'diatonicpitchsecondthird', 'diatonicpitchthirdfourth', 'diatonicpitchfourthfifth', 'VosHarmonyfirstsecond', 'VosHarmonysecondthird', 'VosHarmonythirdfourth', 'VosHarmonyfourthfifth', 'beatstrengthfirstsecond', 'beatstrengthsecondthird', 'beatstrengththirdfourth', 'beatstrengthfourthfifth', 'IOIbeatfractionfirstsecond', 'IOIbeatfractionsecondthird', 'IOIbeatfractionthirdfourth', 'IOIbeatfractionfourthfifth', 'informationcontentfirstsecond', 'informationcontentsecondthird', 'informationcontentthirdfourth', 'informationcontentfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_othermodels'] = { 'informationcontentfirst', 'informationcontentsecond', 'informationcontentthird', 'informationcontentfourth', 'informationcontentfifth', 'registraldirectionchange', 'largetosmall', 'contourreversal', 'beatstrengthfirst', 'beatstrengthsecond', 'beatstrengththird', 'beatstrengthfourth', 'beatstrengthfifth', 'durationcummulation', 'grouperfirst', 'groupersecond', 'grouperthird', 'grouperfourth', 'grouperfifth', 'gprsumfirst', 'gprsumsecond', 'gprsumthird', 'gprsumfourth', 'gprsumfifth', 'pitchproximityfirst', 'pitchproximitysecond', 'pitchproximitythird', 'pitchproximityfourth', 'pitchproximityfifth', 'pitchreversalfirst', 'pitchreversalsecond', 'pitchreversalthird', 'pitchreversalfourth', 'pitchreversalfifth', 'lbdmfirst', 'lbdmsecond', 'lbdmthird', 'lbdmfourth', 'lbdmfifth', 'intervalsizefirstsecond', 'intervalsizesecondthird', 'intervalsizethirdfourth', 'intervalsizefourthfifth', 'intervaldirfirstsecond', 'intervaldirsecondthird', 'intervaldirthirdfourth', 'intervaldirfourthfifth', 'informationcontentfirstsecond', 'informationcontentsecondthird', 'informationcontentthirdfourth', 'informationcontentfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_elementarylyrics'] = { 'wordstressfirst', 'wordstresssecond', 'wordstressthird', 'wordstressfourth', 'wordstressfifth', 
'rhymesfirst', 'rhymessecond', 'rhymesthird', 'rhymesfourth', 'rhymesfifth', 'rhyme_noteoffset', 'rhyme_beatoffset', 'noncontentwordfirst', 'noncontentwordsecond', 'noncontentwordthird', 'noncontentwordfourth', 'noncontentwordfifth', 'wordendfirst', 'wordendsecond', 'wordendthird', 'wordendfourth', 'wordendfifth', 'melismastatefirst', 'melismastatesecond', 'melismastatethird', 'melismastatefourth', 'melismastatefifth', 'wordstressfirstsecond', 'wordstresssecondthird', 'wordstressthirdfourth', 'wordstressfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_elementaryrhythm'] = { 'meternumerator', 'meterdenominator', 'beatstrengthfirst', 'beatstrengthsecond', 'beatstrengththird', 'beatstrengthfourth', 'beatstrengthfifth', 'IOIbeatfractionfirst', 'IOIbeatfractionsecond', 'IOIbeatfractionthird', 'IOIbeatfractionfourth', 'IOIbeatfractionfifth', 'durationcummulation', 'onthebeatfirst', 'onthebeatsecond', 'onthebeatthird', 'onthebeatfourth', 'onthebeatfifth', 'completesmeasuresong', 'completesbeatsong', 'beatduration', 'beatcount', 'beatstrengthfirstsecond', 'beatstrengthsecondthird', 'beatstrengththirdfourth', 'beatstrengthfourthfifth', 'IOIbeatfractionfirstsecond', 'IOIbeatfractionsecondthird', 'IOIbeatfractionthirdfourth', 'IOIbeatfractionfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_elementarypitch'] = { 'scaledegreefirst', 'scaledegreesecond', 'scaledegreethird', 'scaledegreefourth', 'scaledegreefifth', 'diatonicpitchfirst', 'diatonicpitchsecond', 'diatonicpitchthird', 'diatonicpitchfourth', 'diatonicpitchfifth', 'midipitchfirst', 'midipitchsecond', 'midipitchthird', 'midipitchfourth', 'midipitchfifth', 'intervalfirst', 'intervalsecond', 'intervalthird', 'intervalfourth', 'intervalfifth', 'VosCenterGravityfirst', 'VosCenterGravitysecond', 'VosCenterGravitythird', 'VosCenterGravityfourth', 'VosCenterGravityfifth', 'VosHarmonyfirst', 'VosHarmonysecond', 'VosHarmonythird', 'VosHarmonyfourth', 'VosHarmonyfifth', 'contourfirst', 'contoursecond', 'contourthird', 'contourfourth', 'contourfifth', 'registraldirectionchange', 'largetosmall', 'contourreversal', 'isascending', 'isdescending', 'ambitus', 'containsleap', 'intervalsizefirstsecond', 'intervalsizesecondthird', 'intervalsizethirdfourth', 'intervalsizefourthfifth', 'intervaldirfirstsecond', 'intervaldirsecondthird', 'intervaldirthirdfourth', 'intervaldirfourthfifth', 'diatonicpitchfirstsecond', 'diatonicpitchsecondthird', 'diatonicpitchthirdfourth', 'diatonicpitchfourthfifth', 'VosHarmonyfirstsecond', 'VosHarmonysecondthird', 'VosHarmonythirdfourth', 'VosHarmonyfourthfifth', } ########################################################################## ismir2020featsets['ismir2020_elementarypitchrhythm'] = ismir2020featsets['ismir2020_elementarypitch'] | ismir2020featsets['ismir2020_elementaryrhythm'] ismir2020featsets['ismir2020_elementaryall'] = ismir2020featsets['ismir2020_elementarypitchrhythm'] | ismir2020featsets['ismir2020_elementarylyrics'] #for k in ismir2020featsets.keys(): # print(k)
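Since each entry in ismir2020featsets is a plain Python set, restricting a feature matrix to one of these sets amounts to a column intersection. The sketch below assumes a hypothetical pandas DataFrame df whose columns carry these feature names; none of this helper code comes from the original experiment scripts.

# Hypothetical usage sketch: select the columns of a feature DataFrame that belong
# to one of the ISMIR 2020 feature sets defined above. `df` is assumed, not provided.
import pandas as pd

def select_featureset(df: pd.DataFrame, name: str) -> pd.DataFrame:
    wanted = ismir2020featsets[name]
    missing = wanted - set(df.columns)
    if missing:
        raise KeyError(f"DataFrame is missing {len(missing)} features, e.g. {sorted(missing)[:3]}")
    # Keep a stable column order for reproducibility.
    return df[sorted(wanted)]

# X = select_featureset(df, 'ismir2020_elementarypitch')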
/*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'specialchar', 'ru', {
    options: 'Выбор специального символа',
    title: 'Выберите специальный символ',
    toolbar: 'Вставить специальный символ'
} );
/* * This file is part of LibCSS * Licensed under the MIT License, * http://www.opensource.org/licenses/mit-license.php * Copyright 2009 John-Mark Bell <jmb@netsurf-browser.org> */ #include "bytecode/bytecode.h" #include "bytecode/opcodes.h" #include "select/propset.h" #include "select/propget.h" #include "utils/utils.h" #include "select/properties/properties.h" #include "select/properties/helpers.h" css_error css__cascade_font_size(uint32_t opv, css_style *style, css_select_state *state) { uint16_t value = CSS_FONT_SIZE_INHERIT; css_fixed size = 0; uint32_t unit = UNIT_PX; if (isInherit(opv) == false) { switch (getValue(opv)) { case FONT_SIZE_DIMENSION: value = CSS_FONT_SIZE_DIMENSION; size = *((css_fixed *) style->bytecode); advance_bytecode(style, sizeof(size)); unit = *((uint32_t *) style->bytecode); advance_bytecode(style, sizeof(unit)); break; case FONT_SIZE_XX_SMALL: value = CSS_FONT_SIZE_XX_SMALL; break; case FONT_SIZE_X_SMALL: value = CSS_FONT_SIZE_X_SMALL; break; case FONT_SIZE_SMALL: value = CSS_FONT_SIZE_SMALL; break; case FONT_SIZE_MEDIUM: value = CSS_FONT_SIZE_MEDIUM; break; case FONT_SIZE_LARGE: value = CSS_FONT_SIZE_LARGE; break; case FONT_SIZE_X_LARGE: value = CSS_FONT_SIZE_X_LARGE; break; case FONT_SIZE_XX_LARGE: value = CSS_FONT_SIZE_XX_LARGE; break; case FONT_SIZE_LARGER: value = CSS_FONT_SIZE_LARGER; break; case FONT_SIZE_SMALLER: value = CSS_FONT_SIZE_SMALLER; break; } } unit = css__to_css_unit(unit); if (css__outranks_existing(getOpcode(opv), isImportant(opv), state, isInherit(opv))) { return set_font_size(state->computed, value, size, unit); } return CSS_OK; } css_error css__set_font_size_from_hint(const css_hint *hint, css_computed_style *style) { return set_font_size(style, hint->status, hint->data.length.value, hint->data.length.unit); } css_error css__initial_font_size(css_select_state *state) { return set_font_size(state->computed, CSS_FONT_SIZE_MEDIUM, 0, CSS_UNIT_PX); } css_error css__compose_font_size(const css_computed_style *parent, const css_computed_style *child, css_computed_style *result) { css_fixed size = 0; css_unit unit = CSS_UNIT_PX; uint8_t type = get_font_size(child, &size, &unit); if (type == CSS_FONT_SIZE_INHERIT) { type = get_font_size(parent, &size, &unit); } return set_font_size(result, type, size, unit); }
// Print "I"
#include <stdio.h>

int main()
{
    int height = 5;
    int i, j;

    // Top and bottom rows are filled; every other row marks only the centre
    // column, which together draws a capital "I".
    for (i = 0; i < height; i++) {
        for (j = 0; j < height; j++) {
            if (i == 0 || i == height - 1 || j == height / 2) {
                printf("* ");
            } else {
                printf("  ");  // two spaces to stay aligned with "* "
            }
        }
        printf("\n");
    }
    return 0;
}
import os import boto3 import numpy as np import pandas as pd from datetime import datetime s3 = boto3.client('s3') bucket = os.environ['BUCKET'] def handler(event, context): event['stage'] = 'dataset' prefix = event.get('prefix', 'sagemaker/xgboost_credit_risk') event['bucket'] = bucket event['prefix'] = prefix job_prefix = event.get('job_prefix', 'xgboost-credit') job_uniq_id = int(datetime.now().timestamp()) job_name = f'{job_prefix}-{job_uniq_id}' endpoint_name = f'{job_prefix}-{job_uniq_id}' event['job_name'] = job_name event['endpoint_name'] = endpoint_name key = event.get('key', 'card.xls') data = s3.get_object(Bucket=bucket, Key=key) dataset = pd.read_excel(data['Body'].read()) dataset = dataset.drop('Unnamed: 0', axis=1) dataset = pd.concat([dataset['Y'], dataset.drop(['Y'], axis=1)], axis=1) train_data, validation_data, test_data = np.split( dataset.sample(frac=1, random_state=1729), [int(0.7 * len(dataset)), int(0.9 * len(dataset))] ) train_data.to_csv('/tmp/train.csv', header=False, index=False) validation_data.to_csv('/tmp/validation.csv', header=False, index=False) bucket_resource = boto3.Session().resource('s3').Bucket(bucket) bucket_resource.Object( os.path.join(prefix, 'train/train.csv') ).upload_file('/tmp/train.csv') bucket_resource.Object( os.path.join(prefix, 'validation/validation.csv') ).upload_file('/tmp/validation.csv') return event
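For a quick local smoke test, this Lambda handler can be called directly with a minimal event. A hedged sketch follows: the module name dataset_handler and the bucket name are hypothetical, and it assumes AWS credentials are configured locally and that card.xls already exists in the bucket.

import os

os.environ['BUCKET'] = 'my-example-bucket'  # hypothetical bucket; must be set before the import below
import dataset_handler                      # hypothetical module name for the file above

event = {'prefix': 'sagemaker/xgboost_credit_risk', 'key': 'card.xls'}
result = dataset_handler.handler(event, context=None)
print(result['job_name'], result['endpoint_name'], result['stage'])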
/* * Copyright © 2011-2012 Inria. All rights reserved. * See COPYING in top-level directory. */ #include <private/autogen/config.h> #include <hwloc.h> #include <stdlib.h> #include <stdio.h> #include <string.h> static void usage(char *name, FILE *where) { fprintf (where, "Usage: %s [options] <output>.xml [-n <name1] <input1>.xml [-n name2] <input2>.xml ...\n", name); fprintf (where, "Options:\n"); fprintf (where, " -v --verbose Show verbose messages\n"); fprintf (where, " -f --force Ignore errors while reading input files\n"); fprintf (where, " -n --name <name> Set the name of the next input topology\n"); } int main(int argc, char *argv[]) { hwloc_topology_t topology; char *callname; char *output; int verbose = 0; int force = 0; int opt; int i, j; callname = strrchr(argv[0], '/'); if (!callname) callname = argv[0]; else callname++; /* skip argv[0], handle options */ argc--; argv++; while (argc >= 1) { opt = 0; if (!strcmp(argv[0], "-v") || !strcmp(argv[0], "--verbose")) { verbose++; } else if (!strcmp(argv[0], "-f") || !strcmp(argv[0], "--force")) { force = 1; } else if (!strcmp(argv[0], "-h") || !strcmp(argv[0], "--help")) { usage(callname, stdout); exit(EXIT_SUCCESS); } else if (!strcmp(argv[0], "--")) { argc--; argv++; break; } else if (*argv[0] == '-') { fprintf(stderr, "Unrecognized option: %s\n", argv[0]); usage(callname, stderr); exit(EXIT_FAILURE); } else break; argc -= opt+1; argv += opt+1; } if (!argc) { fprintf(stderr, "Missing output file name\n"); usage(callname, stderr); exit(EXIT_FAILURE); } output = argv[0]; argc--; argv++; hwloc_topology_init(&topology); hwloc_topology_set_flags(topology, HWLOC_TOPOLOGY_FLAG_WHOLE_IO|HWLOC_TOPOLOGY_FLAG_ICACHES); hwloc_topology_set_custom(topology); for(i=0, j=0; i<argc; i++, j++) { hwloc_topology_t input; hwloc_obj_t root; char idx[10]; char *name = NULL; if (!strcmp(argv[i], "-n") || !strcmp(argv[i], "--name")) { if (i+2 >= argc) { usage(callname, stderr); exit(EXIT_FAILURE); } name = argv[i+1]; i+=2; } if (verbose) { if (name) printf("Importing XML topology %s with name %s ...\n", argv[i], name); else printf("Importing XML topology %s ...\n", argv[i]); } hwloc_topology_init(&input); hwloc_topology_set_flags(input, HWLOC_TOPOLOGY_FLAG_WHOLE_IO|HWLOC_TOPOLOGY_FLAG_ICACHES); if (hwloc_topology_set_xml(input, argv[i])) { fprintf(stderr, "Failed to set source XML file %s (%s)\n", argv[i], strerror(errno)); hwloc_topology_destroy(input); if (force) continue; else return EXIT_FAILURE; } hwloc_topology_load(input); root = hwloc_get_root_obj(input); hwloc_obj_add_info(root, "AssemblerName", name ? name : argv[i]); snprintf(idx, sizeof(idx), "%d", j); hwloc_obj_add_info(root, "AssemblerIndex", idx); hwloc_custom_insert_topology(topology, hwloc_get_root_obj(topology), input, NULL); hwloc_topology_destroy(input); } if (verbose) printf("Assembling global topology...\n"); hwloc_topology_load(topology); if (hwloc_topology_export_xml(topology, output) < 0) { fprintf(stderr, "Failed to export XML to %s (%s)\n", output, strerror(errno)); return EXIT_FAILURE; } hwloc_topology_destroy(topology); if (verbose) printf("Exported topology to XML file %s\n", output); return 0; }
// SPDX-License-Identifier: GPL-2.0 /* * Copyright (c) 2000-2005 Silicon Graphics, Inc. * All Rights Reserved. */ #include "xfs.h" #include "xfs_fs.h" #include "xfs_shared.h" #include "xfs_format.h" #include "xfs_log_format.h" #include "xfs_trans_resv.h" #include "xfs_bit.h" #include "xfs_mount.h" #include "xfs_inode.h" #include "xfs_bmap.h" #include "xfs_bmap_util.h" #include "xfs_bmap_btree.h" #include "xfs_alloc.h" #include "xfs_error.h" #include "xfs_trans.h" #include "xfs_trans_space.h" #include "xfs_trace.h" #include "xfs_buf.h" #include "xfs_icache.h" #include "xfs_rtalloc.h" /* * Realtime allocator bitmap functions shared with userspace. */ /* * Real time buffers need verifiers to avoid runtime warnings during IO. * We don't have anything to verify, however, so these are just dummy * operations. */ static void xfs_rtbuf_verify_read( struct xfs_buf *bp) { return; } static void xfs_rtbuf_verify_write( struct xfs_buf *bp) { return; } const struct xfs_buf_ops xfs_rtbuf_ops = { .name = "rtbuf", .verify_read = xfs_rtbuf_verify_read, .verify_write = xfs_rtbuf_verify_write, }; /* * Get a buffer for the bitmap or summary file block specified. * The buffer is returned read and locked. */ int xfs_rtbuf_get( xfs_mount_t *mp, /* file system mount structure */ xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t block, /* block number in bitmap or summary */ int issum, /* is summary not bitmap */ xfs_buf_t **bpp) /* output: buffer for the block */ { xfs_buf_t *bp; /* block buffer, result */ xfs_inode_t *ip; /* bitmap or summary inode */ xfs_bmbt_irec_t map; int nmap = 1; int error; /* error value */ ip = issum ? mp->m_rsumip : mp->m_rbmip; error = xfs_bmapi_read(ip, block, 1, &map, &nmap, XFS_DATA_FORK); if (error) return error; if (nmap == 0 || !xfs_bmap_is_real_extent(&map)) return -EFSCORRUPTED; ASSERT(map.br_startblock != NULLFSBLOCK); error = xfs_trans_read_buf(mp, tp, mp->m_ddev_targp, XFS_FSB_TO_DADDR(mp, map.br_startblock), mp->m_bsize, 0, &bp, &xfs_rtbuf_ops); if (error) return error; xfs_trans_buf_set_type(tp, bp, issum ? XFS_BLFT_RTSUMMARY_BUF : XFS_BLFT_RTBITMAP_BUF); *bpp = bp; return 0; } /* * Searching backward from start to limit, find the first block whose * allocated/free state is different from start's. */ int xfs_rtfind_back( xfs_mount_t *mp, /* file system mount point */ xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t start, /* starting block to look at */ xfs_rtblock_t limit, /* last block to look at */ xfs_rtblock_t *rtblock) /* out: start block found */ { xfs_rtword_t *b; /* current word in buffer */ int bit; /* bit number in the word */ xfs_rtblock_t block; /* bitmap block number */ xfs_buf_t *bp; /* buf for the block */ xfs_rtword_t *bufp; /* starting word in buffer */ int error; /* error value */ xfs_rtblock_t firstbit; /* first useful bit in the word */ xfs_rtblock_t i; /* current bit number rel. to start */ xfs_rtblock_t len; /* length of inspected area */ xfs_rtword_t mask; /* mask of relevant bits for value */ xfs_rtword_t want; /* mask for "good" values */ xfs_rtword_t wdiff; /* difference from wanted value */ int word; /* word number in the buffer */ /* * Compute and read in starting bitmap block for starting block. */ block = XFS_BITTOBLOCK(mp, start); error = xfs_rtbuf_get(mp, tp, block, 0, &bp); if (error) { return error; } bufp = bp->b_addr; /* * Get the first word's index & point to it. 
*/ word = XFS_BITTOWORD(mp, start); b = &bufp[word]; bit = (int)(start & (XFS_NBWORD - 1)); len = start - limit + 1; /* * Compute match value, based on the bit at start: if 1 (free) * then all-ones, else all-zeroes. */ want = (*b & ((xfs_rtword_t)1 << bit)) ? -1 : 0; /* * If the starting position is not word-aligned, deal with the * partial word. */ if (bit < XFS_NBWORD - 1) { /* * Calculate first (leftmost) bit number to look at, * and mask for all the relevant bits in this word. */ firstbit = XFS_RTMAX((xfs_srtblock_t)(bit - len + 1), 0); mask = (((xfs_rtword_t)1 << (bit - firstbit + 1)) - 1) << firstbit; /* * Calculate the difference between the value there * and what we're looking for. */ if ((wdiff = (*b ^ want) & mask)) { /* * Different. Mark where we are and return. */ xfs_trans_brelse(tp, bp); i = bit - XFS_RTHIBIT(wdiff); *rtblock = start - i + 1; return 0; } i = bit - firstbit + 1; /* * Go on to previous block if that's where the previous word is * and we need the previous word. */ if (--word == -1 && i < len) { /* * If done with this block, get the previous one. */ xfs_trans_brelse(tp, bp); error = xfs_rtbuf_get(mp, tp, --block, 0, &bp); if (error) { return error; } bufp = bp->b_addr; word = XFS_BLOCKWMASK(mp); b = &bufp[word]; } else { /* * Go on to the previous word in the buffer. */ b--; } } else { /* * Starting on a word boundary, no partial word. */ i = 0; } /* * Loop over whole words in buffers. When we use up one buffer * we move on to the previous one. */ while (len - i >= XFS_NBWORD) { /* * Compute difference between actual and desired value. */ if ((wdiff = *b ^ want)) { /* * Different, mark where we are and return. */ xfs_trans_brelse(tp, bp); i += XFS_NBWORD - 1 - XFS_RTHIBIT(wdiff); *rtblock = start - i + 1; return 0; } i += XFS_NBWORD; /* * Go on to previous block if that's where the previous word is * and we need the previous word. */ if (--word == -1 && i < len) { /* * If done with this block, get the previous one. */ xfs_trans_brelse(tp, bp); error = xfs_rtbuf_get(mp, tp, --block, 0, &bp); if (error) { return error; } bufp = bp->b_addr; word = XFS_BLOCKWMASK(mp); b = &bufp[word]; } else { /* * Go on to the previous word in the buffer. */ b--; } } /* * If not ending on a word boundary, deal with the last * (partial) word. */ if (len - i) { /* * Calculate first (leftmost) bit number to look at, * and mask for all the relevant bits in this word. */ firstbit = XFS_NBWORD - (len - i); mask = (((xfs_rtword_t)1 << (len - i)) - 1) << firstbit; /* * Compute difference between actual and desired value. */ if ((wdiff = (*b ^ want) & mask)) { /* * Different, mark where we are and return. */ xfs_trans_brelse(tp, bp); i += XFS_NBWORD - 1 - XFS_RTHIBIT(wdiff); *rtblock = start - i + 1; return 0; } else i = len; } /* * No match, return that we scanned the whole area. */ xfs_trans_brelse(tp, bp); *rtblock = start - i + 1; return 0; } /* * Searching forward from start to limit, find the first block whose * allocated/free state is different from start's. 
*/ int xfs_rtfind_forw( xfs_mount_t *mp, /* file system mount point */ xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t start, /* starting block to look at */ xfs_rtblock_t limit, /* last block to look at */ xfs_rtblock_t *rtblock) /* out: start block found */ { xfs_rtword_t *b; /* current word in buffer */ int bit; /* bit number in the word */ xfs_rtblock_t block; /* bitmap block number */ xfs_buf_t *bp; /* buf for the block */ xfs_rtword_t *bufp; /* starting word in buffer */ int error; /* error value */ xfs_rtblock_t i; /* current bit number rel. to start */ xfs_rtblock_t lastbit; /* last useful bit in the word */ xfs_rtblock_t len; /* length of inspected area */ xfs_rtword_t mask; /* mask of relevant bits for value */ xfs_rtword_t want; /* mask for "good" values */ xfs_rtword_t wdiff; /* difference from wanted value */ int word; /* word number in the buffer */ /* * Compute and read in starting bitmap block for starting block. */ block = XFS_BITTOBLOCK(mp, start); error = xfs_rtbuf_get(mp, tp, block, 0, &bp); if (error) { return error; } bufp = bp->b_addr; /* * Get the first word's index & point to it. */ word = XFS_BITTOWORD(mp, start); b = &bufp[word]; bit = (int)(start & (XFS_NBWORD - 1)); len = limit - start + 1; /* * Compute match value, based on the bit at start: if 1 (free) * then all-ones, else all-zeroes. */ want = (*b & ((xfs_rtword_t)1 << bit)) ? -1 : 0; /* * If the starting position is not word-aligned, deal with the * partial word. */ if (bit) { /* * Calculate last (rightmost) bit number to look at, * and mask for all the relevant bits in this word. */ lastbit = XFS_RTMIN(bit + len, XFS_NBWORD); mask = (((xfs_rtword_t)1 << (lastbit - bit)) - 1) << bit; /* * Calculate the difference between the value there * and what we're looking for. */ if ((wdiff = (*b ^ want) & mask)) { /* * Different. Mark where we are and return. */ xfs_trans_brelse(tp, bp); i = XFS_RTLOBIT(wdiff) - bit; *rtblock = start + i - 1; return 0; } i = lastbit - bit; /* * Go on to next block if that's where the next word is * and we need the next word. */ if (++word == XFS_BLOCKWSIZE(mp) && i < len) { /* * If done with this block, get the previous one. */ xfs_trans_brelse(tp, bp); error = xfs_rtbuf_get(mp, tp, ++block, 0, &bp); if (error) { return error; } b = bufp = bp->b_addr; word = 0; } else { /* * Go on to the previous word in the buffer. */ b++; } } else { /* * Starting on a word boundary, no partial word. */ i = 0; } /* * Loop over whole words in buffers. When we use up one buffer * we move on to the next one. */ while (len - i >= XFS_NBWORD) { /* * Compute difference between actual and desired value. */ if ((wdiff = *b ^ want)) { /* * Different, mark where we are and return. */ xfs_trans_brelse(tp, bp); i += XFS_RTLOBIT(wdiff); *rtblock = start + i - 1; return 0; } i += XFS_NBWORD; /* * Go on to next block if that's where the next word is * and we need the next word. */ if (++word == XFS_BLOCKWSIZE(mp) && i < len) { /* * If done with this block, get the next one. */ xfs_trans_brelse(tp, bp); error = xfs_rtbuf_get(mp, tp, ++block, 0, &bp); if (error) { return error; } b = bufp = bp->b_addr; word = 0; } else { /* * Go on to the next word in the buffer. */ b++; } } /* * If not ending on a word boundary, deal with the last * (partial) word. */ if ((lastbit = len - i)) { /* * Calculate mask for all the relevant bits in this word. */ mask = ((xfs_rtword_t)1 << lastbit) - 1; /* * Compute difference between actual and desired value. 
*/ if ((wdiff = (*b ^ want) & mask)) { /* * Different, mark where we are and return. */ xfs_trans_brelse(tp, bp); i += XFS_RTLOBIT(wdiff); *rtblock = start + i - 1; return 0; } else i = len; } /* * No match, return that we scanned the whole area. */ xfs_trans_brelse(tp, bp); *rtblock = start + i - 1; return 0; } /* * Read and/or modify the summary information for a given extent size, * bitmap block combination. * Keeps track of a current summary block, so we don't keep reading * it from the buffer cache. * * Summary information is returned in *sum if specified. * If no delta is specified, returns summary only. */ int xfs_rtmodify_summary_int( xfs_mount_t *mp, /* file system mount structure */ xfs_trans_t *tp, /* transaction pointer */ int log, /* log2 of extent size */ xfs_rtblock_t bbno, /* bitmap block number */ int delta, /* change to make to summary info */ xfs_buf_t **rbpp, /* in/out: summary block buffer */ xfs_fsblock_t *rsb, /* in/out: summary block number */ xfs_suminfo_t *sum) /* out: summary info for this block */ { xfs_buf_t *bp; /* buffer for the summary block */ int error; /* error value */ xfs_fsblock_t sb; /* summary fsblock */ int so; /* index into the summary file */ xfs_suminfo_t *sp; /* pointer to returned data */ /* * Compute entry number in the summary file. */ so = XFS_SUMOFFS(mp, log, bbno); /* * Compute the block number in the summary file. */ sb = XFS_SUMOFFSTOBLOCK(mp, so); /* * If we have an old buffer, and the block number matches, use that. */ if (*rbpp && *rsb == sb) bp = *rbpp; /* * Otherwise we have to get the buffer. */ else { /* * If there was an old one, get rid of it first. */ if (*rbpp) xfs_trans_brelse(tp, *rbpp); error = xfs_rtbuf_get(mp, tp, sb, 1, &bp); if (error) { return error; } /* * Remember this buffer and block for the next call. */ *rbpp = bp; *rsb = sb; } /* * Point to the summary information, modify/log it, and/or copy it out. */ sp = XFS_SUMPTR(mp, bp, so); if (delta) { uint first = (uint)((char *)sp - (char *)bp->b_addr); *sp += delta; xfs_trans_log_buf(tp, bp, first, first + sizeof(*sp) - 1); } if (sum) *sum = *sp; return 0; } int xfs_rtmodify_summary( xfs_mount_t *mp, /* file system mount structure */ xfs_trans_t *tp, /* transaction pointer */ int log, /* log2 of extent size */ xfs_rtblock_t bbno, /* bitmap block number */ int delta, /* change to make to summary info */ xfs_buf_t **rbpp, /* in/out: summary block buffer */ xfs_fsblock_t *rsb) /* in/out: summary block number */ { return xfs_rtmodify_summary_int(mp, tp, log, bbno, delta, rbpp, rsb, NULL); } /* * Set the given range of bitmap bits to the given value. * Do whatever I/O and logging is required. */ int xfs_rtmodify_range( xfs_mount_t *mp, /* file system mount point */ xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t start, /* starting block to modify */ xfs_extlen_t len, /* length of extent to modify */ int val) /* 1 for free, 0 for allocated */ { xfs_rtword_t *b; /* current word in buffer */ int bit; /* bit number in the word */ xfs_rtblock_t block; /* bitmap block number */ xfs_buf_t *bp; /* buf for the block */ xfs_rtword_t *bufp; /* starting word in buffer */ int error; /* error value */ xfs_rtword_t *first; /* first used word in the buffer */ int i; /* current bit number rel. to start */ int lastbit; /* last useful bit in word */ xfs_rtword_t mask; /* mask o frelevant bits for value */ int word; /* word number in the buffer */ /* * Compute starting bitmap block number. */ block = XFS_BITTOBLOCK(mp, start); /* * Read the bitmap block, and point to its data. 
*/ error = xfs_rtbuf_get(mp, tp, block, 0, &bp); if (error) { return error; } bufp = bp->b_addr; /* * Compute the starting word's address, and starting bit. */ word = XFS_BITTOWORD(mp, start); first = b = &bufp[word]; bit = (int)(start & (XFS_NBWORD - 1)); /* * 0 (allocated) => all zeroes; 1 (free) => all ones. */ val = -val; /* * If not starting on a word boundary, deal with the first * (partial) word. */ if (bit) { /* * Compute first bit not changed and mask of relevant bits. */ lastbit = XFS_RTMIN(bit + len, XFS_NBWORD); mask = (((xfs_rtword_t)1 << (lastbit - bit)) - 1) << bit; /* * Set/clear the active bits. */ if (val) *b |= mask; else *b &= ~mask; i = lastbit - bit; /* * Go on to the next block if that's where the next word is * and we need the next word. */ if (++word == XFS_BLOCKWSIZE(mp) && i < len) { /* * Log the changed part of this block. * Get the next one. */ xfs_trans_log_buf(tp, bp, (uint)((char *)first - (char *)bufp), (uint)((char *)b - (char *)bufp)); error = xfs_rtbuf_get(mp, tp, ++block, 0, &bp); if (error) { return error; } first = b = bufp = bp->b_addr; word = 0; } else { /* * Go on to the next word in the buffer */ b++; } } else { /* * Starting on a word boundary, no partial word. */ i = 0; } /* * Loop over whole words in buffers. When we use up one buffer * we move on to the next one. */ while (len - i >= XFS_NBWORD) { /* * Set the word value correctly. */ *b = val; i += XFS_NBWORD; /* * Go on to the next block if that's where the next word is * and we need the next word. */ if (++word == XFS_BLOCKWSIZE(mp) && i < len) { /* * Log the changed part of this block. * Get the next one. */ xfs_trans_log_buf(tp, bp, (uint)((char *)first - (char *)bufp), (uint)((char *)b - (char *)bufp)); error = xfs_rtbuf_get(mp, tp, ++block, 0, &bp); if (error) { return error; } first = b = bufp = bp->b_addr; word = 0; } else { /* * Go on to the next word in the buffer */ b++; } } /* * If not ending on a word boundary, deal with the last * (partial) word. */ if ((lastbit = len - i)) { /* * Compute a mask of relevant bits. */ mask = ((xfs_rtword_t)1 << lastbit) - 1; /* * Set/clear the active bits. */ if (val) *b |= mask; else *b &= ~mask; b++; } /* * Log any remaining changed bytes. */ if (b > first) xfs_trans_log_buf(tp, bp, (uint)((char *)first - (char *)bufp), (uint)((char *)b - (char *)bufp - 1)); return 0; } /* * Mark an extent specified by start and len freed. * Updates all the summary information as well as the bitmap. */ int xfs_rtfree_range( xfs_mount_t *mp, /* file system mount point */ xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t start, /* starting block to free */ xfs_extlen_t len, /* length to free */ xfs_buf_t **rbpp, /* in/out: summary block buffer */ xfs_fsblock_t *rsb) /* in/out: summary block number */ { xfs_rtblock_t end; /* end of the freed extent */ int error; /* error value */ xfs_rtblock_t postblock; /* first block freed > end */ xfs_rtblock_t preblock; /* first block freed < start */ end = start + len - 1; /* * Modify the bitmap to mark this extent freed. */ error = xfs_rtmodify_range(mp, tp, start, len, 1); if (error) { return error; } /* * Assume we're freeing out of the middle of an allocated extent. * We need to find the beginning and end of the extent so we can * properly update the summary. */ error = xfs_rtfind_back(mp, tp, start, 0, &preblock); if (error) { return error; } /* * Find the next allocated block (end of allocated extent). 
*/ error = xfs_rtfind_forw(mp, tp, end, mp->m_sb.sb_rextents - 1, &postblock); if (error) return error; /* * If there are blocks not being freed at the front of the * old extent, add summary data for them to be allocated. */ if (preblock < start) { error = xfs_rtmodify_summary(mp, tp, XFS_RTBLOCKLOG(start - preblock), XFS_BITTOBLOCK(mp, preblock), -1, rbpp, rsb); if (error) { return error; } } /* * If there are blocks not being freed at the end of the * old extent, add summary data for them to be allocated. */ if (postblock > end) { error = xfs_rtmodify_summary(mp, tp, XFS_RTBLOCKLOG(postblock - end), XFS_BITTOBLOCK(mp, end + 1), -1, rbpp, rsb); if (error) { return error; } } /* * Increment the summary information corresponding to the entire * (new) free extent. */ error = xfs_rtmodify_summary(mp, tp, XFS_RTBLOCKLOG(postblock + 1 - preblock), XFS_BITTOBLOCK(mp, preblock), 1, rbpp, rsb); return error; } /* * Check that the given range is either all allocated (val = 0) or * all free (val = 1). */ int xfs_rtcheck_range( xfs_mount_t *mp, /* file system mount point */ xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t start, /* starting block number of extent */ xfs_extlen_t len, /* length of extent */ int val, /* 1 for free, 0 for allocated */ xfs_rtblock_t *new, /* out: first block not matching */ int *stat) /* out: 1 for matches, 0 for not */ { xfs_rtword_t *b; /* current word in buffer */ int bit; /* bit number in the word */ xfs_rtblock_t block; /* bitmap block number */ xfs_buf_t *bp; /* buf for the block */ xfs_rtword_t *bufp; /* starting word in buffer */ int error; /* error value */ xfs_rtblock_t i; /* current bit number rel. to start */ xfs_rtblock_t lastbit; /* last useful bit in word */ xfs_rtword_t mask; /* mask of relevant bits for value */ xfs_rtword_t wdiff; /* difference from wanted value */ int word; /* word number in the buffer */ /* * Compute starting bitmap block number */ block = XFS_BITTOBLOCK(mp, start); /* * Read the bitmap block. */ error = xfs_rtbuf_get(mp, tp, block, 0, &bp); if (error) { return error; } bufp = bp->b_addr; /* * Compute the starting word's address, and starting bit. */ word = XFS_BITTOWORD(mp, start); b = &bufp[word]; bit = (int)(start & (XFS_NBWORD - 1)); /* * 0 (allocated) => all zero's; 1 (free) => all one's. */ val = -val; /* * If not starting on a word boundary, deal with the first * (partial) word. */ if (bit) { /* * Compute first bit not examined. */ lastbit = XFS_RTMIN(bit + len, XFS_NBWORD); /* * Mask of relevant bits. */ mask = (((xfs_rtword_t)1 << (lastbit - bit)) - 1) << bit; /* * Compute difference between actual and desired value. */ if ((wdiff = (*b ^ val) & mask)) { /* * Different, compute first wrong bit and return. */ xfs_trans_brelse(tp, bp); i = XFS_RTLOBIT(wdiff) - bit; *new = start + i; *stat = 0; return 0; } i = lastbit - bit; /* * Go on to next block if that's where the next word is * and we need the next word. */ if (++word == XFS_BLOCKWSIZE(mp) && i < len) { /* * If done with this block, get the next one. */ xfs_trans_brelse(tp, bp); error = xfs_rtbuf_get(mp, tp, ++block, 0, &bp); if (error) { return error; } b = bufp = bp->b_addr; word = 0; } else { /* * Go on to the next word in the buffer. */ b++; } } else { /* * Starting on a word boundary, no partial word. */ i = 0; } /* * Loop over whole words in buffers. When we use up one buffer * we move on to the next one. */ while (len - i >= XFS_NBWORD) { /* * Compute difference between actual and desired value. 
*/ if ((wdiff = *b ^ val)) { /* * Different, compute first wrong bit and return. */ xfs_trans_brelse(tp, bp); i += XFS_RTLOBIT(wdiff); *new = start + i; *stat = 0; return 0; } i += XFS_NBWORD; /* * Go on to next block if that's where the next word is * and we need the next word. */ if (++word == XFS_BLOCKWSIZE(mp) && i < len) { /* * If done with this block, get the next one. */ xfs_trans_brelse(tp, bp); error = xfs_rtbuf_get(mp, tp, ++block, 0, &bp); if (error) { return error; } b = bufp = bp->b_addr; word = 0; } else { /* * Go on to the next word in the buffer. */ b++; } } /* * If not ending on a word boundary, deal with the last * (partial) word. */ if ((lastbit = len - i)) { /* * Mask of relevant bits. */ mask = ((xfs_rtword_t)1 << lastbit) - 1; /* * Compute difference between actual and desired value. */ if ((wdiff = (*b ^ val) & mask)) { /* * Different, compute first wrong bit and return. */ xfs_trans_brelse(tp, bp); i += XFS_RTLOBIT(wdiff); *new = start + i; *stat = 0; return 0; } else i = len; } /* * Successful, return. */ xfs_trans_brelse(tp, bp); *new = start + i; *stat = 1; return 0; } #ifdef DEBUG /* * Check that the given extent (block range) is allocated already. */ STATIC int /* error */ xfs_rtcheck_alloc_range( xfs_mount_t *mp, /* file system mount point */ xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t bno, /* starting block number of extent */ xfs_extlen_t len) /* length of extent */ { xfs_rtblock_t new; /* dummy for xfs_rtcheck_range */ int stat; int error; error = xfs_rtcheck_range(mp, tp, bno, len, 0, &new, &stat); if (error) return error; ASSERT(stat); return 0; } #else #define xfs_rtcheck_alloc_range(m,t,b,l) (0) #endif /* * Free an extent in the realtime subvolume. Length is expressed in * realtime extents, as is the block number. */ int /* error */ xfs_rtfree_extent( xfs_trans_t *tp, /* transaction pointer */ xfs_rtblock_t bno, /* starting block number to free */ xfs_extlen_t len) /* length of extent freed */ { int error; /* error value */ xfs_mount_t *mp; /* file system mount structure */ xfs_fsblock_t sb; /* summary file block number */ xfs_buf_t *sumbp = NULL; /* summary file block buffer */ mp = tp->t_mountp; ASSERT(mp->m_rbmip->i_itemp != NULL); ASSERT(xfs_isilocked(mp->m_rbmip, XFS_ILOCK_EXCL)); error = xfs_rtcheck_alloc_range(mp, tp, bno, len); if (error) return error; /* * Free the range of realtime blocks. */ error = xfs_rtfree_range(mp, tp, bno, len, &sumbp, &sb); if (error) { return error; } /* * Mark more blocks free in the superblock. */ xfs_trans_mod_sb(tp, XFS_TRANS_SB_FREXTENTS, (long)len); /* * If we've now freed all the blocks, reset the file sequence * number to 0. */ if (tp->t_frextents_delta + mp->m_sb.sb_frextents == mp->m_sb.sb_rextents) { if (!(mp->m_rbmip->i_d.di_flags & XFS_DIFLAG_NEWRTBM)) mp->m_rbmip->i_d.di_flags |= XFS_DIFLAG_NEWRTBM; *(uint64_t *)&VFS_I(mp->m_rbmip)->i_atime = 0; xfs_trans_log_inode(tp, mp->m_rbmip, XFS_ILOG_CORE); } return 0; } /* Find all the free records within a given range. 
*/ int xfs_rtalloc_query_range( struct xfs_trans *tp, struct xfs_rtalloc_rec *low_rec, struct xfs_rtalloc_rec *high_rec, xfs_rtalloc_query_range_fn fn, void *priv) { struct xfs_rtalloc_rec rec; struct xfs_mount *mp = tp->t_mountp; xfs_rtblock_t rtstart; xfs_rtblock_t rtend; xfs_rtblock_t rem; int is_free; int error = 0; if (low_rec->ar_startext > high_rec->ar_startext) return -EINVAL; if (low_rec->ar_startext >= mp->m_sb.sb_rextents || low_rec->ar_startext == high_rec->ar_startext) return 0; if (high_rec->ar_startext >= mp->m_sb.sb_rextents) high_rec->ar_startext = mp->m_sb.sb_rextents - 1; /* Iterate the bitmap, looking for discrepancies. */ rtstart = low_rec->ar_startext; rem = high_rec->ar_startext - rtstart; while (rem) { /* Is the first block free? */ error = xfs_rtcheck_range(mp, tp, rtstart, 1, 1, &rtend, &is_free); if (error) break; /* How long does the extent go for? */ error = xfs_rtfind_forw(mp, tp, rtstart, high_rec->ar_startext - 1, &rtend); if (error) break; if (is_free) { rec.ar_startext = rtstart; rec.ar_extcount = rtend - rtstart + 1; error = fn(tp, &rec, priv); if (error) break; } rem -= rtend - rtstart + 1; rtstart = rtend + 1; } return error; } /* Find all the free records. */ int xfs_rtalloc_query_all( struct xfs_trans *tp, xfs_rtalloc_query_range_fn fn, void *priv) { struct xfs_rtalloc_rec keys[2]; keys[0].ar_startext = 0; keys[1].ar_startext = tp->t_mountp->m_sb.sb_rextents - 1; keys[0].ar_extcount = keys[1].ar_extcount = 0; return xfs_rtalloc_query_range(tp, &keys[0], &keys[1], fn, priv); } /* Is the given extent all free? */ int xfs_rtalloc_extent_is_free( struct xfs_mount *mp, struct xfs_trans *tp, xfs_rtblock_t start, xfs_extlen_t len, bool *is_free) { xfs_rtblock_t end; int matches; int error; error = xfs_rtcheck_range(mp, tp, start, len, 1, &end, &matches); if (error) return error; *is_free = matches; return 0; }
/**
 * gulp-fail v1.0.6
 * Copyright (c) 2016 Brandon Sara (http://bsara.github.io)
 * Licensed under the MIT License
 */

'use strict';


// Dependencies
// -----------------

var PluginError = require('plugin-error');
var colors = require('ansi-colors');
var through = require('through2');


// Constants
// -----------------

var PLUGIN_NAME = 'gulp-fail';


// Task Definition
// -----------------

function gulpFail(message, failAfterCompletion) {
  var shouldFail = false;

  var getMessage = message;
  if (typeof getMessage !== 'function') {
    getMessage = function() {
      return (message || "Task was forced to fail.");
    };
  }

  var getError = function() {
    return new PluginError(PLUGIN_NAME, colors.red(getMessage()), { showStack: false });
  };

  var checkFile = function(file, _e, cb) {
    if (failAfterCompletion !== true) {
      cb(getError());
      return;
    }

    shouldFail = true;
    cb(null, file);
  };

  var checkStream = function() {
    if (failAfterCompletion === true && shouldFail) {
      this.emit('error', getError());
      return;
    }
    this.emit('end');
  };

  var onError = function() {
    this.emit('end');
  };

  return through.obj(checkFile, checkStream)
    .on('error', onError);
}


// Module Exports
// -----------------

module.exports = gulpFail;
import json from django.contrib.auth.decorators import login_required from django.core import serializers from django.shortcuts import render, get_object_or_404, redirect from django.template import loader from django.http import HttpResponse from django import template from django.contrib.auth.models import User from django.conf import settings from apps.authentication.models import Profile, Notification, Task @login_required(login_url="/login/") def dashC(request): isEnableChat = 0 server = settings.CHAT_WS_SERVER_HOST port = settings.CHAT_WS_SERVER_PORT protocol = settings.CHAT_WS_SERVER_PROTOCOL strAddr = protocol + '://' + server + ':' + str(port) + '/' SupportUser = User.objects.all().filter(profile__isSupportChat=True).filter( profile__isSupportChatOnlineStatus='on') # SupportUser = list(User.objects.all().filter(profile__isSupportChat=True)) if 0 < len(SupportUser): isEnableChat = 0 chat = {"server": settings.CHAT_WS_SERVER_HOST, "port": settings.CHAT_WS_SERVER_PORT, "protocol": settings.CHAT_WS_SERVER_PROTOCOL, "fullAddres": strAddr, "isEnableChat": isEnableChat, "SupportUser": SupportUser, } notification = list(Notification.objects.all().filter(user=request.user).filter(isMarkRead=False)) tasks = list(Task.objects.all().filter(user=request.user).filter(isCompleted=False)) profile = Profile.objects.get(user=request.user) data = { "chat": chat, "notification": notification, "tasks": tasks, "profile": profile, "SupportUser": SupportUser, } template_name = "dashC/dashC.html" return render(request, template_name, context=data) @login_required(login_url="/login/") def dashS(request): server = settings.CHAT_WS_SERVER_HOST port = settings.CHAT_WS_SERVER_PORT protocol = settings.CHAT_WS_SERVER_PROTOCOL strAddr = protocol + '://' + server + ':' + str(port) + '/' SupportUser = list(User.objects.all().filter(profile__isSupportChat=True)) chat = {"server": settings.CHAT_WS_SERVER_HOST, "port": settings.CHAT_WS_SERVER_PORT, "protocol": settings.CHAT_WS_SERVER_PROTOCOL, "fullAddres": strAddr, "SupportUser": SupportUser, } notification = list(Notification.objects.all().filter(user=request.user).filter(isMarkRead=False)) tasks = list(Task.objects.all().filter(user=request.user).filter(isCompleted=False)) profile = Profile.objects.get(user=request.user) jso = profile.personalSettingsSite data = { "chat": chat, "notification": notification, "tasks": tasks, "profile": profile, "SupportUser": SupportUser, } # if not request.user.is_staff: # return HttpResponse('You not staff OniCommPro') template_name = "dashS/index.html" return render(request, template_name, context=data) @login_required(login_url="/login/") def pages(request): context = {} try: load_template = request.path.split('/')[-1] html_template = loader.get_template(load_template) return HttpResponse(html_template.render(context, request)) except template.TemplateDoesNotExist: html_template = loader.get_template('dashC/error-404.html') return HttpResponse(html_template.render(context, request)) except: html_template = loader.get_template('dashC/error-500.html') return HttpResponse(html_template.render(context, request)) def getUserSettings(request): res = User print(request) return HttpResponse(request, res) def userPage(request): context = {} # template = 'dashboard/page-user.html' return render(request, template, context)
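These Django views are only reachable once they are wired into a URLconf. A minimal sketch of what that wiring might look like (the URL patterns and names are assumptions for illustration, not taken from the project):

from django.urls import path, re_path
from . import views  # the module that defines dashC, dashS and pages above

urlpatterns = [
    path('dashC/', views.dashC, name='dashC'),
    path('dashS/', views.dashS, name='dashS'),
    # pages() resolves arbitrary *.html paths against the template loader, so route it last.
    re_path(r'^.*\.html$', views.pages, name='pages'),
]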
from ..models import Category, Pitch, Comment, User from flask import render_template, redirect, url_for, request from . import main from flask_login import login_required, current_user from .forms import CommentForm, PitchForm, UpdateProfilePic from sqlalchemy import desc from .. import photos, db # Index page. @main.route('/', methods = ['GET', 'POST']) def index(): ''' View root page function that returns the index page and its data ''' pitches = Pitch.get_pitches() form = PitchForm() title = 'Home | Pitcher' form.category.query = Category.query if form.validate_on_submit(): selected_category = form.category.data pitch = form.pitch.data new_pitch = Pitch(pitch=pitch, user=current_user, category=selected_category) # Save Pitch new_pitch.save_pitch() return redirect(url_for('.index')) return render_template('home.html', title = title, form=form, pitches = pitches) @main.route('/<int:id>', methods = ['GET', 'POST']) def categories(id): ''' View root page function that returns the index page and its data ''' form = PitchForm() title = 'Home | Pitcher' form.category.query = Category.query if form.validate_on_submit(): selected_category = form.category.data pitch = form.pitch.data new_pitch = Pitch(pitch=pitch, user=current_user, category=selected_category) # Save Pitch new_pitch.save_pitch() return redirect(url_for('.index')) pitches = Pitch.get_pitches_by_category(id) return render_template('home.html', title = title, form=form, pitches = pitches) @main.route('/<pitch_id>/comments', methods = ['GET', 'POST']) def comment(pitch_id): comment_form = CommentForm() title = 'Comments | Pitcher' pitch = Pitch.query.filter_by(id=pitch_id).first() # Get comments for pitch. comments = Comment.get_comments(pitch_id) if comment_form.validate_on_submit(): comment = comment_form.comment.data new_comment = Comment(comment=comment, user=current_user, pitch=pitch) new_comment.save_comment() return redirect(request.referrer) return render_template('comments.html', pitch=pitch, form=comment_form, comments=comments, title=title) @main.route('/user/profile/<int:id>', methods = ['GET', 'POST']) @login_required def profile(id): form = UpdateProfilePic() user = User.query.filter_by(id=id).first() pitches = Pitch.get_pitches_by_user(id) title = '{} {} | Profile'.format(user.fname, user.lname) if form.validate_on_submit(): filename = photos.save(form.profile.data) profile_pic_path = f'img/{filename}' user.profile_pic_path = profile_pic_path db.session.commit() return redirect(url_for('main.profile', id=id)) return render_template('profile.html', pitches=pitches, form=form, user=user, title=title) @main.route('/like/<int:id>/<action>', methods = ['GET', 'POST']) @login_required def like_action(id, action): pitch = Pitch.query.filter_by(id=id).first_or_404() if action == 'like': current_user.like_pitch(pitch) db.session.commit() if action == 'unlike': current_user.unlike_pitch(pitch) db.session.commit() return redirect(request.referrer) @main.route('/like/comment/<int:id>/<action>', methods = ['GET', 'POST']) @login_required def like_comment_action(id, action): comment = Comment.query.filter_by(id=id).first_or_404() if action == 'like': current_user.like_comment(comment) db.session.commit() if action == 'unlike': current_user.unlike_comment(comment) db.session.commit() return redirect(url_for('main.comment', pitch_id = comment.pitch.id))
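A hedged sketch of exercising the index route of this Flask blueprint with the test client; it assumes the package exposes a create_app() application factory taking a config name, which is not shown in this excerpt:

from app import create_app  # hypothetical package and factory names

app = create_app('testing')
with app.test_client() as client:
    response = client.get('/')  # renders home.html with the pitch form and pitch list
    assert response.status_code == 200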
import asyncio import base64 import concurrent.futures import datetime import glob import json import math import os import pathlib import random import sys import time from json import dumps, loads from random import randint import re from re import findall import requests import urllib3 from Crypto.Cipher import AES from Crypto.Util.Padding import pad, unpad from requests import post from googletrans import Translator import io from PIL import Image , ImageFont, ImageDraw import arabic_reshaper from bidi.algorithm import get_display from mutagen.mp3 import MP3 from gtts import gTTS from threading import Thread urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) from difflib import SequenceMatcher from api_rubika import Bot,encryption def similar(a, b): return SequenceMatcher(None, a, b).ratio() def hasInsult(msg): swData = [False,None] if msg != None: for i in open("dontReadMe.txt").read().split("\n"): if i in msg: swData = [True, i] break else: continue return swData def hasAds(msg): links = list(map(lambda ID: ID.strip()[1:],findall("@[\w|_|\d]+", msg))) + list(map(lambda link:link.split("/")[-1],findall("rubika\.ir/\w+",msg))) joincORjoing = "joing" in msg or "joinc" in msg if joincORjoing: return joincORjoing else: for link in links: try: Type = bot.getInfoByUsername(link)["data"]["chat"]["abs_object"]["type"] if Type == "Channel": return True except KeyError: return False def search_i(text,chat,bot): try: search = text[11:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به زودی به پیوی شما ارسال میشوند', chat['last_message']['message_id']) jd = json.loads(requests.get('https://zarebin.ir/api/image/?q=' + search + '&chips=&page=1').text) jd = jd['results'] a = 0 for j in jd: if a <= 8: try: res = requests.get(j['image_link']) if res.status_code == 200 and res.content != b'' and j['cdn_thumbnail'] != '': thumb = str(j['cdn_thumbnail']) thumb = thumb.split('data:image/')[1] thumb = thumb.split(';')[0] if thumb == 'png': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.png', len(b2), 'png') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['last_message']['author_object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, j['title'] + '.png', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title']) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) elif thumb == 'webp': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.webp', len(b2), 'webp') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['last_message']['author_object_guid'] ,tx['id'] , 'webp', tx['dc_id'] , access, j['title'] + '.webp', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title']) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) else: b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.jpg', len(b2), 'jpg') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['last_message']['author_object_guid'] ,tx['id'] , 'jpg', tx['dc_id'] 
, access, j['title'] + '.jpg', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title']) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) a += 1 except: print('image error') else: break elif chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], 'در حال یافتن کمی صبور باشید...', chat['last_message']['message_id']) print('search image') jd = json.loads(requests.get('https://zarebin.ir/api/image/?q=' + search + '&chips=&page=1').text) jd = jd['results'] a = 0 for j in jd: if a < 10: try: res = requests.get(j['image_link']) if res.status_code == 200 and res.content != b'' and j['cdn_thumbnail'] != '' and j['cdn_thumbnail'].startswith('data:image'): thumb = str(j['cdn_thumbnail']) thumb = thumb.split('data:image/')[1] thumb = thumb.split(';')[0] if thumb == 'png': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.png', len(b2), 'png') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, j['title'] + '.png', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title'], chat['last_message']['message_id']) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) elif thumb == 'webp': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile(j['title'] + '.webp', len(b2), 'webp') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['object_guid'] ,tx['id'] , 'webp', tx['dc_id'] , access, j['title'] + '.webp', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title'], chat['last_message']['message_id']) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) else: b2 = res.content tx = bot.requestFile(j['title'] + '.jpg', len(b2), 'jpg') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) width, height = bot.getImageSize(b2) bot.sendImage(chat['object_guid'] ,tx['id'] , 'jpg', tx['dc_id'] , access, j['title'] + '.jpg', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, j['title'], chat['last_message']['message_id']) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) a += 1 except: print('image erorr') return True except: print('image search err') return False def write_image(text,chat,bot): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] paramiters = text[8:-1] paramiters = paramiters.split(':') if len(paramiters) == 5: b2 = bot.write_text_image(txt_xt,paramiters[0],int(paramiters[1]),str(paramiters[2]),int(paramiters[3]),int(paramiters[4])) tx = bot.requestFile('code_image.png', len(b2), 'png') if tx != 'many_request': access = bot.fileUpload(b2, 
tx['access_hash_send'], tx['id'], tx['upload_url']) width, height = bot.getImageSize(b2) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, 'code_image.png', len(b2) , str(bot.getThumbInline(b2))[2:-1] , width, height ,message_id= c_id) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) return True return False except: print('server ban bug') return False def uesr_remove(text,chat,bot): try: admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if chat['last_message']['author_object_guid'] in admins: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if not msg_data['author_object_guid'] in admins: bot.banGroupMember(chat['object_guid'], msg_data['author_object_guid']) bot.sendMessage(chat['object_guid'], 'انجام شد' , chat['last_message']['message_id']) return True return False except: print('server ban bug') return False def speak_after(text,chat,bot): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] speech = gTTS(txt_xt) changed_voice = io.BytesIO() speech.write_to_fp(changed_voice) b2 = changed_voice.getvalue() tx = bot.requestFile('sound.ogg', len(b2), 'sound.ogg') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) f = io.BytesIO() f.write(b2) f.seek(0) audio = MP3(f) dur = audio.info.length bot.sendVoice(chat['object_guid'],tx['id'] , 'ogg', tx['dc_id'] , access, 'sound.ogg', len(b2), dur * 1000 ,message_id= c_id) print('sended voice') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) return True return False except: print('server gtts bug') return False def joker(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/jok/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) return True except: print('code bz server err') return False def info_qroz(text,chat,bot): try: user_info = bot.getInfoByUsername(text[7:]) if user_info['data']['exist'] == True: if user_info['data']['type'] == 'User': bot.sendMessage(chat['object_guid'], 'name:\n ' + user_info['data']['user']['first_name'] + ' ' + user_info['data']['user']['last_name'] + '\n\nbio:\n ' + user_info['data']['user']['bio'] + '\n\nguid:\n ' + user_info['data']['user']['user_guid'] , chat['last_message']['message_id']) print('sended response') else: bot.sendMessage(chat['object_guid'], 'کانال است' , chat['last_message']['message_id']) print('sended response') else: bot.sendMessage(chat['object_guid'], 'وجود ندارد' , chat['last_message']['message_id']) print('sended response') return True except: print('server bug6') return False def search(text,chat,bot): try: search = text[9:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + 
'&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + '\n\n' bot.sendMessage(chat['object_guid'], 'نتایج به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + '\n\n' bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) return True except: print('search zarebin err') bot.sendMessage(chat['object_guid'], 'در حال حاضر این دستور محدود یا در حال تعمیر است' , chat['last_message']['message_id']) return False def p_danesh(text,chat,bot): try: res = requests.get('http://api.codebazan.ir/danestani/pic/') if res.status_code == 200 and res.content != b'': b2 = res.content width, height = bot.getImageSize(b2) tx = bot.requestFile('jok_'+ str(random.randint(1000000, 9999999)) + '.png', len(b2), 'png') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, 'jok_'+ str(random.randint(1000000, 9999999)) + '.png', len(b2), str(bot.getThumbInline(b2))[2:-1] , width, height, message_id=chat['last_message']['message_id']) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) return True except: print('code bz danesh api bug') return False def anti_insult(text,chat,bot): try: admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if not chat['last_message']['author_object_guid'] in admins: print('yek ahmagh fohsh dad: ' + chat['last_message']['author_object_guid']) bot.deleteMessages(chat['object_guid'], [chat['last_message']['message_id']]) return True return False except: print('delete the fohsh err') def anti_tabligh(text,chat,bot): try: admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if not chat['last_message']['author_object_guid'] in admins: print('yek ahmagh tabligh kard: ' + chat['last_message']['author_object_guid']) bot.deleteMessages(chat['object_guid'], [chat['last_message']['message_id']]) return True return False except: print('tabligh delete err') def get_curruncy(text,chat,bot): try: t = json.loads(requests.get('https://api.codebazan.ir/arz/?type=arz').text) text = '' for i in t: price = i['price'].replace(',','')[:-1] + ' تومان' text += i['name'] + ' : ' + price + '\n' bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) except: print('code bz arz err') return True def shot_image(text,chat,bot): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] res = requests.get('https://api.otherapi.tk/carbon?type=create&code=' + txt_xt + '&theme=vscode') if res.status_code == 200 and res.content != b'': b2 = res.content tx = bot.requestFile('code_image.png', len(b2), 'png') if tx != 'many_request': access = 
bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) width, height = bot.getImageSize(b2) bot.sendImage(chat['object_guid'] ,tx['id'] , 'png', tx['dc_id'] , access, 'code_image.png', len(b2) , str(bot.getThumbInline(b2))[2:-1] , width, height ,message_id= c_id) print('sended file') else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) except: print('code bz shot err') return True def get_ip(text,chat,bot): try: ip = text[5:-1] if hasInsult(ip)[0] == False: jd = json.loads(requests.get('https://api.codebazan.ir/ipinfo/?ip=' + ip).text) text = 'نام شرکت:\n' + jd['company'] + '\n\nکشور : \n' + jd['country_name'] + '\n\nارائه دهنده : ' + jd['isp'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz ip err') return True def get_weather(text,chat,bot): try: city = text[10:-1] if hasInsult(city)[0] == False: jd = json.loads(requests.get('https://api.codebazan.ir/weather/?city=' + city).text) text = 'دما : \n'+jd['result']['دما'] + '\n سرعت باد:\n' + jd['result']['سرعت باد'] + '\n وضعیت هوا: \n' + jd['result']['وضعیت هوا'] + '\n\n بروز رسانی اطلاعات امروز: ' + jd['result']['به روز رسانی'] + '\n\nپیش بینی هوا فردا: \n دما: ' + jd['فردا']['دما'] + '\n وضعیت هوا : ' + jd['فردا']['وضعیت هوا'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz weather err') return True def get_whois(text,chat,bot): try: site = text[8:-1] jd = json.loads(requests.get('https://api.codebazan.ir/whois/index.php?type=json&domain=' + site).text) text = 'مالک : \n'+jd['owner'] + '\n\n آیپی:\n' + jd['ip'] + '\n\nآدرس مالک : \n' + jd['address'] + '\n\ndns1 : \n' + jd['dns']['1'] + '\ndns2 : \n' + jd['dns']['2'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz whois err') return True def get_font(text,chat,bot): try: name_user = text[7:-1] jd = json.loads(requests.get('https://api.codebazan.ir/font/?text=' + name_user).text) jd = jd['result'] text = '' for i in range(1,100): text += jd[str(i)] + '\n' if hasInsult(name_user)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + name_user + ') : \n\n'+text) elif hasInsult(name_user)[0] == False and chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz font err') return True def get_ping(text,chat,bot): try: site = text[7:-1] jd = requests.get('https://api.codebazan.ir/ping/?url=' + site).text text = str(jd) bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz ping err') return True def get_gold(text,chat,bot): try: r = json.loads(requests.get('https://www.wirexteam.ga/gold').text) change = str(r['data']['last_update']) r = r['gold'] text = '' for o in r: text += o['name'] + ' : ' + o['nerkh_feli'] + '\n' text += '\n\nآخرین تغییر : ' + change bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('gold server err') return True def get_wiki(text,chat,bot): try: t = text[7:-1] t = t.split(':') mozoa = '' t2 = '' page = int(t[0]) for i in range(1,len(t)): t2 += t[i] mozoa = t2 if hasInsult(mozoa)[0] == False and chat['abs_object']['type'] == 'Group' 
and page > 0: text_t = requests.get('https://api.codebazan.ir/wiki/?search=' + mozoa).text if not 'codebazan.ir' in text_t: CLEANR = re.compile('<.*?>') def cleanhtml(raw_html): cleantext = re.sub(CLEANR, '', raw_html) return cleantext text_t = cleanhtml(text_t) n = 4200 text_t = text_t.strip() max_t = page * n min_t = max_t - n text = text_t[min_t:max_t] bot.sendMessage(chat['object_guid'], 'مقاله "'+ mozoa + '" صفحه : ' + str(page) + ' به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + mozoa + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User' and page > 0: text_t = requests.get('https://api.codebazan.ir/wiki/?search=' + mozoa).text if not 'codebazan.ir' in text_t: CLEANR = re.compile('<.*?>') def cleanhtml(raw_html): cleantext = re.sub(CLEANR, '', raw_html) return cleantext text_t = cleanhtml(text_t) n = 4200 text_t = text_t.strip() max_t = page * n min_t = max_t - n text = text_t[min_t:max_t] bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) except: print('code bz wiki err') return True def get_pa_na_pa(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/jok/pa-na-pa/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz pa na pa err') return True def get_dastan(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/dastan/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz dastan err') return True def get_search_k(text,chat,bot): try: search = text[11:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + ':\n\n ' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', '').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\n' bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'] text = '' for result in results: text += result['title'] + ':\n\n ' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', '').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\n' bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('zarebin search err') return True def get_bio(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/bio/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz bio err') return True def get_trans(text,chat,bot): try: t = text[8:-1] t = t.split(':') lang = t[0] t2 = '' for i in range(1,len(t)): t2 += t[i] text_trans = t2 if hasInsult(text_trans)[0] == False: t = Translator() text = 'متن ترجمه شده به ('+lang + ') :\n\n' + t.translate(text_trans,lang).text jj = hasInsult(text) if jj[0] != True: bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) elif chat['abs_object']['type'] == 'User': t = Translator() text = 'متن ترجمه شده به ('+lang + 
') :\n\n' + t.translate(text_trans,lang).text jj = hasInsult(text) if jj[0] != True: bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) except: print('google trans err') return True def get_khatere(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/jok/khatere/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz khatere err') return True def get_danesh(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/danestani/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz danesh err') return True def get_alaki_masala(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/jok/alaki-masalan/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz alaki masala err') return True def name_shakh(text,chat,bot): try: jd = requests.get('https://api.codebazan.ir/name/').text bot.sendMessage(chat['object_guid'], jd, chat['last_message']['message_id']) except: print('code bz name err') def get_vaj(text,chat,bot): try: vaj = text[6:-1] if hasInsult(vaj)[0] == False: jd = json.loads(requests.get('https://api.codebazan.ir/vajehyab/?text=' + vaj).text) jd = jd['result'] text = 'معنی : \n'+jd['mani'] + '\n\n لغتنامه معین:\n' + jd['Fmoein'] + '\n\nلغتنامه دهخدا : \n' + jd['Fdehkhoda'] + '\n\nمترادف و متضاد : ' + jd['motaradefmotezad'] bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz vaj err') def get_font_fa(text,chat,bot): try: site = text[10:-1] jd = json.loads(requests.get('https://api.codebazan.ir/font/?type=fa&text=' + site).text) jd = jd['Result'] text = '' for i in range(1,10): text += jd[str(i)] + '\n' if hasInsult(site)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + site + ') : \n\n'+text) elif hasInsult(name_user)[0] == False and chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('code bz font fa err') def get_leaved(text,chat,bot): try: send_text = 'بای بای 🖖' bot.sendMessage(chat['object_guid'], send_text, chat['last_message']['message_id']) except: print('rub server err') def get_added(text,chat,bot): try: group = chat['abs_object']['title'] send_text = 'سلام دوست عزیز به ' + group + ' خوش آمدی ❤ \n لطفا قوانین رو رعایت کن ✅' bot.sendMessage(chat['object_guid'], send_text, chat['last_message']['message_id']) except: print('rub server err') def get_qroz_me(text,chat,bot): try: send_text = open('qroz-aks.txt','r').read() bot.sendMessage(chat['object_guid'], send_text, chat['last_message']['message_id']) except: print('rub server err') def get_help(text,chat,bot): text = open('help.txt','r').read() if chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], text) elif chat['abs_object']['type'] == 'User': bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) print('help guid sended') def get_lang_help(text,chat,bot): text = open('lang_help.txt','r').read() bot.sendMessage(chat['object_guid'], text, chat['last_message']['message_id']) print('help guid sended') def 
usvl_save_data(text,chat,bot,auths:list): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] ll = hasInsult(text) if ll[0] != True: f3 = len(open('farsi-dic.json','rb').read()) if f3 < 83886080: f2 = json.loads(open('farsi-dic.json','r').read()) if not txt_xt in f2.keys(): f2[txt_xt] = [text.replace('"', "'")] else: if not text in f2[txt_xt]: f2[txt_xt].append(text.replace('"', "'")) c1 = open('farsi-dic.json','w') c1.write(json.dumps(f2)) c1.close() else: bot.sendMessage(chat['object_guid'], '!usvl_stop') b2 = open('farsi-dic.json','rb').read() tx = bot.requestFile('farsi-dic.json', len(b2), 'json') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendFile(chat['object_guid'] ,tx['id'] , 'json', tx['dc_id'] , access, 'farsi-dic.json', len(b2), message_id=c_id) else: print('many request') bot2 = Bot(auths[1]) tx = bot2.requestFile('farsi-dic.json', len(b2), 'json') if tx != 'many_request': access = bot2.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot2.sendFile(chat['object_guid'] ,tx['id'] , 'json', tx['dc_id'] , access, 'farsi-dic.json', len(b2), message_id=chat['last_message']['message_id']) else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) jj = True return True jj = True except: print('usvl save data is err') def usvl_test_data(text,chat,bot): t = False while t == False: try: f2 = json.loads(open('farsi-dic.json','r').read()) shebahat = 0.0 a = 0 shabih_tarin = None shabih_tarin2 = None for text2 in f2.keys(): sh2 = similar(text, text2) if sh2 > shebahat: shebahat = sh2 shabih_tarin = a shabih_tarin2 = text2 a += 1 print('shabih tarin: ' + str(shabih_tarin) , '|| darsad shebaht :' + str(shebahat)) if shabih_tarin2 != None and shebahat > .45: t8 = str(random.choice(f2[shabih_tarin2])) jj = hasInsult(t8) if jj[0] != True: bot.sendMessage(chat['object_guid'], t8, chat['last_message']['message_id']) t = True except: print('test error new server or code') def get_backup(text,chat,bot,auths:list): if bot != None: b2 = open('farsi-dic.json','rb').read() tx = bot.requestFile('farsi-dic.json', len(b2), 'json') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendFile(chat['object_guid'] ,tx['id'] , 'json', tx['dc_id'] , access, 'farsi-dic.json', len(b2), message_id=chat['last_message']['message_id']) else: print('many request') bot2 = Bot(auths[1]) tx = bot2.requestFile('farsi-dic.json', len(b2), 'json') if tx != 'many_request': access = bot2.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot2.sendFile(chat['object_guid'] ,tx['id'] , 'json', tx['dc_id'] , access, 'farsi-dic.json', len(b2), message_id=chat['last_message']['message_id']) else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) #send verfied users file b2 = open('verfied_users.txt','rb').read() tx = bot.requestFile('verfied_users.txt', len(b2), 'txt') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) 
bot.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'verfied_users.txt', len(b2), message_id=chat['last_message']['message_id']) else: print('many request') bot2 = Bot(auths[1]) tx = bot2.requestFile('verfied_users.txt', len(b2), 'txt') if tx != 'many_request': access = bot2.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot2.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'verfied_users.txt', len(b2), message_id=chat['last_message']['message_id']) else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) #send black users file b2 = open('black_list.txt','rb').read() tx = bot.requestFile('black_list.txt', len(b2), 'txt') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'black_list.txt', len(b2), message_id=chat['last_message']['message_id']) else: print('many request') bot2 = Bot(auths[1]) tx = bot2.requestFile('black_list.txt', len(b2), 'txt') if tx != 'many_request': access = bot2.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot2.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'black_list.txt', len(b2), message_id=chat['last_message']['message_id']) else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) #send qroz admins file b2 = open('qrozAdmins.txt','rb').read() tx = bot.requestFile('qrozAdmins.txt', len(b2), 'txt') if tx != 'many_request': access = bot.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'qrozAdmins.txt', len(b2), message_id=chat['last_message']['message_id']) else: print('many request') bot2 = Bot(auths[1]) tx = bot2.requestFile('qrozAdmins.txt', len(b2), 'txt') if tx != 'many_request': access = bot2.fileUpload(b2, tx['access_hash_send'], tx['id'], tx['upload_url']) bot2.sendFile(chat['object_guid'] ,tx['id'] , 'txt', tx['dc_id'] , access, 'qrozAdmins.txt', len(b2), message_id=chat['last_message']['message_id']) bot.sendMessage(chat['object_guid'], 'نسخه پشتیبان با موفقیت تهیه گردید' , chat['last_message']['message_id']) else: print('many request err') bot.sendMessage(chat['object_guid'], 'در حال حاضر بات قادر به ارسال فایل نمی باشد' , chat['last_message']['message_id']) else: print('back err') def code_run(text,chat,bot,lang_id): try: c_id = chat['last_message']['message_id'] msg_data = bot.getMessagesInfo(chat['object_guid'], [c_id]) msg_data = msg_data[0] if 'reply_to_message_id' in msg_data.keys(): msg_data = bot.getMessagesInfo(chat['object_guid'], [msg_data['reply_to_message_id']])[0] if 'text' in msg_data.keys() and msg_data['text'].strip() != '': txt_xt = msg_data['text'] h = { "Origin":"https://sourcesara.com", "User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:97.0) Gecko/20100101 Firefox/97.0", } p = requests.post('https://sourcesara.com/tryit_codes/runner.php',{'LanguageChoiceWrapper':lang_id,'Program':txt_xt},headers=h) p = p.json() jj = hasInsult(p['Result']) jj2 = hasInsult(p['Errors']) time_run = p['Stats'].split(',')[0].split(':')[1].strip() if jj[0] != True and jj2[0] != True: if p['Errors'] != None: if len(p['Result']) < 4200: bot.sendMessage(chat['object_guid'], 'Code runned at '+ time_run +'\nErrors:\n' + p['Errors'] + 
'\n\nResponse:\n'+ p['Result'], chat['last_message']['message_id']) else: bot.sendMessage(chat['object_guid'], 'Code runned at '+ time_run +'\nErrors:\n' + p['Errors'] + '\n\nResponse:\nپاسخ بیش از حد تصور بزرگ است' , chat['last_message']['message_id']) else: if len(p['Result']) < 4200: bot.sendMessage(chat['object_guid'], 'Code runned at '+ time_run +'\nResponse:\n'+ p['Result'], chat['last_message']['message_id']) else: bot.sendMessage(chat['object_guid'], 'Code runned at '+ time_run +'\nResponse:\nپاسخ بیش از حد تصور بزرگ است', chat['last_message']['message_id']) except: print('server code runer err') g_usvl = '' test_usvl = '' auths = open('multi_acconting.txt','r').read().split('\n') auth = auths[0] bot = Bot(auth) list_message_seened = [] time_reset = math.floor(datetime.datetime.today().timestamp()) + 350 while(2 > 1): try: chats_list:list = bot.get_updates_all_chats() if chats_list != []: for chat in chats_list: access = chat['access'] if chat['abs_object']['type'] == 'User' or chat['abs_object']['type'] == 'Group': text:str = chat['last_message']['text'] qrozAdmins = open('qrozAdmins.txt','r').read().split('\n') if 'SendMessages' in access and chat['last_message']['type'] == 'Text' and text.strip() != '': text = text.strip() m_id = chat['object_guid'] + chat['last_message']['message_id'] if not m_id in list_message_seened: verfied_users = open('verfied_users.txt','r').read().split('\n') black_users = open('black_list.txt','r').read().split('\n') print('new message') accept = True if chat['abs_object']['type'] == 'User' and not chat['object_guid'] in qrozAdmins and open('your_channel.txt','r').read() != '' and not chat['object_guid'] in black_users: if not chat['object_guid'] in verfied_users: is_joined = bot.checkJoinChannel(chat['object_guid'], open('your_channel.txt','r').read()) if is_joined == 'no exist': accept = False bot.sendMessage(chat['object_guid'], '❤\nکاربر گرامی برای استفاده از ربات ابتدا عضو کانال زیر شوید \n@learn_b4a\n\n سپس پیام زیر را بفرستید \n\n!start\n❤') elif is_joined == 'need for username' or is_joined == 'Profile not success': accept = False bot.sendMessage(chat['object_guid'], '❤\nکاربر گرامی برای استفاده از ربات ابتدا نام کاربری خود را تنظیم کنید یا نام و نام خانوادگی را به متنی مفهوم تغییر دهید\nسپس در صورت عضو نبودن عضو کانال زیر شوید \n@learn_b4a\n\n سپس پیام زیر را بفرستید \n\n!start\n❤') elif is_joined == 'is exist': fp = open('verfied_users.txt','a') fp.write('\n' + chat['object_guid']) fp.close() if text.startswith('!') == False: bot.sendMessage(chat['object_guid'], 'برای استفاده از ربات دستور زیر را بفرستید\n!start\n❤') accept = True elif chat['object_guid'] in black_users: accept = False if accept == True: if text == '!start': print('message geted and sinned') try: bot.sendMessage(chat['object_guid'], 'سلام \n به ابر سرویس کروز خوش آمدید ❤\n\n لطفا جهت راهنما \n!help \nرا ارسال کنید',chat['last_message']['message_id']) print('sended response') except: print('server bug1') elif text.startswith('!nim http://') == True or text.startswith('!nim https://') == True: try: bot.sendMessage(chat['object_guid'], "در حال آماده سازی لینک ...",chat['last_message']['message_id']) print('sended response') link = text[4:] nim_baha_link=requests.post("https://www.digitalbam.ir/DirectLinkDownloader/Download",params={'downloadUri':link}) pg:str = nim_baha_link.text pg = pg.split('{"fileUrl":"') pg = pg[1] pg = pg.split('","message":""}') pg = pg[0] nim_baha = pg try: bot.sendMessage(chat['object_guid'], 'لینک نیم بها شما با موفقیت آماده شد ✅ \n لینک : \n' + 
nim_baha ,chat['last_message']['message_id']) print('sended response') except: print('server bug2') except: print('server bug3') elif text.startswith('!info @'): tawd10 = Thread(target=info_qroz, args=(text, chat, bot,)) tawd10.start() elif text.startswith('!search ['): tawd11 = Thread(target=search, args=(text, chat, bot,)) tawd11.start() elif text.startswith('!wiki-s ['): try: search = text[9:-1] search = search + ' ویکی پدیا' if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'][0:4] text = '' for result in results: if ' - ویکی‌پدیا، دانشنامهٔ آزاد' in result['title']: title = result['title'].replace(' - ویکی‌پدیا، دانشنامهٔ آزاد','') text += title + ' :\n\n' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', '').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\nمقاله کامل صفحه 1 : \n' + '!wiki [1:' + title + ']\n\n' bot.sendMessage(chat['object_guid'], 'نتایج به پیوی شما ارسال شد', chat['last_message']['message_id']) bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://zarebin.ir/api/?q=' + search + '&page=1&limit=10').text) results = jd['results']['webs'][0:4] text = '' for result in results: if ' - ویکی‌پدیا، دانشنامهٔ آزاد' in result['title']: title = result['title'].replace(' - ویکی‌پدیا، دانشنامهٔ آزاد','') text += title + ' :\n\n' + str(result['description']).replace('</em>', '').replace('<em>', '').replace('(Meta Search Engine)', '').replace('&quot;', '').replace(' — ', '').replace(' AP', '') + '\n\nمقاله کامل صفحه 1 : \n' + '!wiki [1:' + title + ']\n\n' bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('wiki s err') elif text.startswith('!jok'): tawd9 = Thread(target=joker, args=(text, chat, bot,)) tawd9.start() elif text.startswith('!name_shakh'): tawd32 = Thread(target=name_shakh, args=(text, chat, bot,)) tawd32.start() elif text.startswith('!khatere'): tawd29 = Thread(target=get_khatere, args=(text, chat, bot,)) tawd29.start() elif text.startswith('!danesh'): tawd30 = Thread(target=get_danesh, args=(text, chat, bot,)) tawd30.start() elif text.startswith('!pa_na_pa'): tawd24 = Thread(target=get_pa_na_pa, args=(text, chat, bot,)) tawd24.start() elif text.startswith('!alaki_masala'): tawd31 = Thread(target=get_alaki_masala, args=(text, chat, bot,)) tawd31.start() elif text.startswith('!dastan'): tawd25 = Thread(target=get_dastan, args=(text, chat, bot,)) tawd25.start() elif text.startswith('!bio'): tawd27 = Thread(target=get_bio, args=(text, chat, bot,)) tawd27.start() elif text.startswith('!search-k ['): tawd26 = Thread(target=get_search_k, args=(text, chat, bot,)) tawd26.start() elif text.startswith('!ban [') and chat['abs_object']['type'] == 'Group' and 'BanMember' in access: try: user = text[6:-1].replace('@', '') guid = bot.getInfoByUsername(user)["data"]["chat"]["abs_object"]["object_guid"] admins = [i["member_guid"] for i in bot.getGroupAdmins(chat['object_guid'])["data"]["in_chat_members"]] if not guid in admins and chat['last_message']['author_object_guid'] in admins: bot.banGroupMember(chat['object_guid'], guid) bot.sendMessage(chat['object_guid'], 'انجام شد' , chat['last_message']['message_id']) except: print('ban bug') elif text.startswith('!search-i ['): print('mpa 
started') tawd = Thread(target=search_i, args=(text, chat, bot,)) tawd.start() elif text.startswith('!remove') and chat['abs_object']['type'] == 'Group' and 'BanMember' in access: print('mpa started') tawd2 = Thread(target=uesr_remove, args=(text, chat, bot,)) tawd2.start() elif text.startswith('!trans ['): tawd28 = Thread(target=get_trans, args=(text, chat, bot,)) tawd28.start() elif text.startswith('!myket-s ['): try: search = text[10:-1] if hasInsult(search)[0] == False and chat['abs_object']['type'] == 'Group': bot.sendMessage(chat['object_guid'], 'نتایج کامل به زودی به پیوی شما ارسال میشوند', chat['last_message']['message_id']) jd = json.loads(requests.get('https://www.wirexteam.ga/myket?type=search&query=' + search).text) jd = jd['search'] a = 0 text = '' for j in jd: if a <= 7: text += '🔸 عنوان : ' + j['title_fa'] + '\nℹ️ توضیحات : '+ j['tagline'] + '\n🆔 نام یکتا برنامه : ' + j['package_name'] + '\n⭐️امتیاز: ' + str(j['rate']) + '\n✳ نام نسخه : ' + j['version'] + '\nقیمت : ' + j['price'] + '\nحجم : ' + j['size'] + '\nبرنامه نویس : ' + j['developer'] + '\n\n' a += 1 else: break if text != '': bot.sendMessage(chat['last_message']['author_object_guid'], 'نتایج یافت شده برای (' + search + ') : \n\n'+text) elif chat['abs_object']['type'] == 'User': jd = json.loads(requests.get('https://www.wirexteam.ga/myket?type=search&query=' + search).text) jd = jd['search'] a = 0 text = '' for j in jd: if a <= 7: text += '🔸 عنوان : ' + j['title_fa'] + '\nℹ️ توضیحات : '+ j['tagline'] + '\n🆔 نام یکتا برنامه : ' + j['package_name'] + '\n⭐️امتیاز: ' + str(j['rate']) + '\n✳ نام نسخه : ' + j['version'] + '\nقیمت : ' + j['price'] + '\nحجم : ' + j['size'] + '\nبرنامه نویس : ' + j['developer'] + '\n\n' a += 1 else: break if text != '': bot.sendMessage(chat['object_guid'], text , chat['last_message']['message_id']) except: print('myket server err') elif text.startswith('!wiki ['): tawd23 = Thread(target=get_wiki, args=(text, chat, bot,)) tawd23.start() elif text.startswith('!currency'): print('mpa started') tawd15 = Thread(target=get_curruncy, args=(text, chat, bot,)) tawd15.start() elif text.startswith('!gold'): tawd22 = Thread(target=get_gold, args=(text, chat, bot,)) tawd22.start() elif text.startswith('!ping ['): tawd21 = Thread(target=get_ping, args=(text, chat, bot,)) tawd21.start() elif text.startswith('!font ['): tawd20 = Thread(target=get_font, args=(text, chat, bot,)) tawd20.start() elif text.startswith('!font-fa ['): tawd34 = Thread(target=get_font_fa, args=(text, chat, bot,)) tawd34.start() elif text.startswith('!whois ['): tawd19 = Thread(target=get_whois, args=(text, chat, bot,)) tawd19.start() elif text.startswith('!vaj ['): tawd33 = Thread(target=get_vaj, args=(text, chat, bot,)) tawd33.start() elif text.startswith('!weather ['): tawd18 = Thread(target=get_weather, args=(text, chat, bot,)) tawd18.start() elif text.startswith('!ip ['): tawd17 = Thread(target=get_ip, args=(text, chat, bot,)) tawd17.start() elif text.startswith("!add [") and chat['abs_object']['type'] == 'Group' and 'AddMember' in access: try: user = text[6:-1] bot.invite(chat['object_guid'], [bot.getInfoByUsername(user.replace('@', ''))["data"]["chat"]["object_guid"]]) bot.sendMessage(chat['object_guid'], 'اضافه شد' , chat['last_message']['message_id']) except: print('add not successd') elif text.startswith('!math ['): try: amal_and_value = text[7:-1] natije = '' if amal_and_value.count('*') == 1: value1 = float(amal_and_value.split('*')[0].strip()) value2 = float(amal_and_value.split('*')[1].strip()) natije = value1 * value2 
elif amal_and_value.count('/') > 0: value1 = float(amal_and_value.split('/')[0].strip()) value2 = float(amal_and_value.split('/')[1].strip()) natije = value1 / value2 elif amal_and_value.count('+') > 0: value1 = float(amal_and_value.split('+')[0].strip()) value2 = float(amal_and_value.split('+')[1].strip()) natije = value1 + value2 elif amal_and_value.count('-') > 0: value1 = float(amal_and_value.split('-')[0].strip()) value2 = float(amal_and_value.split('-')[1].strip()) natije = value1 - value2 elif amal_and_value.count('**') > 0: value1 = float(amal_and_value.split('**')[0].strip()) value2 = float(amal_and_value.split('**')[1].strip()) natije = value1 ** value2 if natije != '': bot.sendMessage(chat['object_guid'], natije , chat['last_message']['message_id']) except: print('math err') elif text.startswith('!shot'): tawd16 = Thread(target=shot_image, args=(text, chat, bot,)) tawd16.start() elif text.startswith('!speak'): print('mpa started') tawd6 = Thread(target=speak_after, args=(text, chat, bot,)) tawd6.start() elif text.startswith('!p_danesh'): tawd12 = Thread(target=p_danesh, args=(text, chat, bot,)) tawd12.start() elif text.startswith('!write ['): print('mpa started') tawd5 = Thread(target=write_image, args=(text, chat, bot,)) tawd5.start() elif text.startswith('!qroz'): print('qroz lovolier for ever (:') tawd45 = Thread(target=get_qroz_me, args=(text, chat, bot,)) tawd45.start() elif text.startswith('!code_'): text2 = text.replace('!code_','') dict_langs = {'c#':1,'java':4,'js':17,'kotlin':43,'nodejs':23,'perl':13,'php':8,'py':24,'ruby':12} if text2 in dict_langs.keys(): tawd46 = Thread(target=code_run, args=(text, chat, bot, dict_langs[text2],)) tawd46.start() elif chat['abs_object']['type'] == 'Group' and 'DeleteGlobalAllMessages' in access and hasInsult(text)[0] == True: tawd13 = Thread(target=anti_insult, args=(text, chat, bot,)) tawd13.start() elif chat['abs_object']['type'] == 'Group' and 'DeleteGlobalAllMessages' in access and hasAds(text) == True: tawd14 = Thread(target=anti_tabligh, args=(text, chat, bot,)) tawd14.start() elif text.startswith('!help'): tawd38 = Thread(target=get_help, args=(text, chat, bot,)) tawd38.start() elif text.startswith('!lang_help'): tawd47 = Thread(target=get_lang_help, args=(text, chat, bot,)) tawd47.start() elif text.startswith('!usvl_start') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and g_usvl == '': g_usvl = chat['object_guid'] print('usvl started on ' + g_usvl) bot.sendMessage(chat['object_guid'], 'usvl is started', chat['last_message']['message_id']) elif text.startswith('!usvl_stop') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and g_usvl != '': print('usvl sttoped on ' + g_usvl) g_usvl = '' bot.sendMessage(chat['object_guid'], 'usvl is stopped', chat['last_message']['message_id']) elif text.startswith('!usvl_test') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and g_usvl == '' and test_usvl == '': test_usvl = chat['object_guid'] print('usvl started test on ' + test_usvl) bot.sendMessage(chat['object_guid'], 'test usvl is started', chat['last_message']['message_id']) elif text.startswith('!usvl_untest') and chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and test_usvl == chat['object_guid']: print('usvl stopped test on ' + test_usvl) test_usvl = '' bot.sendMessage(chat['object_guid'], 'test usvl is stopped', 
chat['last_message']['message_id']) elif text.startswith('!backup') and chat['object_guid'] in qrozAdmins: tawd44 = Thread(target=get_backup, args=(text, chat, bot, auths,)) tawd44.start() elif text.startswith('!black [') and chat['object_guid'] in qrozAdmins: try: guid = text[8:-1] if len(guid) == 32 and not guid in black_users: fp = open('black_list.txt','a') fp.write('\n' + guid) fp.close() bot.sendMessage(guid, 'متاسفانه شما فعلا به لیست سیاه کروز اضافه شدید') except: print('black list err') elif text.startswith('!unblack [') and chat['object_guid'] in qrozAdmins: try: guid = text[10:-1] if len(guid) == 32 and guid in black_users: black_users = open('black_list.txt','r').read() bl = open('black_list.txt','w') bl.write(black_users.replace('\n' + guid, '')) bl.close() bot.sendMessage(guid, 'خوشبختانه از لیست سیاه کروز بیرون آمدید') except: print('black list err') elif chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and text.startswith('!black') and not chat['object_guid'] in black_users: fp = open('black_list.txt','a') fp.write('\n'+ chat['object_guid']) fp.close() bot.sendMessage(chat['object_guid'], 'گروه به لیست سیاه کروز اضافه گردید ):') elif chat['object_guid'] == g_usvl and chat['last_message']['author_object_guid'] != open('me_guid.txt','r').read() and chat['abs_object']['type'] == 'Group' and not text.startswith('!'): tawd42 = Thread(target=usvl_save_data, args=(text, chat, bot, auths,)) tawd42.start() elif test_usvl == chat['object_guid'] and chat['last_message']['author_object_guid'] != open('me_guid.txt','r').read() and chat['abs_object']['type'] == 'Group' and not text.startswith('!'): print('usvl tested') tawd43 = Thread(target=usvl_test_data, args=(text, chat, bot,)) tawd43.start() elif chat['abs_object']['type'] == 'Group' and chat['last_message']['author_object_guid'] in qrozAdmins and text.startswith('!unblack') and chat['object_guid'] in black_users: black_users = open('black_list.txt','r').read() bl = open('black_list.txt','w') bl.write(black_users.replace('\n' + chat['object_guid'], '')) bl.close() bot.sendMessage(chat['object_guid'], 'گروه از لیست سیاه کروز در آمد (:') list_message_seened.append(m_id) elif 'SendMessages' in access and chat['last_message']['type'] == 'Other' and text.strip() != '' and chat['abs_object']['type'] == 'Group' and chat['abs_object']['type'] == 'Group': text = text.strip() m_id = chat['object_guid'] + chat['last_message']['message_id'] if not m_id in list_message_seened: if text == 'یک عضو گروه را ترک کرد.': tawd35 = Thread(target=get_leaved, args=(text, chat, bot,)) tawd35.start() elif text == '1 عضو جدید به گروه افزوده شد.' 
or text == 'یک عضو از طریق لینک به گروه افزوده شد.': tawd36 = Thread(target=get_added, args=(text, chat, bot,)) tawd36.start() list_message_seened.append(m_id) elif 'SendMessages' in access and text.strip() != '' and chat['abs_object']['type'] == 'Group': text = text.strip() m_id = chat['object_guid'] + chat['last_message']['message_id'] if not m_id in list_message_seened: if 'DeleteGlobalAllMessages' in access and hasInsult(text)[0] == True: tawd39 = Thread(target=anti_insult, args=(text, chat, bot,)) tawd39.start() list_message_seened.append(m_id) elif 'DeleteGlobalAllMessages' in access and hasAds(text) == True: tawd40 = Thread(target=anti_tabligh, args=(text, chat, bot,)) tawd40.start() list_message_seened.append(m_id) else: print('no update ') except: print('qroz err koli') time_reset2 = math.floor(datetime.datetime.today().timestamp()) if list_message_seened != [] and time_reset2 > time_reset: list_message_seened = [] time_reset = math.floor(datetime.datetime.today().timestamp()) + 350
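# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the bot script above): usvl_test_data picks
# the stored prompt most similar to the incoming text and replies with a random
# saved answer from farsi-dic.json ({prompt: [reply, reply, ...]}). The bot's
# similar() helper is not defined in this excerpt, so this standalone sketch
# substitutes difflib.SequenceMatcher.ratio(), which yields a 0..1 similarity
# compatible with the 0.45 threshold used above. Names here are hypothetical.
import difflib
import json
import random


def pick_reply(text, dic_path='farsi-dic.json', threshold=0.45):
    """Return a stored reply for the prompt most similar to `text`,
    or None if nothing clears the threshold."""
    with open(dic_path, encoding='utf-8') as fp:
        replies = json.load(fp)  # {prompt: [reply, reply, ...]}

    best_prompt, best_score = None, 0.0
    for prompt in replies:
        score = difflib.SequenceMatcher(None, text, prompt).ratio()
        if score > best_score:
            best_prompt, best_score = prompt, score

    if best_prompt is not None and best_score > threshold:
        return random.choice(replies[best_prompt])
    return None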
// SilvervineUE4Lua / devCAT studio
// Copyright 2016 - 2020. Nexon Korea Corporation. All rights reserved.

#pragma once

#include "Modules/ModuleInterface.h"

#include "LuaFileLoader.h"

//
// SUE4Lua module definition
//
class SILVERVINEUE4LUA_API FSilvervineUE4LuaModule : public IModuleInterface
{
	/** IModuleInterface implementation */
	virtual void StartupModule() override;
	virtual void ShutdownModule() override;

public:
	static FSilvervineUE4LuaModule& Get();

	DECLARE_MULTICAST_DELEGATE_OneParam(FFileModifiedDelegate, const FString& /*Filename*/);

public:
	// Returns the currently configured file loader.
	// The default is an internally created null loader instance; it does nothing and only prints warning messages.
	FSUE4LuaFileLoader& GetFileLoader() const;

	// Sets the file loader.
	// Passing a null object restores the default (the null loader instance).
	// The return value is the previously configured file loader.
	TSharedPtr<FSUE4LuaFileLoader> SetFileLoader(TSharedPtr<FSUE4LuaFileLoader> NewFileLoader);

	// Event fired when a file has been modified.
	FFileModifiedDelegate FileModifiedDelegate;

private:
	// The currently configured file loader.
	TSharedPtr<FSUE4LuaFileLoader> FileLoader;
};
# coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor from ..utils import ( int_or_none, remove_start, ) class RozhlasIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?prehravac\.rozhlas\.cz/audio/(?P<id>[0-9]+)' _TESTS = [{ 'url': 'http://prehravac.rozhlas.cz/audio/3421320', 'md5': '504c902dbc9e9a1fd50326eccf02a7e2', 'info_dict': { 'id': '3421320', 'ext': 'mp3', 'title': 'Echo Pavla Klusáka (30.06.2015 21:00)', 'description': 'Osmdesátiny Terryho Rileyho jsou skvělou příležitostí proletět se elektronickými i akustickými díly zakladatatele minimalismu, který je aktivní už přes padesát let' } }, { 'url': 'http://prehravac.rozhlas.cz/audio/3421320/embed', 'skip_download': True, }] def _real_extract(self, url): audio_id = self._match_id(url) webpage = self._download_webpage( 'http://prehravac.rozhlas.cz/audio/%s' % audio_id, audio_id) title = self._html_search_regex( r'<h3>(.+?)</h3>\s*<p[^>]*>.*?</p>\s*<div[^>]+id=["\']player-track', webpage, 'title', default=None) or remove_start( self._og_search_title(webpage), 'Radio Wave - ') description = self._html_search_regex( r'<p[^>]+title=(["\'])(?P<url>(?:(?!\1).)+)\1[^>]*>.*?</p>\s*<div[^>]+id=["\']player-track', webpage, 'description', fatal=False, group='url') duration = int_or_none(self._search_regex( r'data-duration=["\'](\d+)', webpage, 'duration', default=None)) return { 'id': audio_id, 'url': 'http://media.rozhlas.cz/_audio/%s.mp3' % audio_id, 'title': title, 'description': description, 'duration': duration, 'vcodec': 'none', }
""" Copyright (c) 2018-2022 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import numpy as np from .base_profiler import MetricProfiler def preprocess_prediction_list(prediction_label, data_type=int, to_scalar=False): if np.isscalar(prediction_label): pred_label = [data_type(prediction_label)] if not to_scalar else data_type(prediction_label) else: if np.shape(prediction_label): if to_scalar and np.size(prediction_label) == 1: return data_type(prediction_label[0]) pred_label = ( prediction_label.astype(data_type).tolist() ) else: pred_label = prediction_label.astype(data_type) if np.size(prediction_label) == 1 and to_scalar: if np.ndim(prediction_label) == 0: return prediction_label.tolist() return prediction_label[0] pred_label = [pred_label.tolist()] if isinstance(prediction_label, type(np.array(0))) else '' return pred_label class ClassificationMetricProfiler(MetricProfiler): __provider__ = 'classification' fields = ['identifier', 'annotation_label', 'prediction_label'] def generate_profiling_data( self, identifier, annotation_label, prediction_label, metric_name, metric_result, prediction_scores=None ): if self._last_profile and self._last_profile['identifier'] == identifier: self._last_profile['{}_result'.format(metric_name)] = metric_result.tolist() return self._last_profile if 'prediction_scores' not in self.fields and self.report_type == 'json' and prediction_scores is not None: self.fields.append('prediction_scores') result_name = '{}_result'.format(metric_name) if self.report_type != 'json' else 'result' result = { 'identifier': identifier, 'annotation_label': int(annotation_label), 'prediction_label': preprocess_prediction_list(prediction_label), result_name: preprocess_prediction_list(metric_result, float, to_scalar=True) } if self.report_type == 'json': result['prediction_scores'] = preprocess_prediction_list(prediction_scores, float) return result class CharRecognitionMetricProfiler(MetricProfiler): __provider__ = 'char_classification' fields = ['identifier', 'annotation_label', 'prediction_label', 'result'] def generate_profiling_data(self, identifier, annotation_label, prediction_label, metric_name, metric_result): if self._last_profile and self._last_profile['identifier'] == identifier: self._last_profile['{}_result'.format(metric_name)] = metric_result.tolist() return self._last_profile return { 'identifier': identifier, 'annotation_label': int(annotation_label), 'prediction_label': preprocess_prediction_list(prediction_label), '{}_result'.format(metric_name): preprocess_prediction_list(metric_result, float, to_scalar=True) } class ClipAccuracyProfiler(MetricProfiler): __provider__ = 'clip_classification' fields = [ 'identifier', 'annotation_label', 'prediction_label', 'clip_accuracy', 'video_average', 'video_average_accuracy' ] def generate_profiling_data(self, identifier, annotation_label, prediction_label, metric_name, metric_result): if self._last_profile and self._last_profile['identifier'] == identifier: self._last_profile['{}_result'.format(metric_name)] = metric_result.tolist() 
return self._last_profile return { 'identifier': identifier, 'annotation_label': annotation_label, 'prediction_label': preprocess_prediction_list(prediction_label), '{}_result'.format(metric_name): metric_result } class BinaryClassificationProfiler(MetricProfiler): __provider__ = 'binary_classification' fields = ['identifier', 'annotation_label', 'prediction_label', 'TP', 'TN', 'FP', 'FN', 'result'] def generate_profiling_data( self, identifier, annotation_label, prediction_label, tp, tn, fp, fn, metric_name, metric_result): if self._last_profile and self._last_profile['identifier'] == identifier: self._last_profile['{}_result'.format(metric_name)] = metric_result return self._last_profile return { 'identifier': identifier, 'annotation_label': annotation_label, 'prediction_label': preprocess_prediction_list(prediction_label), 'TP': tp, 'TN': tn, 'FP': fp, 'FN': fn, '{}_result'.format(metric_name): metric_result }
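# ---------------------------------------------------------------------------
# Illustrative sketch (separate from the profiler module above): the common
# cases handled by preprocess_prediction_list, shown with plain numpy inputs.
# This is a simplified restatement for clarity under those assumptions, not
# the module's own test suite; normalize_label is a hypothetical stand-in.
import numpy as np


def normalize_label(label, data_type=int, to_scalar=False):
    """Scalars become one-element lists (or a scalar when to_scalar=True);
    arrays become plain Python lists of data_type."""
    if np.isscalar(label):
        return data_type(label) if to_scalar else [data_type(label)]
    label = np.asarray(label)
    if to_scalar and label.size == 1:
        return data_type(label.reshape(-1)[0])
    return label.astype(data_type).tolist()


assert normalize_label(3.0) == [3]
assert normalize_label(np.array([0.9]), float, to_scalar=True) == 0.9
assert normalize_label(np.array([1, 2, 3])) == [1, 2, 3]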
import React, { Component, Fragment } from 'react';
import { Row, Col } from 'antd';

export default class Home extends Component {
    constructor(props, context) {
        super(props);
        this.state = {};
    }

    render() {
        return (
            <Fragment>
                <div style={{ width: '100%', height: '100%', background: 'url("")', backgroundSize: 'cover', zIndex: '-1' }}>
                    <Row type="flex" justify="center">
                    </Row>
                </div>
                <Row type="flex" justify="center">
                    <Col span={8} style={{ backgroundColor: 'white' }}>
                    </Col>
                </Row>
            </Fragment>
        );
    }
}
""" pygments.lexers._csound_builtins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ # Opcodes in Csound 6.14.0 using: # python3 -c " # import re # from subprocess import Popen, PIPE # output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1] # opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split() # output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1] # all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split() # deprecated_opcodes = [opcode for opcode in all_opcodes if opcode not in opcodes] # # Remove opcodes that csound.py treats as keywords. # keyword_opcodes = [ # 'cggoto', # https://csound.com/docs/manual/cggoto.html # 'cigoto', # https://csound.com/docs/manual/cigoto.html # 'cingoto', # (undocumented) # 'ckgoto', # https://csound.com/docs/manual/ckgoto.html # 'cngoto', # https://csound.com/docs/manual/cngoto.html # 'cnkgoto', # (undocumented) # 'endin', # https://csound.com/docs/manual/endin.html # 'endop', # https://csound.com/docs/manual/endop.html # 'goto', # https://csound.com/docs/manual/goto.html # 'igoto', # https://csound.com/docs/manual/igoto.html # 'instr', # https://csound.com/docs/manual/instr.html # 'kgoto', # https://csound.com/docs/manual/kgoto.html # 'loop_ge', # https://csound.com/docs/manual/loop_ge.html # 'loop_gt', # https://csound.com/docs/manual/loop_gt.html # 'loop_le', # https://csound.com/docs/manual/loop_le.html # 'loop_lt', # https://csound.com/docs/manual/loop_lt.html # 'opcode', # https://csound.com/docs/manual/opcode.html # 'reinit', # https://csound.com/docs/manual/reinit.html # 'return', # https://csound.com/docs/manual/return.html # 'rireturn', # https://csound.com/docs/manual/rireturn.html # 'rigoto', # https://csound.com/docs/manual/rigoto.html # 'tigoto', # https://csound.com/docs/manual/tigoto.html # 'timout' # https://csound.com/docs/manual/timout.html # ] # opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes] # newline = '\n' # print(f'''OPCODES = set(\''' # {newline.join(opcodes)} # \'''.split()) # # DEPRECATED_OPCODES = set(\''' # {newline.join(deprecated_opcodes)} # \'''.split()) # ''') # " OPCODES = set(''' ATSadd ATSaddnz ATSbufread ATScross ATSinfo ATSinterpread ATSpartialtap ATSread ATSreadnz ATSsinnoi FLbox FLbutBank FLbutton FLcloseButton FLcolor FLcolor2 FLcount FLexecButton FLgetsnap FLgroup FLgroupEnd FLgroup_end FLhide FLhvsBox FLhvsBoxSetValue FLjoy FLkeyIn FLknob FLlabel FLloadsnap FLmouse FLpack FLpackEnd FLpack_end FLpanel FLpanelEnd FLpanel_end FLprintk FLprintk2 FLroller FLrun FLsavesnap FLscroll FLscrollEnd FLscroll_end FLsetAlign FLsetBox FLsetColor FLsetColor2 FLsetFont FLsetPosition FLsetSize FLsetSnapGroup FLsetText FLsetTextColor FLsetTextSize FLsetTextType FLsetVal FLsetVal_i FLsetVali FLsetsnap FLshow FLslidBnk FLslidBnk2 FLslidBnk2Set FLslidBnk2Setk FLslidBnkGetHandle FLslidBnkSet FLslidBnkSetk FLslider FLtabs FLtabsEnd FLtabs_end FLtext FLupdate FLvalue FLvkeybd FLvslidBnk FLvslidBnk2 FLxyin JackoAudioIn JackoAudioInConnect JackoAudioOut JackoAudioOutConnect JackoFreewheel JackoInfo JackoInit JackoMidiInConnect JackoMidiOut JackoMidiOutConnect JackoNoteOut JackoOn JackoTransport K35_hpf K35_lpf MixerClear MixerGetLevel MixerReceive MixerSend MixerSetLevel MixerSetLevel_i OSCbundle OSCcount OSCinit OSCinitM OSClisten 
OSCraw OSCsend OSCsend_lo S STKBandedWG STKBeeThree STKBlowBotl STKBlowHole STKBowed STKBrass STKClarinet STKDrummer STKFMVoices STKFlute STKHevyMetl STKMandolin STKModalBar STKMoog STKPercFlut STKPlucked STKResonate STKRhodey STKSaxofony STKShakers STKSimple STKSitar STKStifKarp STKTubeBell STKVoicForm STKWhistle STKWurley a abs active adsr adsyn adsynt adsynt2 aftouch allpole alpass alwayson ampdb ampdbfs ampmidi ampmidicurve ampmidid apoleparams arduinoRead arduinoStart arduinoStop areson aresonk atone atonek atonex babo balance balance2 bamboo barmodel bbcutm bbcuts beadsynt beosc betarand bexprnd bformdec1 bformenc1 binit biquad biquada birnd bob bpf bpfcos bqrez butbp butbr buthp butlp butterbp butterbr butterhp butterlp button buzz c2r cabasa cauchy cauchyi cbrt ceil cell cent centroid ceps cepsinv chanctrl changed2 chani chano chebyshevpoly checkbox chn_S chn_a chn_k chnclear chnexport chnget chngeta chngeti chngetk chngetks chngets chnmix chnparams chnset chnseta chnseti chnsetk chnsetks chnsets chuap clear clfilt clip clockoff clockon cmp cmplxprod cntCreate cntCycles cntRead cntReset cntState comb combinv compilecsd compileorc compilestr compress compress2 connect control convle convolve copya2ftab copyf2array cos cosh cosinv cosseg cossegb cossegr count count_i cps2pch cpsmidi cpsmidib cpsmidinn cpsoct cpspch cpstmid cpstun cpstuni cpsxpch cpumeter cpuprc cross2 crossfm crossfmi crossfmpm crossfmpmi crosspm crosspmi crunch ctlchn ctrl14 ctrl21 ctrl7 ctrlinit cuserrnd dam date dates db dbamp dbfsamp dcblock dcblock2 dconv dct dctinv deinterleave delay delay1 delayk delayr delayw deltap deltap3 deltapi deltapn deltapx deltapxw denorm diff diode_ladder directory diskgrain diskin diskin2 dispfft display distort distort1 divz doppler dot downsamp dripwater dssiactivate dssiaudio dssictls dssiinit dssilist dumpk dumpk2 dumpk3 dumpk4 duserrnd dust dust2 envlpx envlpxr ephasor eqfil evalstr event event_i exciter exitnow exp expcurve expon exprand exprandi expseg expsega expsegb expsegba expsegr fareylen fareyleni faustaudio faustcompile faustctl faustdsp faustgen faustplay fft fftinv ficlose filebit filelen filenchnls filepeak filescal filesr filevalid fillarray filter2 fin fini fink fiopen flanger flashtxt flooper flooper2 floor fluidAllOut fluidCCi fluidCCk fluidControl fluidEngine fluidInfo fluidLoad fluidNote fluidOut fluidProgramSelect fluidSetInterpMethod fmanal fmax fmb3 fmbell fmin fmmetal fmod fmpercfl fmrhode fmvoice fmwurlie fof fof2 fofilter fog fold follow follow2 foscil foscili fout fouti foutir foutk fprintks fprints frac fractalnoise framebuffer freeverb ftaudio ftchnls ftconv ftcps ftexists ftfree ftgen ftgenonce ftgentmp ftlen ftload ftloadk ftlptim ftmorf ftom ftprint ftresize ftresizei ftsamplebank ftsave ftsavek ftset ftslice ftslicei ftsr gain gainslider gauss gaussi gausstrig gbuzz genarray genarray_i gendy gendyc gendyx getcfg getcol getftargs getrow getrowlin getseed gogobel grain grain2 grain3 granule gtf guiro harmon harmon2 harmon3 harmon4 hdf5read hdf5write hilbert hilbert2 hrtfearly hrtfmove hrtfmove2 hrtfreverb hrtfstat hsboscil hvs1 hvs2 hvs3 hypot i ihold imagecreate imagefree imagegetpixel imageload imagesave imagesetpixel imagesize in in32 inch inh init initc14 initc21 initc7 inleta inletf inletk inletkid inletv ino inq inrg ins insglobal insremot int integ interleave interp invalue inx inz jacktransport jitter jitter2 joystick jspline k la_i_add_mc la_i_add_mr la_i_add_vc la_i_add_vr la_i_assign_mc la_i_assign_mr la_i_assign_t la_i_assign_vc 
la_i_assign_vr la_i_conjugate_mc la_i_conjugate_mr la_i_conjugate_vc la_i_conjugate_vr la_i_distance_vc la_i_distance_vr la_i_divide_mc la_i_divide_mr la_i_divide_vc la_i_divide_vr la_i_dot_mc la_i_dot_mc_vc la_i_dot_mr la_i_dot_mr_vr la_i_dot_vc la_i_dot_vr la_i_get_mc la_i_get_mr la_i_get_vc la_i_get_vr la_i_invert_mc la_i_invert_mr la_i_lower_solve_mc la_i_lower_solve_mr la_i_lu_det_mc la_i_lu_det_mr la_i_lu_factor_mc la_i_lu_factor_mr la_i_lu_solve_mc la_i_lu_solve_mr la_i_mc_create la_i_mc_set la_i_mr_create la_i_mr_set la_i_multiply_mc la_i_multiply_mr la_i_multiply_vc la_i_multiply_vr la_i_norm1_mc la_i_norm1_mr la_i_norm1_vc la_i_norm1_vr la_i_norm_euclid_mc la_i_norm_euclid_mr la_i_norm_euclid_vc la_i_norm_euclid_vr la_i_norm_inf_mc la_i_norm_inf_mr la_i_norm_inf_vc la_i_norm_inf_vr la_i_norm_max_mc la_i_norm_max_mr la_i_print_mc la_i_print_mr la_i_print_vc la_i_print_vr la_i_qr_eigen_mc la_i_qr_eigen_mr la_i_qr_factor_mc la_i_qr_factor_mr la_i_qr_sym_eigen_mc la_i_qr_sym_eigen_mr la_i_random_mc la_i_random_mr la_i_random_vc la_i_random_vr la_i_size_mc la_i_size_mr la_i_size_vc la_i_size_vr la_i_subtract_mc la_i_subtract_mr la_i_subtract_vc la_i_subtract_vr la_i_t_assign la_i_trace_mc la_i_trace_mr la_i_transpose_mc la_i_transpose_mr la_i_upper_solve_mc la_i_upper_solve_mr la_i_vc_create la_i_vc_set la_i_vr_create la_i_vr_set la_k_a_assign la_k_add_mc la_k_add_mr la_k_add_vc la_k_add_vr la_k_assign_a la_k_assign_f la_k_assign_mc la_k_assign_mr la_k_assign_t la_k_assign_vc la_k_assign_vr la_k_conjugate_mc la_k_conjugate_mr la_k_conjugate_vc la_k_conjugate_vr la_k_current_f la_k_current_vr la_k_distance_vc la_k_distance_vr la_k_divide_mc la_k_divide_mr la_k_divide_vc la_k_divide_vr la_k_dot_mc la_k_dot_mc_vc la_k_dot_mr la_k_dot_mr_vr la_k_dot_vc la_k_dot_vr la_k_f_assign la_k_get_mc la_k_get_mr la_k_get_vc la_k_get_vr la_k_invert_mc la_k_invert_mr la_k_lower_solve_mc la_k_lower_solve_mr la_k_lu_det_mc la_k_lu_det_mr la_k_lu_factor_mc la_k_lu_factor_mr la_k_lu_solve_mc la_k_lu_solve_mr la_k_mc_set la_k_mr_set la_k_multiply_mc la_k_multiply_mr la_k_multiply_vc la_k_multiply_vr la_k_norm1_mc la_k_norm1_mr la_k_norm1_vc la_k_norm1_vr la_k_norm_euclid_mc la_k_norm_euclid_mr la_k_norm_euclid_vc la_k_norm_euclid_vr la_k_norm_inf_mc la_k_norm_inf_mr la_k_norm_inf_vc la_k_norm_inf_vr la_k_norm_max_mc la_k_norm_max_mr la_k_qr_eigen_mc la_k_qr_eigen_mr la_k_qr_factor_mc la_k_qr_factor_mr la_k_qr_sym_eigen_mc la_k_qr_sym_eigen_mr la_k_random_mc la_k_random_mr la_k_random_vc la_k_random_vr la_k_subtract_mc la_k_subtract_mr la_k_subtract_vc la_k_subtract_vr la_k_t_assign la_k_trace_mc la_k_trace_mr la_k_upper_solve_mc la_k_upper_solve_mr la_k_vc_set la_k_vr_set lag lagud lastcycle lenarray lfo limit limit1 lincos line linen linenr lineto link_beat_force link_beat_get link_beat_request link_create link_enable link_is_enabled link_metro link_peers link_tempo_get link_tempo_set linlin linrand linseg linsegb linsegr liveconv locsend locsig log log10 log2 logbtwo logcurve loopseg loopsegp looptseg loopxseg lorenz loscil loscil3 loscil3phs loscilphs loscilx lowpass2 lowres lowresx lpcanal lpcfilter lpf18 lpform lpfreson lphasor lpinterp lposcil lposcil3 lposcila lposcilsa lposcilsa2 lpread lpreson lpshold lpsholdp lpslot lua_exec lua_iaopcall lua_iaopcall_off lua_ikopcall lua_ikopcall_off lua_iopcall lua_iopcall_off lua_opdef lufs mac maca madsr mags mandel mandol maparray maparray_i marimba massign max max_k maxabs maxabsaccum maxaccum maxalloc maxarray mclock mdelay median mediank metro metro2 mfb 
midglobal midiarp midic14 midic21 midic7 midichannelaftertouch midichn midicontrolchange midictrl mididefault midifilestatus midiin midinoteoff midinoteoncps midinoteonkey midinoteonoct midinoteonpch midion midion2 midiout midiout_i midipgm midipitchbend midipolyaftertouch midiprogramchange miditempo midremot min minabs minabsaccum minaccum minarray mincer mirror mode modmatrix monitor moog moogladder moogladder2 moogvcf moogvcf2 moscil mp3bitrate mp3in mp3len mp3nchnls mp3scal mp3sr mpulse mrtmsg mtof mton multitap mute mvchpf mvclpf1 mvclpf2 mvclpf3 mvclpf4 mxadsr nchnls_hw nestedap nlalp nlfilt nlfilt2 noise noteoff noteon noteondur noteondur2 notnum nreverb nrpn nsamp nstance nstrnum nstrstr ntof ntom ntrpol nxtpow2 octave octcps octmidi octmidib octmidinn octpch olabuffer oscbnk oscil oscil1 oscil1i oscil3 oscili oscilikt osciliktp oscilikts osciln oscils oscilx out out32 outc outch outh outiat outic outic14 outipat outipb outipc outkat outkc outkc14 outkpat outkpb outkpc outleta outletf outletk outletkid outletv outo outq outq1 outq2 outq3 outq4 outrg outs outs1 outs2 outvalue outx outz p p5gconnect p5gdata pan pan2 pareq part2txt partials partikkel partikkelget partikkelset partikkelsync passign paulstretch pcauchy pchbend pchmidi pchmidib pchmidinn pchoct pchtom pconvolve pcount pdclip pdhalf pdhalfy peak pgmassign pgmchn phaser1 phaser2 phasor phasorbnk phs pindex pinker pinkish pitch pitchac pitchamdf planet platerev plltrack pluck poisson pol2rect polyaft polynomial port portk poscil poscil3 pow powershape powoftwo pows prealloc prepiano print print_type printarray printf printf_i printk printk2 printks printks2 println prints printsk product pset ptablew ptrack puts pvadd pvbufread pvcross pvinterp pvoc pvread pvs2array pvs2tab pvsadsyn pvsanal pvsarp pvsbandp pvsbandr pvsbandwidth pvsbin pvsblur pvsbuffer pvsbufread pvsbufread2 pvscale pvscent pvsceps pvscfs pvscross pvsdemix pvsdiskin pvsdisp pvsenvftw pvsfilter pvsfread pvsfreeze pvsfromarray pvsftr pvsftw pvsfwrite pvsgain pvshift pvsifd pvsin pvsinfo pvsinit pvslock pvslpc pvsmaska pvsmix pvsmooth pvsmorph pvsosc pvsout pvspitch pvstanal pvstencil pvstrace pvsvoc pvswarp pvsynth pwd pyassign pyassigni pyassignt pycall pycall1 pycall1i pycall1t pycall2 pycall2i pycall2t pycall3 pycall3i pycall3t pycall4 pycall4i pycall4t pycall5 pycall5i pycall5t pycall6 pycall6i pycall6t pycall7 pycall7i pycall7t pycall8 pycall8i pycall8t pycalli pycalln pycallni pycallt pyeval pyevali pyevalt pyexec pyexeci pyexect pyinit pylassign pylassigni pylassignt pylcall pylcall1 pylcall1i pylcall1t pylcall2 pylcall2i pylcall2t pylcall3 pylcall3i pylcall3t pylcall4 pylcall4i pylcall4t pylcall5 pylcall5i pylcall5t pylcall6 pylcall6i pylcall6t pylcall7 pylcall7i pylcall7t pylcall8 pylcall8i pylcall8t pylcalli pylcalln pylcallni pylcallt pyleval pylevali pylevalt pylexec pylexeci pylexect pylrun pylruni pylrunt pyrun pyruni pyrunt qinf qnan r2c rand randc randh randi random randomh randomi rbjeq readclock readf readfi readk readk2 readk3 readk4 readks readscore readscratch rect2pol release remoteport remove repluck reshapearray reson resonbnk resonk resonr resonx resonxk resony resonz resyn reverb reverb2 reverbsc rewindscore rezzy rfft rifft rms rnd rnd31 rndseed round rspline rtclock s16b14 s32b14 samphold sandpaper sc_lag sc_lagud sc_phasor sc_trig scale scalearray scanhammer scans scantable scanu schedkwhen schedkwhennamed schedule schedulek schedwhen scoreline scoreline_i seed sekere select semitone sense sensekey seqtime seqtime2 serialBegin 
serialEnd serialFlush serialPrint serialRead serialWrite serialWrite_i setcol setctrl setksmps setrow setscorepos sfilist sfinstr sfinstr3 sfinstr3m sfinstrm sfload sflooper sfpassign sfplay sfplay3 sfplay3m sfplaym sfplist sfpreset shaker shiftin shiftout signum sin sinh sininv sinsyn sleighbells slicearray slicearray_i slider16 slider16f slider16table slider16tablef slider32 slider32f slider32table slider32tablef slider64 slider64f slider64table slider64tablef slider8 slider8f slider8table slider8tablef sliderKawai sndloop sndwarp sndwarpst sockrecv sockrecvs socksend socksends sorta sortd soundin space spat3d spat3di spat3dt spdist splitrig sprintf sprintfk spsend sqrt squinewave statevar sterrain stix strcat strcatk strchar strchark strcmp strcmpk strcpy strcpyk strecv streson strfromurl strget strindex strindexk string2array strlen strlenk strlower strlowerk strrindex strrindexk strset strstrip strsub strsubk strtod strtodk strtol strtolk strupper strupperk stsend subinstr subinstrinit sum sumarray svfilter syncgrain syncloop syncphasor system system_i tab tab2array tab2pvs tab_i tabifd table table3 table3kt tablecopy tablefilter tablefilteri tablegpw tablei tableicopy tableigpw tableikt tableimix tablekt tablemix tableng tablera tableseg tableshuffle tableshufflei tablew tablewa tablewkt tablexkt tablexseg tabmorph tabmorpha tabmorphak tabmorphi tabplay tabrec tabrowlin tabsum tabw tabw_i tambourine tan tanh taninv taninv2 tbvcf tempest tempo temposcal tempoval timedseq timeinstk timeinsts timek times tival tlineto tone tonek tonex tradsyn trandom transeg transegb transegr trcross trfilter trhighest trigger trighold trigphasor trigseq trim trim_i trirand trlowest trmix trscale trshift trsplit turnoff turnoff2 turnon tvconv unirand unwrap upsamp urandom urd vactrol vadd vadd_i vaddv vaddv_i vaget valpass vaset vbap vbapg vbapgmove vbaplsinit vbapmove vbapz vbapzmove vcella vco vco2 vco2ft vco2ift vco2init vcomb vcopy vcopy_i vdel_k vdelay vdelay3 vdelayk vdelayx vdelayxq vdelayxs vdelayxw vdelayxwq vdelayxws vdivv vdivv_i vecdelay veloc vexp vexp_i vexpseg vexpv vexpv_i vibes vibr vibrato vincr vlimit vlinseg vlowres vmap vmirror vmult vmult_i vmultv vmultv_i voice vosim vphaseseg vport vpow vpow_i vpowv vpowv_i vps vpvoc vrandh vrandi vsubv vsubv_i vtaba vtabi vtabk vtable1k vtablea vtablei vtablek vtablewa vtablewi vtablewk vtabwa vtabwi vtabwk vwrap waveset websocket weibull wgbow wgbowedbar wgbrass wgclar wgflute wgpluck wgpluck2 wguide1 wguide2 wiiconnect wiidata wiirange wiisend window wrap writescratch wterrain xadsr xin xout xscanmap xscans xscansmap xscanu xtratim xyscale zacl zakinit zamod zar zarg zaw zawm zdf_1pole zdf_1pole_mode zdf_2pole zdf_2pole_mode zdf_ladder zfilter2 zir ziw ziwm zkcl zkmod zkr zkw zkwm '''.split()) DEPRECATED_OPCODES = set(''' array bformdec bformenc changed copy2ftab copy2ttab hrtfer ktableseg lentab maxtab mintab pop pop_f ptable ptable3 ptablei ptableiw push push_f scalet sndload soundout soundouts specaddm specdiff specdisp specfilt spechist specptrk specscal specsum spectrum stack sumtab tabgen tableiw tabmap tabmap_i tabslice tb0 tb0_init tb1 tb10 tb10_init tb11 tb11_init tb12 tb12_init tb13 tb13_init tb14 tb14_init tb15 tb15_init tb1_init tb2 tb2_init tb3 tb3_init tb4 tb4_init tb5 tb5_init tb6 tb6_init tb7 tb7_init tb8 tb8_init tb9 tb9_init vbap16 vbap4 vbap4move vbap8 vbap8move xyin '''.split())
export Header from './Header' export TodoList from './TodoList' export Footer from './Footer'
import Emitter from 'emitter-component';

import CommandFactory from './commands/commandFactory';
import {exists, checkMethod, checkParam} from '../utils';

export default class ClientContext {

    constructor(dolphin, beanManager, controllerManager, connector) {
        checkMethod('ClientContext(dolphin, beanManager, controllerManager, connector)');
        checkParam(dolphin, 'dolphin');
        checkParam(beanManager, 'beanManager');
        checkParam(controllerManager, 'controllerManager');
        checkParam(connector, 'connector');

        this.dolphin = dolphin;
        this.beanManager = beanManager;
        this._controllerManager = controllerManager;
        this._connector = connector;
        this.connectionPromise = null;
        this.isConnected = false;
    }

    connect() {
        let self = this;
        this.connectionPromise = new Promise((resolve, reject) => {
            self._connector.connect();
            self._connector.invoke(CommandFactory.createCreateContextCommand()).then(() => {
                self.isConnected = true;
                resolve();
            }).catch(reject);
        });
        return this.connectionPromise;
    }

    onConnect() {
        if (exists(this.connectionPromise)) {
            if (!this.isConnected) {
                return this.connectionPromise;
            } else {
                return new Promise((resolve) => {
                    resolve();
                });
            }
        } else {
            return this.connect();
        }
    }

    createController(name) {
        checkMethod('ClientContext.createController(name)');
        checkParam(name, 'name');
        return this._controllerManager.createController(name);
    }

    disconnect() {
        let self = this;
        this.dolphin.stopPushListening();
        return new Promise((resolve) => {
            self._controllerManager.destroy().then(() => {
                self._connector.invoke(CommandFactory.createDestroyContextCommand());
                self.dolphin = null;
                self.beanManager = null;
                self._controllerManager = null;
                self._connector = null;
                resolve();
            });
        });
    }
}

Emitter(ClientContext.prototype);
#!/usr/bin/python
"""Setup module for caasp-admin-setup"""

import sys

try:
    import setuptools
except ImportError:
    sys.stderr.write('Python setuptools required, please install.')
    sys.exit(1)

if __name__ == '__main__':
    setuptools.setup(
        name='caasp-admin-setup',
        description=(
            'Script to set up a SUSE CaaSP admin node'),
        url='https://github.com/SUSE/pubcloud',
        license='MIT',
        author='SUSE Public Cloud Team',
        author_email='public-cloud-dev@susecloud.net',
        version='1.6.4',
        packages=setuptools.find_packages('lib'),
        package_dir={
            '': 'lib',
        },
        scripts=['caasp-admin-setup']
    )
from ahh import vis, ext, sci
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime
from scipy.stats import pearsonr

sleep_df = pd.read_pickle('sleep_data_fmt.pkl')
sleep_hours = sleep_df['minutes'] / 60
sleep_quality = sleep_df['quality'] * 100

# work on a copy and use .loc to avoid chained-assignment warnings
sleep_df_interp = sleep_df.copy()
sleep_df_interp.loc[sleep_df_interp.index[0], 'minutes'] = 9
sleep_hours_interp = sleep_df_interp['minutes'].interpolate() / 60

wx_df = pd.read_csv('kcmi_wx.csv')
# https://www.wunderground.com/history/airport/KCMI/2014/1/1/
tmp = wx_df['Mean TemperatureF']

x = mdates.date2num(sleep_df_interp.index)
xx = np.linspace(x.min(), x.max(), len(sleep_df_interp.index))
z4 = np.polyfit(x, sleep_hours_interp, 4)
p4 = np.poly1d(z4)
sleep_fit = p4(xx)

fig, ax = vis.plot(sleep_df.index, sleep_hours, y2=sleep_fit,
                   bar=True, bar_dates=True, save='andrew_sleep',
                   sharex=True, figsize=(70, 20), major='months',
                   interval=3, width=0.65,
                   title="Andrew's Daily Sleep (2014 - 2016)",
                   ylabel='Hours', titlescale=4, fontscale=3.5,
                   labelscale=3.5, linewidth2=5, minor='years')

years = range(2014, 2017)
yearly_sleep_avg_list = []
yearly_sleep_std_list = []

months = range(1, 13)
monthly_sleep_avg_list = []
monthly_sleep_std_list = []
sleep_quality_avg_list = []

yr_monthly_sleep_avg_list = []
yr_monthly_quality_avg_list = []

sleep_masked = np.ma.masked_array(sleep_hours, np.isnan(sleep_hours))
quality_masked = np.ma.masked_array(sleep_quality, np.isnan(sleep_quality))

for year in years:
    year_idc = np.where(pd.DatetimeIndex(sleep_df.index).year == year)[0]
    yearly_sleep_avg_list.append(np.ma.average(sleep_masked[year_idc]))
    yearly_sleep_std_list.append(np.std(sleep_hours[year_idc]))

for month in months:
    month_idc = np.where(pd.DatetimeIndex(sleep_df.index).month == month)[0]
    monthly_sleep_avg_list.append(np.ma.average(sleep_masked[month_idc]))
    monthly_sleep_std_list.append(np.std(sleep_hours[month_idc]))
    sleep_quality_avg_list.append(np.ma.average(quality_masked[month_idc]))

months_avg = np.ones(len(months)) * np.average(monthly_sleep_avg_list)
quality_months_avg = np.ones(len(months)) * np.average(sleep_quality_avg_list)

caption = """
Yearly Avg: 2014:{avg2014:02.2f}, 2015:{avg2015:02.2f}, 2016:{avg2016:02.2f}
Yearly Std: 2014:{std2014:02.2f}, 2015:{std2015:02.2f}, 2016:{std2016:02.2f}
Monthly Avg: Jan:{jan:02.2f}, Feb:{feb:02.2f}, Mar:{mar:02.2f}, Apr:{apr:02.2f}, May:{may:02.2f}, Jun:{jun:02.2f}, Jul:{jul:02.2f}, Aug:{aug:02.2f}, Sep:{sep:02.2f}, Oct:{oct:02.2f}, Nov:{nov:02.2f}, Dec:{dec:02.2f}
Monthly Std: Jan:{jan_std:02.2f}, Feb:{feb_std:02.2f}, Mar:{mar_std:02.2f}, Apr:{apr_std:02.2f}, May:{may_std:02.2f}, Jun:{jun_std:02.2f}, Jul:{jul_std:02.2f}, Aug:{aug_std:02.2f}, Sep:{sep_std:02.2f}, Oct:{oct_std:02.2f}, Nov:{nov_std:02.2f}, Dec:{dec_std:02.2f}
"""

plt.figtext(0.5, 0.005,
            caption.format(
                avg2014=yearly_sleep_avg_list[0], std2014=yearly_sleep_std_list[0],
                avg2015=yearly_sleep_avg_list[1], std2015=yearly_sleep_std_list[1],
                avg2016=yearly_sleep_avg_list[2], std2016=yearly_sleep_std_list[2],
                jan=monthly_sleep_avg_list[0], feb=monthly_sleep_avg_list[1],
                mar=monthly_sleep_avg_list[2], apr=monthly_sleep_avg_list[3],
                may=monthly_sleep_avg_list[4], jun=monthly_sleep_avg_list[5],
                jul=monthly_sleep_avg_list[6], aug=monthly_sleep_avg_list[7],
                sep=monthly_sleep_avg_list[8], oct=monthly_sleep_avg_list[9],
                nov=monthly_sleep_avg_list[10], dec=monthly_sleep_avg_list[11],
                jan_std=monthly_sleep_std_list[0], feb_std=monthly_sleep_std_list[1],
                mar_std=monthly_sleep_std_list[2], apr_std=monthly_sleep_std_list[3],
                may_std=monthly_sleep_std_list[4], jun_std=monthly_sleep_std_list[5],
                jul_std=monthly_sleep_std_list[6], aug_std=monthly_sleep_std_list[7],
                sep_std=monthly_sleep_std_list[8], oct_std=monthly_sleep_std_list[9],
                nov_std=monthly_sleep_std_list[10], dec_std=monthly_sleep_std_list[11],
            ),
            ha='center', size=40, color='.5',
            )
plt.savefig("andrew_sleep")

for year in years:
    for month in months:
        yr_month_idc = np.where((pd.DatetimeIndex(sleep_df.index).month == month) &
                                (pd.DatetimeIndex(sleep_df.index).year == year))[0]
        yr_monthly_sleep_avg_list.append(np.ma.average(sleep_masked[yr_month_idc]))
        yr_monthly_quality_avg_list.append(np.ma.average(quality_masked[yr_month_idc]))

start = datetime.datetime(2013, 12, 31)
dates = pd.date_range(start, periods=len(yr_monthly_sleep_avg_list), freq='M')

x = mdates.date2num(dates[:-2])
xx = np.linspace(x.min(), x.max(), len(dates))
z4 = np.polyfit(x, np.array(yr_monthly_sleep_avg_list[:-2]), 4)
p4 = np.poly1d(z4)
yearly_monthly_sleep_fit = p4(xx)

# hours feed the "hour" anomaly and quality feeds the "quality" anomaly
monthly_hour_norm = sci.get_norm_anom(np.array(yr_monthly_sleep_avg_list[:-2]))
monthly_qual_norm = sci.get_norm_anom(np.array(yr_monthly_quality_avg_list[:-2]))
coeff, pval = pearsonr(monthly_qual_norm, monthly_hour_norm)

plt.figure()
title_fmt = 'Monthly Average Hours of Sleep'
fig, ax = vis.plot(dates, yr_monthly_sleep_avg_list, y2=yearly_monthly_sleep_fit,
                   ylabel='Hours', sharex=True, extra=True, xlabel='Month',
                   bar_dates=True, linewidth2=2, title=title_fmt,
                   ylabel2='Quality', bar=True, ylim=(7, 9.5), width=15,
                   figsize=(20, 15), major='months', interval=3,
                   fontscale=1.5, labelscale=1.5, minor='years')
plt.savefig('yr_monthly_andrew_quality_hour.png')

plt.figure()
vis.plot(months, monthly_sleep_avg_list, y2=months_avg, ylabel='Hours',
         sharex=True, extra=True,
         title='Monthly Average Hours of Sleep (2014 - 2016)', xlabel='Month',
         save='monthly_andrew_sleep', figsize=(20, 15), xlim=(1, 12))

plt.figure()
vis.plot(months, sleep_quality_avg_list, y2=quality_months_avg, ylabel='%',
         sharex=True, extra=True, xlabel='Month',
         title='Monthly Average Quality of Sleep (2014 - 2016)',
         save='monthly_andrew_quality', figsize=(20, 15), xlim=(1, 12))

plt.figure()
hist, bins = np.histogram(sleep_hours, bins=20, range=(6, 11))
width = 0.7 * (bins[1] - bins[0])
center = (bins[:-1] + bins[1:]) / 2
vis.plot(center, hist, width=width, ylabel='Count',
         title='Hours of Sleep Histogram (2014 - 2016)', xlabel='Hours',
         save='histogram_andrew_sleep', figsize=(20, 15), bar=True, xlim=(6, 11))

monthly_hour_norm = sci.get_norm_anom(np.array(monthly_sleep_avg_list))
monthly_qual_norm = sci.get_norm_anom(np.array(sleep_quality_avg_list))
coeff, pval = pearsonr(monthly_qual_norm, monthly_hour_norm)

plt.figure()
title_fmt = 'Monthly Quality of Sleep vs Hours of Sleep Correlation = {:.2f}'
vis.plot(months, monthly_sleep_avg_list, y2=sleep_quality_avg_list, ylabel='Hours',
         sharex=True, extra=True, xlabel='Month', extray=True,
         title=title_fmt.format(coeff), ylabel2='Quality',
         save='monthly_andrew_quality_hour', figsize=(20, 15), xlim=(1, 12))

qual_norm = sci.get_norm_anom(quality_masked)
tmp_norm = sci.get_norm_anom(tmp)
qual_norm_cut = qual_norm[~qual_norm.mask]
tmp_norm_cut = tmp_norm[~qual_norm.mask]
coeff, pval = pearsonr(qual_norm_cut, tmp_norm_cut)

fig, ax = vis.plot(sleep_df.index, quality_masked, y2=tmp, dates=True,
                   save='qual_vs_tmp', sharex=True, figsize=(70, 20),
                   major='months', interval=3, extray=True,
                   title="Sleep Quality vs Temperature Correlation: {}".format(coeff),
                   ylabel='Sleep Quality', titlescale=4, fontscale=3.5,
                   labelscale=3.5, linewidth2=5, minor='years')
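The script leans on sci.get_norm_anom from the ahh package, whose implementation is not shown here. If it computes a standardized anomaly, a plain-numpy stand-in would look roughly like the sketch below; the function name and its exact behavior are assumptions, not taken from the original code.

import numpy as np

def norm_anom(values):
    # Assumed stand-in for ahh.sci.get_norm_anom: subtract the mean and
    # divide by the standard deviation, ignoring NaN / masked entries.
    arr = np.ma.masked_invalid(values)
    return (arr - arr.mean()) / arr.std()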
export { default as MenuList } from "./MenuList"
// example include file
void myPrintHelloMake(void);
import React, { Component, PropTypes } from 'react'; class MainHeaderComponent extends Component { render() { return ( <div> <ul className="nav navbar-top-links navbar-right"> <li className="dropdown"> <a className="dropdown-toggle" data-toggle="dropdown" href="#"> <i className="fa fa-envelope fa-fw"></i> <i className="fa fa-caret-down"></i> </a> <ul className="dropdown-menu dropdown-messages"> <li> <a href="#"> <div> <strong>John Smith</strong> <span className="pull-right text-muted"> <em>Yesterday</em> </span> </div> <div>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eleifend...</div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <strong>John Smith</strong> <span className="pull-right text-muted"> <em>Yesterday</em> </span> </div> <div>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eleifend...</div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <strong>John Smith</strong> <span className="pull-right text-muted"> <em>Yesterday</em> </span> </div> <div>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque eleifend...</div> </a> </li> <li className="divider"></li> <li> <a className="text-center" href="#"> <strong>Read All Messages</strong> <i className="fa fa-angle-right"></i> </a> </li> </ul> </li> <li className="dropdown"> <a className="dropdown-toggle" data-toggle="dropdown" href="#"> <i className="fa fa-tasks fa-fw"></i> <i className="fa fa-caret-down"></i> </a> <ul className="dropdown-menu dropdown-tasks"> <li> <a href="#"> <div> <p> <strong>Task 1</strong> <span className="pull-right text-muted">40% Complete</span> </p> <div className="progress progress-striped active"> <div className="progress-bar progress-bar-success" role="progressbar" aria-valuenow="40" aria-valuemin="0" aria-valuemax="100" style={{width: "40%"}}> <span className="sr-only">40% Complete (success)</span> </div> </div> </div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <p> <strong>Task 2</strong> <span className="pull-right text-muted">20% Complete</span> </p> <div className="progress progress-striped active"> <div className="progress-bar progress-bar-info" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100" style={{width: "20%"}}> <span className="sr-only">20% Complete</span> </div> </div> </div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <p> <strong>Task 3</strong> <span className="pull-right text-muted">60% Complete</span> </p> <div className="progress progress-striped active"> <div className="progress-bar progress-bar-warning" role="progressbar" aria-valuenow="60" aria-valuemin="0" aria-valuemax="100" style={{width: "60%"}}> <span className="sr-only">60% Complete (warning)</span> </div> </div> </div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <p> <strong>Task 4</strong> <span className="pull-right text-muted">80% Complete</span> </p> <div className="progress progress-striped active"> <div className="progress-bar progress-bar-danger" role="progressbar" aria-valuenow="80" aria-valuemin="0" aria-valuemax="100" style={{width: "80%"}}> <span className="sr-only">80% Complete (danger)</span> </div> </div> </div> </a> </li> <li className="divider"></li> <li> <a className="text-center" href="#"> <strong>See All Tasks</strong> <i className="fa fa-angle-right"></i> </a> </li> </ul> </li> <li className="dropdown"> <a className="dropdown-toggle" data-toggle="dropdown" href="#"> <i className="fa fa-bell fa-fw"></i> <i className="fa 
fa-caret-down"></i> </a> <ul className="dropdown-menu dropdown-alerts"> <li> <a href="#"> <div> <i className="fa fa-comment fa-fw"></i> New Comment <span className="pull-right text-muted small">4 minutes ago</span> </div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <i className="fa fa-twitter fa-fw"></i> 3 New Followers <span className="pull-right text-muted small">12 minutes ago</span> </div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <i className="fa fa-envelope fa-fw"></i> Message Sent <span className="pull-right text-muted small">4 minutes ago</span> </div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <i className="fa fa-tasks fa-fw"></i> New Task <span className="pull-right text-muted small">4 minutes ago</span> </div> </a> </li> <li className="divider"></li> <li> <a href="#"> <div> <i className="fa fa-upload fa-fw"></i> Server Rebooted <span className="pull-right text-muted small">4 minutes ago</span> </div> </a> </li> <li className="divider"></li> <li> <a className="text-center" href="#"> <strong>See All Alerts</strong> <i className="fa fa-angle-right"></i> </a> </li> </ul> </li> <li className="dropdown"> <a className="dropdown-toggle" data-toggle="dropdown" href="#"> <i className="fa fa-user fa-fw"></i> <i className="fa fa-caret-down"></i> </a> <ul className="dropdown-menu dropdown-user"> <li><a href="#"><i className="fa fa-user fa-fw"></i> User Profile</a> </li> <li><a href="#"><i className="fa fa-gear fa-fw"></i> Settings</a> </li> <li className="divider"></li> <li><a href="login.html"><i className="fa fa-sign-out fa-fw"></i> Logout</a> </li> </ul> </li> </ul> </div> ); } } MainHeaderComponent.propTypes = { }; export default MainHeaderComponent;
import itertools
import multiprocessing
import sys
import traceback

from PyQt4 import QtCore
from ags_service_publisher.logging_io import setup_logger
from logutils.queue import QueueHandler, QueueListener

from loghandlers.qtloghandler import QtLogHandler

log = setup_logger(__name__)


class SubprocessWorker(QtCore.QObject):
    """
    Worker object that runs the target function in a separate sub-process,
    checking its exit code periodically.

    Signals and parameters:

    - messageEmitted: Emitted whenever the target function's root logger emits a message
        - Worker ID (int)
        - Log level name (str)
        - Log message (str)
    - resultEmitted: Emitted when the process ends.
        - Worker ID (int)
        - Exit code (int)
        - Return value or exception instance (object)
    """

    messageEmitted = QtCore.pyqtSignal(int, str, str)
    resultEmitted = QtCore.pyqtSignal(int, int, object)

    get_next_worker_id = itertools.count().next

    def __init__(self, parent=None, target=None, args=(), kwargs=None,
                 timer_check_interval=1000, log_handler=None):
        super(SubprocessWorker, self).__init__(parent)
        self.id = self.get_next_worker_id()
        self.running = False
        self.timer = None
        self.timer_check_interval = timer_check_interval
        self.process = None
        self.log_handler = log_handler if log_handler is not None else QtLogHandler()
        self.log_handler.messageEmitted.connect(self.handle_message)
        self.log_queue = multiprocessing.Queue()
        self.result_queue = multiprocessing.Queue()
        self.log_queue_listener = QueueListener(self.log_queue, self.log_handler)
        self.target = target
        self.args = tuple(args)
        self.kwargs = dict(kwargs) if kwargs else {}
        self.thread = QtCore.QThread()
        self.moveToThread(self.thread)
        self.thread.started.connect(self.start)
        self.thread.finished.connect(self.stop)
        log.debug('Worker {} initialized on thread {}'.format(self.id, str(self.thread)))

    def check_process_status(self):
        if not self.running:
            message = 'Cannot check process status while worker is not running!'
            log.error(message)
            raise RuntimeError(message)
        log.debug('Checking status of subprocess {} (pid {})'.format(self.process.name, self.process.pid))
        if not self.process.is_alive():
            message = 'Subprocess {} ended (pid {}, exit code {})'.format(
                self.process.name, self.process.pid, self.process.exitcode
            )
            log.debug(message)
            self.resultEmitted.emit(self.id, self.process.exitcode, self.result_queue.get())
        else:
            message = 'Subprocess {} (pid {}) is still active'.format(self.process.name, self.process.pid)
            log.debug(message)

    @QtCore.pyqtSlot()
    def start(self):
        if self.running:
            log.warn('Worker {} already started on thread {}'.format(self.id, str(self.thread)))
            return
        log.debug('Worker {} started on thread {}'.format(self.id, str(self.thread)))
        self.running = True
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.check_process_status)
        self.log_queue_listener.start()
        self.process = multiprocessing.Process(
            target=wrap_target_function,
            args=(self.target, self.log_queue, self.result_queue) + self.args,
            kwargs=self.kwargs
        )
        self.process.start()
        self.timer.start(self.timer_check_interval)
        log.debug('Subprocess {} (pid {}) started'.format(self.process.name, self.process.pid))

    @QtCore.pyqtSlot()
    def stop(self):
        if not self.running:
            log.warn('Worker {} already stopped on thread {}'.format(self.id, str(self.thread)))
            return
        self.running = False
        self.timer.stop()
        if self.process.is_alive():
            log.debug('Terminating subprocess {} (pid {})'.format(self.process.name, self.process.pid))
            self.process.terminate()
        self.process.join()
        log.debug('Worker {} stopped on thread {}'.format(self.id, str(self.thread)))
        self.log_queue_listener.stop()

    @QtCore.pyqtSlot(str, str)
    def handle_message(self, level, message):
        self.messageEmitted.emit(self.id, level, message)


def wrap_target_function(target, log_queue, result_queue, *args, **kwargs):
    try:
        setup_logger(handler=QueueHandler(log_queue))
        result = target(*args, **kwargs)
        result_queue.put(result)
    except:
        result = Exception(''.join(traceback.format_exception(*sys.exc_info())))
        result_queue.put(result)
        raise
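A minimal usage sketch for the worker above, assuming a Qt application and event loop are already running; long_task and the two receiving slots are made up for illustration and are not part of the original module.

# Hypothetical wiring, for illustration only.
def long_task(n):
    return sum(range(n))

def on_message(worker_id, level, message):
    print('[worker {}] {}: {}'.format(worker_id, level, message))

def on_result(worker_id, exit_code, result):
    print('[worker {}] exit code {}, result: {}'.format(worker_id, exit_code, result))

worker = SubprocessWorker(target=long_task, args=(10 ** 6,))
worker.messageEmitted.connect(on_message)
worker.resultEmitted.connect(on_result)
worker.thread.start()  # thread.started is wired to SubprocessWorker.start in __init__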
import numpy as np
from shapely.geometry import Point, Polygon


class LinearAnalyticalModel_SnaikiWu_2017:
    def __init__(self, cyclone_param=[], storm_track=[]):
        """
        __init__: initializing the tropical cyclone
        cyclone_param: 6-dimensional array
        - cyclone_param[0]: landfall Latitude
        - cyclone_param[1]: landfall Longitude
        - cyclone_param[2]: landfall angle (degree)
        - cyclone_param[3]: central pressure difference (hPa)
        - cyclone_param[4]: moving speed (km/h)
        - cyclone_param[5]: cyclone radius of the maximum winds (km)
        storm_track:
        - storm_track['Latitude']: latitude values of the storm track
        - storm_track['Longitude']: longitude values of the storm track
        """
        # constants
        self.R = 6371.0 * 1e3
        self.EDDY_VISCOCITY = 75.0
        self.AIR_DENSITY = 1.1
        self.RA = 180.0 / np.pi
        self.EPS = np.spacing(1)
        # saving cyclone parameters
        try:
            self.landfall_lat = cyclone_param[0]
            self.landfall_lon = cyclone_param[1]
            self.landfall_ang = cyclone_param[2]
            self.cyclone_pres = cyclone_param[3] * 100.0
            self.cyclone_sped = cyclone_param[4] * 1000.0 / 3600.0
            self.cyclone_radi = cyclone_param[5]
            self.cyclone_radm = self.cyclone_radi * 1000.0
            self.Holland_B = 1.38 + 0.00184 * self.cyclone_pres / 100.0 - 0.00309 * self.cyclone_radi
        except:
            print('WindFieldSimulation: please check the cyclone_param input.')
        # saving storm track data
        try:
            self.track_lat = storm_track['Latitude']
            self.track_lon = storm_track['Longitude']
            if (len(self.track_lat) != len(self.track_lon)):
                print('WindFieldSimulation: warning - storm track Latitude and Longitude sizes are different, data truncated.')
                self.track_lat = self.track_lat[0:int(min(len(self.track_lat), len(self.track_lon)))]
                self.track_lon = self.track_lon[0:int(min(len(self.track_lat), len(self.track_lon)))]
        except:
            print('WindFieldSimulation: please check the storm_track input.')
        # initiation
        self.station_num = 0
        self.station = {
            'Latitude': [],
            'Longitude': [],
            'z0': [],
            'PWS': {
                'height': [],
                'duration': 600.0,
                'windspeed': []
            }
        }
        self.terrain_num = 0
        self.terrain_poly = []
        self.terrain_z0 = []
        self.delta_path = np.zeros(3)
        self.r = []
        self.theta = []
        self.zp = []
        self.mesh_info = []

    def set_delta_path(self, delta_path):
        """
        set_delta_path: perturbing the path coordinates and heading angle of the storm track
        """
        if (len(delta_path) == 3):
            self.delta_path = delta_path
        else:
            print('WindFieldSimulation: the delta_path should have a size of 3, default delta_path used.')

    def set_delta_feat(self, delta_feat):
        """
        set_delta_feat: perturbing the central pressure difference, translational speed, and max-wind-speed radius
        """
        if (len(delta_feat) == 3):
            self.cyclone_pres = delta_feat[0] * 100.0
            self.cyclone_sped = delta_feat[1] * 1000.0 / 3600.0
            self.cyclone_radi = delta_feat[2]
            self.cyclone_radm = self.cyclone_radi * 1000.0
            self.Holland_B = 1.38 + 0.00184 * self.cyclone_pres / 100.0 - 0.00309 * self.cyclone_radi
        else:
            print('WindFieldSimulation: the delta_feat should have a size of 3, default delta_feat used.')

    def __interp_z0(self, lat, lon):
        """
        __interp_z0: finding the z0 at (lat, lon) by interpolating reference terrain polygons
        """
        z0 = []
        if (not self.terrain_z0):
            # no reference terrain provided, using default reference z0 = 0.03
            z0 = 0.03
        else:
            pt = Point(lat, lon)
            for p, z in zip(self.terrain_poly, self.terrain_z0):
                if pt.within(p):
                    z0 = z
            if (not z0):
                z0 = 0.01
        # return
        return z0

    def add_reference_terrain(self, terrain_info):
        """
        add_reference_terrain: specifying reference z0 values for a set of polygons
        terrain_info: geojson formatted polygon and z0 data
        """
        for p in terrain_info['features']:
            if (p['geometry']['type'] == 'Polygon'):
                # creating a new polygon
                new_poly = Polygon(p['geometry']['coordinates'])
                self.terrain_poly.append(new_poly)
                self.terrain_z0.append(p['properties']['z0'])
                self.terrain_num += 1

    def set_cyclone_mesh(self, mesh_info):
        """
        set_cyclone_mesh: meshing the cyclone in radius and cycle
        mesh_info[0]: internal R
        mesh_info[1]: interval delta_R
        mesh_info[2]: external R
        mesh_info[3]: starting angle (usually 0)
        mesh_info[4]: interval angle
        mesh_info[5]: ending angle (usually 360)
        """
        try:
            self.mesh_info = mesh_info
            self.r = np.arange(mesh_info[0], mesh_info[2] + mesh_info[1], mesh_info[1])
            self.theta = np.arange(mesh_info[3], mesh_info[5] + mesh_info[4], mesh_info[4])
            print('WindFieldSimulation: cyclone meshed.')
        except:
            print('WindFieldSimulation: input format error in set_cyclone_mesh.')

    def set_track_mesh(self, mesh_lat):
        """
        set_track_mesh: meshing the storm track
        mesh_lat[0]: starting latitude value of the meshed track
        mesh_lat[1]: interval latitude value
        mesh_lat[2]: ending latitude value of the meshed track
        """
        try:
            lat0 = mesh_lat[0]
            dlat = mesh_lat[1]
            lat1 = mesh_lat[2]
        except:
            print('WindFieldSimulation: input format error in set_track_mesh.')
        # boundary checks
        if (max(lat0, lat1) > max(self.track_lat)) or (min(lat0, lat1) < min(self.track_lat)):
            print('WindFieldSimulation: warning - forcing the track mesh consistent with the original track boundary.')
            lat0 = min(lat0, max(self.track_lat))
            lat1 = min(lat1, max(self.track_lat))
            lat0 = max(lat0, min(self.track_lat))
            lat1 = max(lat1, min(self.track_lat))
        # computing meshed track's Latitude and Longitude values
        self.track_lat_m = np.arange(lat0, lat1, dlat).tolist()
        self.track_lon_m = np.abs(np.interp(self.track_lat_m, self.track_lat, self.track_lon))
        print('WindFieldSimulation: track meshed.')

    def define_track(self, track_lat):
        """
        define_track: directly defining the meshed track latitudes
        track_lat: latitude values to use for the meshed track
        """
        # computing meshed track's Latitude and Longitude values
        self.track_lat_m = track_lat
        self.track_lon_m = np.abs(np.interp(self.track_lat_m, self.track_lat, self.track_lon))
        print('WindFieldSimulation: track defined.')

    def set_measure_height(self, measure_info):
        """
        set_measure_height: defining the height for calculating wind speed
        """
        try:
            self.zp = np.arange(measure_info[0], measure_info[2] + measure_info[1], measure_info[1]).tolist()
            print('WindFieldSimulation: measurement height defined.')
        except:
            print('WindFieldSimulation: input format error in set_measure_height.')

    def add_stations(self, station_list):
        """
        add_stations: adding stations to the model
        station_list:
        - station_list['Latitude']: latitude values of stations
        - station_list['Longitude']: longitude values of stations
        - station_list['z0']: surface roughness (optional)
        """
        # z0 default
        if (station_list['Latitude']) and ('z0' not in station_list.keys()):
            # default value = 0 (no specified z0)
            station_list['z0'] = np.zeros(len(station_list['Latitude']))
        # adding stations (without duplication)
        for lat, lon, z0 in zip(station_list['Latitude'], station_list['Longitude'], station_list['z0']):
            self.station['Latitude'].append(lat)
            self.station['Longitude'].append(lon)
            if (z0 == 0):
                # interpolating z0 from terrain feature
                self.station['z0'].append(self.__interp_z0(lat, lon))
            else:
                self.station['z0'].append(z0)
            # updating station number
            self.station_num += 1

    def __calculate_heading(self):
        """
        __calculate_heading: computing the heading path
        """
        self.beta_c = np.zeros(len(self.track_lat_m))
        for i in range(len(self.track_lat_m) - 1):
            Delta = self.track_lon_m[i + 1] - self.track_lon_m[i] + self.EPS ** 2
            self.beta_c[i] = -self.delta_path[2] + 90.0 + self.RA * np.arctan2(np.sin(Delta / self.RA) \
                * np.cos(self.track_lat_m[i + 1] / self.RA), np.cos(self.track_lat_m[i] / self.RA) \
                * np.sin(self.track_lat_m[i + 1] / self.RA) - np.sin(self.track_lat_m[i] / self.RA) \
                * np.cos(self.track_lat_m[i + 1] / self.RA) * np.cos(Delta / self.RA))
        # positive angle values for beta_c
        self.beta_c = [x if x >= 0 else x + 360.0 for x in self.beta_c]
        # fixing the last value
        self.beta_c[-1] = self.beta_c[-2]

    def compute_wind_field(self):
        """
        compute_wind_field: computing the peak wind speed (10-min gust duration)
        """
        print('WindFieldSimulation: running linear analytical model.')
        # checking if all parameters are defined
        # calculating heading
        self.__calculate_heading()
        # initializing matrices
        station_lat = self.station['Latitude']
        station_lon = self.station['Longitude']
        station_umax = np.zeros((len(station_lat), len(self.zp)))
        u = np.zeros((len(self.theta), len(self.r), len(self.zp)))
        v = np.zeros((len(self.theta), len(self.r), len(self.zp)))
        vg1 = np.zeros((len(self.theta), len(self.r)))
        z0 = np.zeros(len(self.r))
        # looping over different storm cyclone locations
        for i in range(len(self.track_lat_m)):
            # location and heading
            lat = self.track_lat_m[i] + self.delta_path[0]
            lon = self.track_lon_m[i] - 0.3 * self.delta_path[1]
            beta = self.beta_c[i]
            # coriolis
            omega = 0.7292 * 1e-4
            f = 2.0 * omega * np.sin(lat * np.pi / 180.0)
            # looping over different polar coordinates theta
            for j in range(len(self.theta)):
                Ctheta = -self.cyclone_sped * np.sin((self.theta[j] - beta) / self.RA)
                if (self.theta[j] >= 0) and (self.theta[j] <= 90):
                    THETA = 90.0 - self.theta[j]
                else:
                    THETA = 450 - self.theta[j]
                lat_t = self.RA * np.arcsin(np.sin(lat / self.RA) * np.cos(self.r / self.R) \
                    + np.cos(lat / self.RA) * np.sin(self.r / self.R) * np.cos(THETA / self.RA))
                lon_t = lon + self.RA * np.arctan2(np.sin(THETA / self.RA) * np.sin(self.r / self.R) \
                    * np.cos(lat / self.RA), np.cos(self.r / self.R) - np.sin(lat / self.RA) * np.sin(lat_t))
                # looping over different polar coordinates r
                for k in range(len(self.r)):
                    z0[k] = self.__interp_z0(lat_t[k], lon_t[k])
                # configuring coefficients
                z10 = 10.0
                A = 11.4
                h = A * z0 ** 0.86
                d = 0.75 * h
                kappa = 0.40
                Cd = kappa ** 2 / (np.log((z10 + h - d) / z0)) ** 2
                #
                der_p = self.Holland_B * self.cyclone_radm ** self.Holland_B * self.cyclone_pres * (self.r ** (-self.Holland_B - 1)) \
                    * np.exp(-(self.cyclone_radm * self.r ** (-1.0)) ** self.Holland_B)
                der_p_2 = (-(self.Holland_B + 1) * (self.r ** (-1.0)) + self.Holland_B * self.cyclone_radm ** self.Holland_B \
                    * (self.r ** (-self.Holland_B - 1))) * der_p
                #
                vg1[j, :] = 0.5 * (Ctheta - f * self.r) + ((0.5 * (Ctheta - f * self.r)) ** 2.0 + (self.r / self.AIR_DENSITY) * der_p) ** 0.5
                der_vg1_r = -0.5 * f + 0.5 * ((((Ctheta - f * self.r) / 2.0) ** 2.0 + self.r / self.AIR_DENSITY * der_p) ** (-0.5)) \
                    * (-(Ctheta - f * self.r) * f / 2.0 + 1.0 / self.AIR_DENSITY * der_p + 1.0 / self.AIR_DENSITY * self.r * der_p_2)
                der_vg1_theta = -self.cyclone_sped * np.cos((self.theta[j] - beta) / self.RA) / 2.0 \
                    + 0.25 * self.cyclone_sped * np.cos((self.theta[j] - beta) / self.RA) * (-Ctheta + f * self.r) \
                    * ((0.5 * (Ctheta - f * self.r)) ** 2.0 + (self.r / self.AIR_DENSITY) * der_p) ** (-0.5)
                BB = 1.0 / (2.0 * self.EDDY_VISCOCITY * self.r) * der_vg1_theta
                Eta = ((0.5 * (Ctheta - f * self.r)) ** 2.0 + (self.r / self.AIR_DENSITY) * der_p) ** 0.5
                ALPHA = 1.0 / (2.0 * self.EDDY_VISCOCITY) * (f + 2.0 * vg1[j, :] / self.r)
                BETA = 1.0 / (2.0 * self.EDDY_VISCOCITY) * (f + vg1[j, :] / self.r + der_vg1_r)
                GAMMA = 1.0 / (2.0 * self.EDDY_VISCOCITY) * vg1[j, :] / self.r
                ALPHA = np.array([complex(x, y) for x, y in zip(np.real(ALPHA), np.imag(ALPHA))])
                BETA = np.array([complex(x, y) for x, y in zip(np.real(BETA), np.imag(BETA))])
                #
                XXX = -(ALPHA * BETA) ** 0.25
                YYY = -(ALPHA * BETA) ** 0.25
                PP_zero = np.array([complex(x, y) for x, y in zip(XXX, YYY)])
                PP_one = -complex(1, 1) * ((GAMMA + np.sqrt(ALPHA * BETA) - BB) ** 0.5)
                PP_minus_one = -complex(1, 1) * ((-GAMMA + np.sqrt(ALPHA * BETA) - BB) ** 0.5)
                #
                X1 = PP_zero + f * self.r * Cd / self.EDDY_VISCOCITY - 2.0 * Eta * Cd / self.EDDY_VISCOCITY \
                    - self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (PP_one - np.conj(PP_minus_one))) \
                    + self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (np.conj(PP_one) - PP_minus_one))
                X2 = -np.conj(PP_zero) - f * self.r * Cd / self.EDDY_VISCOCITY + 2.0 * Eta * Cd / self.EDDY_VISCOCITY \
                    - self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (PP_one - np.conj(PP_minus_one))) \
                    + self.cyclone_sped ** 2.0 * Cd ** 2.0 / (4.0 * self.EDDY_VISCOCITY ** 2.0 * (np.conj(PP_one) - PP_minus_one))
                X3 = complex(0, -2) * Cd / self.EDDY_VISCOCITY * (Eta - f * self.r / 2.0) ** 2.0
                X4 = -(-PP_zero - f * self.r * Cd / (2.0 * self.EDDY_VISCOCITY) + Eta * Cd / self.EDDY_VISCOCITY) \
                    / (-np.conj(PP_zero) - f * self.r * Cd / (2.0 * self.EDDY_VISCOCITY) + Eta * Cd / self.EDDY_VISCOCITY)
                A_zero = -X3 / (X1 + X2 * X4)
                A_one = complex(0, 1) * self.cyclone_sped * Cd * np.exp(complex(0, -1) * beta) \
                    / (4.0 * self.EDDY_VISCOCITY * (PP_one - np.conj(PP_minus_one))) * (A_zero + np.conj(A_zero))
                A_minus_one = -np.conj(A_one)
                # looping over different heights zp
                for ii in range(len(self.zp)):
                    u_zero = np.sqrt(ALPHA / BETA) * np.real(A_zero * np.exp(PP_zero * self.zp[ii]))
                    v_zero = np.imag(A_zero * np.exp(PP_zero * self.zp[ii]))
                    u_one = np.sqrt(ALPHA / BETA) * np.real(A_one * np.exp(PP_one * self.zp[ii] + complex(0, 1) * self.theta[j] / self.RA))
                    u_minus_one = np.sqrt(ALPHA / BETA) * np.real(A_minus_one * np.exp(PP_minus_one * self.zp[ii] - complex(0, 1) * self.theta[j] / self.RA))
                    v_one = np.imag(A_one * np.exp(PP_one * self.zp[ii] + complex(0, 1) * self.theta[j] / self.RA))
                    v_minus_one = np.imag(A_minus_one * np.exp(PP_minus_one * self.zp[ii] - complex(0, 1) * self.theta[j] / self.RA))
                    #
                    for tmptag in range(u.shape[1]):
                        u[j, tmptag, ii] = np.real(u_zero)[tmptag] + np.real(u_one)[tmptag] + np.real(u_minus_one)[tmptag]
                        v[j, tmptag, ii] = v_zero[tmptag] + v_one[tmptag] + v_minus_one[tmptag]
            # wind speed components
            v1 = v
            for m in range(v.shape[2]):
                v1[:, :, m] = v1[:, :, m] + vg1
            U = (v1 ** 2.0 + u ** 2.0) ** 0.5
            # mapping to stations
            dd = np.arccos(np.cos(np.array(station_lat) / self.RA) * np.cos(lat / self.RA) * np.cos((np.abs(np.array(station_lon)) - lon) / self.RA) \
                + np.sin(np.array(station_lat) / self.RA) * np.sin(lat / self.RA)) * 6371.0 * 180.0 / np.pi / self.RA * 1000.0
            Delta = np.abs(np.array(station_lon)) - lon + self.EPS ** 2.0
            bearing = 90.0 + self.RA * np.arctan2(np.sin(Delta / self.RA) * np.cos(np.array(station_lat) / self.RA), \
                np.cos(lat / self.RA) * np.sin(np.array(station_lat) / self.RA) - np.sin(lat / self.RA) * np.cos(np.array(station_lat) / self.RA) * np.cos(Delta / self.RA))
            bearing = [x if x >= 0 else x + 360.0 for x in bearing]
            jj = [int(x / self.mesh_info[4]) for x in bearing]
            kk = [min(int(x / self.mesh_info[1]), len(self.r) - 1) for x in dd]
            for ii in range(len(self.zp)):
                tmp = U[:, :, ii].tolist()
                wind_speed = [tmp[jtag][ktag] for jtag, ktag in zip(jj, kk)]
                station_umax[:, ii] = [max(x, y) for x, y in zip(wind_speed, station_umax[:, ii])]
        # copying results
        self.station['PWS']['height'] = self.zp
        self.station['PWS']['windspeed'] = station_umax.tolist()
        print('WindFieldSimulation: linear analytical simulation completed.')

    def get_station_data(self):
        """
        get_station_data: returning station data
        """
        # return station dictionary
        return self.station
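A minimal driver sketch for the class above, using only methods it defines; the storm parameters, coarse meshes, and the single station location are invented for illustration and are not taken from the original code.

# Hypothetical inputs, for illustration only; the numbers describe no real storm.
cyclone_param = [29.0, -90.0, 0.0, 50.0, 20.0, 40.0]  # lat, lon, angle (deg), dP (hPa), speed (km/h), Rmax (km)
storm_track = {'Latitude': [25.0, 27.0, 29.0, 31.0],
               'Longitude': [-88.0, -89.0, -90.0, -91.0]}

model = LinearAnalyticalModel_SnaikiWu_2017(cyclone_param, storm_track)
model.set_cyclone_mesh([1000.0, 2000.0, 100000.0, 0.0, 10.0, 360.0])  # coarse r / theta grid to keep the example fast
model.set_track_mesh([25.0, 0.25, 31.0])
model.set_measure_height([10.0, 10.0, 10.0])  # a single 10 m level
model.add_stations({'Latitude': [29.5], 'Longitude': [-90.2]})
model.compute_wind_field()
print(model.get_station_data()['PWS']['windspeed'])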