INSTRUCTION
stringlengths
1
8.43k
RESPONSE
stringlengths
75
104k
Deep Learning model demo.
def deeplearning(interactive=True, echo=True, testing=False): """Deep Learning model demo.""" def demo_body(go): """ Demo of H2O's Deep Learning model. This demo uploads a dataset to h2o, parses it, and shows a description. Then it divides the dataset into training and test set...
GLM model demo.
def glm(interactive=True, echo=True, testing=False): """GLM model demo.""" def demo_body(go): """ Demo of H2O's Generalized Linear Estimator. This demo uploads a dataset to h2o, parses it, and shows a description. Then it divides the dataset into training and test sets, builds ...
Execute the demo echoing commands and pausing for user input.
def _run_demo(body_fn, interactive, echo, testing): """ Execute the demo, echoing commands and pausing for user input. :param body_fn: function that contains the sequence of demo's commands. :param interactive: If True, the user will be prompted to continue the demonstration after every segment. :p...
Wait for a key press on the console and return it.
def _wait_for_keypress(): """ Wait for a key press on the console and return it. Borrowed from http://stackoverflow.com/questions/983354/how-do-i-make-python-to-wait-for-a-pressed-key """ result = None if os.name == "nt": # noinspection PyUnresolvedReferences import msvcrt ...
Create new H2OTwoDimTable object from list of ( key value ) tuples which are a pre - cursor to JSON dict.
def make(keyvals): """ Create new H2OTwoDimTable object from list of (key,value) tuples which are a pre-cursor to JSON dict. :param keyvals: list of (key, value) tuples :return: new H2OTwoDimTable object """ kwargs = {} for key, value in keyvals: if k...
Convert to a python data frame.
def as_data_frame(self): """Convert to a python 'data frame'.""" if can_use_pandas(): import pandas pandas.options.display.max_colwidth = 70 return pandas.DataFrame(self._cell_values, columns=self._col_header) return self
Print the contents of this table.
def show(self, header=True): """Print the contents of this table.""" # if h2o.can_use_pandas(): # import pandas # pandas.options.display.max_rows = 20 # print pandas.DataFrame(self._cell_values,columns=self._col_header) # return if header and self._table_heade...
r CSV reader yielding lists of unicode strings ( PY3: str ).
def reader(stream, dialect=DIALECT, encoding=False, **fmtparams): r"""CSV reader yielding lists of ``unicode`` strings (PY3: ``str``). Args: stream: Iterable of text (``unicode``, PY3: ``str``) lines. If an ``encoding`` is given, iterable of encoded (``str``, PY3: ``bytes``) lin...
Start new H2O server on the local machine.
def start(jar_path=None, nthreads=-1, enable_assertions=True, max_mem_size=None, min_mem_size=None, ice_root=None, log_dir=None, log_level=None, port="54321+", name=None, extra_classpath=None, verbose=True, jvm_custom_args=None, bind_to_localhost=True): """ Start new H2O serv...
Shut down the server by trying to terminate/ kill its process.
def shutdown(self): """ Shut down the server by trying to terminate/kill its process. First we attempt to terminate the server process gracefully (sending SIGTERM signal). However after _TIME_TO_KILL seconds if the process didn't shutdown, we forcefully kill it with a SIGKILL signal. ...
Return the location of an h2o. jar executable.
def _find_jar(self, path0=None): """ Return the location of an h2o.jar executable. :param path0: Explicitly given h2o.jar path. If provided, then we will simply check whether the file is there, otherwise we will search for an executable in locations returned by ._jar_paths(). ...
Produce potential paths for an h2o. jar executable.
def _jar_paths(): """Produce potential paths for an h2o.jar executable.""" # PUBDEV-3534 hook to use arbitrary h2o.jar own_jar = os.getenv("H2O_JAR_PATH", "") if own_jar != "": if not os.path.isfile(own_jar): raise H2OStartupError("Environment variable H2O_JA...
Actually start the h2o. jar executable ( helper method for. start () ).
def _launch_server(self, port, baseport, mmax, mmin, ea, nthreads, jvm_custom_args, bind_to_localhost, log_dir=None, log_level=None): """Actually start the h2o.jar executable (helper method for `.start()`).""" self._ip = "127.0.0.1" # Find Java and check version. (Note that subprocess.check_out...
Find location of the java executable ( helper for. _launch_server () ).
def _find_java(): """ Find location of the java executable (helper for `._launch_server()`). This method is not particularly robust, and may require additional tweaking for different platforms... :return: Path to the java executable. :raises H2OStartupError: if java cannot be fo...
Generate names for temporary files ( helper method for. _launch_server () ).
def _tmp_file(self, kind): """ Generate names for temporary files (helper method for `._launch_server()`). :param kind: one of "stdout", "stderr" or "salt". The "salt" kind is used for process name, not for a file, so it doesn't contain a path. All generated names are based on the u...
Check server s output log and determine its scheme/ IP/ port ( helper method for. _launch_server () ).
def _get_server_info_from_logs(self): """ Check server's output log, and determine its scheme / IP / port (helper method for `._launch_server()`). This method is polled during process startup. It looks at the server output log and checks for a presence of a particular string ("INFO: Ope...
Returns a confusion matrix based of H2O s default prediction threshold for a dataset.
def confusion_matrix(self, data): """ Returns a confusion matrix based of H2O's default prediction threshold for a dataset. :param H2OFrame data: the frame with the prediction results for which the confusion matrix should be extracted. """ assert_is_type(data, H2OFrame) ...
Retrieve the Hit Ratios.
def hit_ratio_table(self, train=False, valid=False, xval=False): """ Retrieve the Hit Ratios. If all are False (default), then return the training metric value. If more than one options is set to True, then return a dictionary of metrics where the keys are "train", "valid", and ...
Equivalent of csv. DictWriter but allows delimiter to be a unicode string on Py2.
def csv_dict_writer(f, fieldnames, **kwargs): """Equivalent of csv.DictWriter, but allows `delimiter` to be a unicode string on Py2.""" import csv if "delimiter" in kwargs: kwargs["delimiter"] = str(kwargs["delimiter"]) return csv.DictWriter(f, fieldnames, **kwargs)
Given a string return an iterator over this string s bytes ( as ints ).
def bytes_iterator(s): """Given a string, return an iterator over this string's bytes (as ints).""" if s is None: return if PY2 or PY3 and isinstance(s, str): for ch in s: yield ord(ch) elif PY3 and isinstance(s, bytes): for ch in s: yield ch else: rai...
Analogous to repr () but will suppress u prefix when repr - ing a unicode string.
def repr2(x): """Analogous to repr(), but will suppress 'u' prefix when repr-ing a unicode string.""" s = repr(x) if len(s) >= 2 and s[0] == "u" and (s[1] == "'" or s[1] == '"'): s = s[1:] return s
Get second token in line >>> docwriter = ApiDocWriter ( sphinx ) >>> docwriter. _get_object_name ( def func (): ) func >>> docwriter. _get_object_name ( class Klass ( object ): ) Klass >>> docwriter. _get_object_name ( class Klass: ) Klass
def _get_object_name(self, line): ''' Get second token in line >>> docwriter = ApiDocWriter('sphinx') >>> docwriter._get_object_name(" def func(): ") 'func' >>> docwriter._get_object_name(" class Klass(object): ") 'Klass' >>> docwriter._get_object_name(" clas...
Convert uri to absolute filepath
def _uri2path(self, uri): ''' Convert uri to absolute filepath Parameters ---------- uri : string URI of python module to return path for Returns ------- path : None or string Returns None if there is no valid path for this URI ...
Convert directory path to uri
def _path2uri(self, dirpath): ''' Convert directory path to uri ''' relpath = dirpath.replace(self.root_path, self.package_name) if relpath.startswith(os.path.sep): relpath = relpath[1:] return relpath.replace(os.path.sep, '.')
Parse module defined in * uri *
def _parse_module(self, uri): ''' Parse module defined in *uri* ''' filename = self._uri2path(uri) if filename is None: # nothing that we could handle here. return ([],[]) f = open(filename, 'rt') functions, classes = self._parse_lines(f) f.close()...
Parse lines of text for functions and classes
def _parse_lines(self, linesource): ''' Parse lines of text for functions and classes ''' functions = [] classes = [] for line in linesource: if line.startswith('def ') and line.count('('): # exclude private stuff name = self._get_object_name(l...
Make autodoc documentation template string for a module
def generate_api_doc(self, uri): '''Make autodoc documentation template string for a module Parameters ---------- uri : string python location of module - e.g 'sphinx.builder' Returns ------- S : string Contents of API doc ''' ...
Returns True if * matchstr * does not match patterns
def _survives_exclude(self, matchstr, match_type): ''' Returns True if *matchstr* does not match patterns ``self.package_name`` removed from front of string if present Examples -------- >>> dw = ApiDocWriter('sphinx') >>> dw._survives_exclude('sphinx.okpkg', 'package') ...
Return module sequence discovered from self. package_name
def discover_modules(self): ''' Return module sequence discovered from ``self.package_name`` Parameters ---------- None Returns ------- mods : sequence Sequence of module names within ``self.package_name`` Examples -------- ...
Generate API reST files.
def write_api_docs(self, outdir): """Generate API reST files. Parameters ---------- outdir : string Directory name in which to store files We create automatic filenames for each module Returns ------- None Notes ...
Make a reST API index file from written files
def write_index(self, outdir, froot='gen', relative_to=None): """Make a reST API index file from written files Parameters ---------- path : string Filename to write index to outdir : string Directory to which to write generated index file froot : ...
Main program.
def main(argv): """ Main program. @return: none """ global g_script_name g_script_name = os.path.basename(argv[0]) parse_config_file() parse_args(argv) url = 'https://0xdata.atlassian.net/rest/api/2/search?jql=sprint="' + urllib.quote(g_sprint) + '"&maxResults=1000' r = reques...
Convert this confusion matrix into a 2x2 plain list of values.
def to_list(self): """Convert this confusion matrix into a 2x2 plain list of values.""" return [[int(self.table.cell_values[0][1]), int(self.table.cell_values[0][2])], [int(self.table.cell_values[1][1]), int(self.table.cell_values[1][2])]]
Read confusion matrices from the list of sources ( ? ).
def read_cms(cms=None, domains=None): """Read confusion matrices from the list of sources (?).""" assert_is_type(cms, [list]) return [ConfusionMatrix(cm, domains) for cm in cms]
Load java messages that can be ignored pickle file into a dict structure g_ok_java_messages.
def load_dict(): """ Load java messages that can be ignored pickle file into a dict structure g_ok_java_messages. :return: none """ global g_load_java_message_filename global g_ok_java_messages if os.path.isfile(g_load_java_message_filename): # only load dict from file if it ex...
Add new java messages to ignore from user text file. It first reads in the new java ignored messages from the user text file and generate a dict structure to out of the new java ignored messages. This is achieved by function extract_message_to_dict. Next new java messages will be added to the original ignored java mess...
def add_new_message(): """ Add new java messages to ignore from user text file. It first reads in the new java ignored messages from the user text file and generate a dict structure to out of the new java ignored messages. This is achieved by function extract_message_to_dict. Next, new java messages ...
Remove java messages from ignored list if users desired it. It first reads in the java ignored messages from user stored in g_old_messages_to_remove and build a dict structure ( old_message_dict ) out of it. Next it removes the java messages contained in old_message_dict from g_ok_java_messages.: return: none
def remove_old_message(): """ Remove java messages from ignored list if users desired it. It first reads in the java ignored messages from user stored in g_old_messages_to_remove and build a dict structure (old_message_dict) out of it. Next, it removes the java messages contained in old_message_dict f...
Update the g_ok_java_messages dict structure by 1. add the new java ignored messages stored in message_dict if action == 1 2. remove the java ignored messages stired in message_dict if action == 2.
def update_message_dict(message_dict,action): """ Update the g_ok_java_messages dict structure by 1. add the new java ignored messages stored in message_dict if action == 1 2. remove the java ignored messages stired in message_dict if action == 2. Parameters ---------- message_dict : Pyth...
Read in a text file that java messages to be ignored and generate a dictionary structure out of it with key and value pairs. The keys are test names and the values are lists of java message strings associated with that test name where we are either going to add to the existing java messages to ignore or remove them fro...
def extract_message_to_dict(filename): """ Read in a text file that java messages to be ignored and generate a dictionary structure out of it with key and value pairs. The keys are test names and the values are lists of java message strings associated with that test name where we are either going to ad...
Add new key val ( ignored java message ) to dict message_dict.
def add_to_dict(val,key,message_dict): """ Add new key, val (ignored java message) to dict message_dict. Parameters ---------- val : Str contains ignored java messages. key : Str key for the ignored java messages. It can be "general" or any R or Python unit test names...
Save the ignored java message dict stored in g_ok_java_messages into a pickle file for future use.
def save_dict(): """ Save the ignored java message dict stored in g_ok_java_messages into a pickle file for future use. :return: none """ global g_ok_java_messages global g_save_java_message_filename global g_dict_changed if g_dict_changed: with open(g_save_java_message_filenam...
Write the java ignored messages in g_ok_java_messages into a text file for humans to read.
def print_dict(): """ Write the java ignored messages in g_ok_java_messages into a text file for humans to read. :return: none """ global g_ok_java_messages global g_java_messages_to_ignore_text_filename allKeys = sorted(g_ok_java_messages.keys()) with open(g_java_messages_to_ignore_t...
Parse user inputs and set the corresponing global variables to perform the necessary tasks.
def parse_args(argv): """ Parse user inputs and set the corresponing global variables to perform the necessary tasks. Parameters ---------- argv : string array contains flags and input options from users :return: """ global g_new_messages_to_exclude global g_old_messag...
Illustrate what the various input flags are and the options should be.
def usage(): """ Illustrate what the various input flags are and the options should be. :return: none """ global g_script_name # name of the script being run. print("") print("Usage: " + g_script_name + " [...options...]") print("") print(" --help print out this help menu a...
Main program.
def main(argv): """ Main program. @return: none """ global g_script_name global g_test_root_dir global g_new_messages_to_exclude global g_old_messages_to_remove global g_load_java_message_filename global g_save_java_message_filename global g_print_java_messages global g_...
Find all python files in the given directory and all subfolders.
def locate_files(root_dir): """Find all python files in the given directory and all subfolders.""" all_files = [] root_dir = os.path.abspath(root_dir) for dir_name, subdirs, files in os.walk(root_dir): for f in files: if f.endswith(".py"): all_files.append(os.path.joi...
Search the file for any magic incantations.
def find_magic_in_file(filename): """ Search the file for any magic incantations. :param filename: file to search :returns: a tuple containing the spell and then maybe some extra words (or None if no magic present) """ with open(filename, "rt", encoding="utf-8") as f: for line in f: ...
Parse file into chunks/ objects.
def parse_python_file(filename): """Parse file into chunks / objects.""" with open(filename, "rt", encoding="utf-8") as f: tokens = list(tokenize.generate_tokens(f.readline)) tokens = normalize_tokens(tokens) module = ChunkCode(tokens, 0, len(tokens)) module.parse() pr...
Executed when script is run as - is.
def main(): """Executed when script is run as-is.""" # magic_files = {} for filename in locate_files(ROOT_DIR): print("Processing %s" % filename) with open(filename, "rt") as f: tokens = list(tokenize.generate_tokens(f.readline)) text1 = tokenize.untokenize(tokens) ...
Returns H2OPCA object which implements fit and transform method to be used in sklearn. Pipeline properly. All parameters defined in self. __params should be input parameters in H2OPCA. __init__ method.
def init_for_pipeline(self): """ Returns H2OPCA object which implements fit and transform method to be used in sklearn.Pipeline properly. All parameters defined in self.__params, should be input parameters in H2OPCA.__init__ method. :returns: H2OPCA object """ import ins...
Transform H2OFrame using a MOJO Pipeline.
def transform(self, data, allow_timestamps=False): """ Transform H2OFrame using a MOJO Pipeline. :param data: Frame to be transformed. :param allow_timestamps: Allows datetime columns to be used directly with MOJO pipelines. It is recommended to parse your datetime columns as St...
This function will look at the local directory and pick out files that have the correct start name and summarize the results into one giant dict.
def summarizeFailedRuns(): """ This function will look at the local directory and pick out files that have the correct start name and summarize the results into one giant dict. :return: None """ global g_summary_dict_all onlyFiles = [x for x in listdir(g_test_root_dir) if isfile(join(g_tes...
This function will print out the intermittents onto the screen for casual viewing. It will also print out where the giant summary dictionary is going to be stored.
def extractPrintSaveIntermittens(): """ This function will print out the intermittents onto the screen for casual viewing. It will also print out where the giant summary dictionary is going to be stored. :return: None """ # extract intermittents from collected failed tests global g_summary...
Main program. Expect script name plus inputs in the following order: - This script name 1. threshold: integer that will denote when a failed test will be declared an intermittent 2. string denote filename of where our final dict structure will be stored. 3. string that denote the beginning of a file containing failed t...
def main(argv): """ Main program. Expect script name plus inputs in the following order: - This script name 1. threshold: integer that will denote when a failed test will be declared an intermittent 2. string denote filename of where our final dict structure will be stored. 3. string that deno...
Display a short summary of the metrics.
def show(self): """Display a short summary of the metrics.""" if self._metric_json==None: print("WARNING: Model metrics cannot be calculated and metric_json is empty due to the absence of the response column in your dataset.") return metric_type = self._metric_json['__met...
: param thresholds: thresholds parameter must be a list ( i. e. [ 0. 01 0. 5 0. 99 ] ). If None then the thresholds in this set of metrics will be used.: returns: mean per class error.
def mean_per_class_error(self, thresholds=None): """ :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used. :returns: mean per class error. """ return [[x[0], 1 - x[1]] for x i...
: param str metric: The desired metric.: param thresholds: thresholds parameter must be a list ( i. e. [ 0. 01 0. 5 0. 99 ] ). If None then the thresholds in this set of metrics will be used.: returns: The set of metrics for the list of thresholds.
def metric(self, metric, thresholds=None): """ :param str metric: The desired metric. :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used. :returns: The set of metrics for the list o...
Produce the desired metric plot.
def plot(self, type="roc", server=False): """ Produce the desired metric plot. :param type: the type of metric plot (currently, only ROC supported). :param server: if True, generate plot inline using matplotlib's "Agg" backend. :returns: None """ # TODO: add more...
Get the confusion matrix for the specified metric
def confusion_matrix(self, metrics=None, thresholds=None): """ Get the confusion matrix for the specified metric :param metrics: A string (or list of strings) among metrics listed in :const:`max_metrics`. Defaults to 'f1'. :param thresholds: A value (or list of values) between 0 and 1. ...
: param metrics: A string among the metrics listed in: const: max_metrics.: returns: the threshold at which the given metric is maximal.
def find_threshold_by_max_metric(self, metric): """ :param metrics: A string among the metrics listed in :const:`max_metrics`. :returns: the threshold at which the given metric is maximal. """ crit2d = self._metric_json['max_criteria_and_metric_scores'] for e in crit2d.c...
Retrieve the index in this metric s threshold list at which the given threshold is located.
def find_idx_by_threshold(self, threshold): """ Retrieve the index in this metric's threshold list at which the given threshold is located. :param threshold: Find the index of this input threshold. :returns: the index :raises ValueError: if no such index can be found. ""...
Generate C# declaration file for a schema.
def generate_schema(class_name, schema): """ Generate C# declaration file for a schema. """ has_map = False for field in schema["fields"]: if field["type"].startswith("Map"): has_map = True superclass = schema["superclass"] if superclass == "Iced": superclass = "Object" yield "...
Returns True if a deep water model can be built or False otherwise.
def available(): """Returns True if a deep water model can be built, or False otherwise.""" builder_json = h2o.api("GET /3/ModelBuilders", data={"algo": "deepwater"}) visibility = builder_json["model_builders"]["deepwater"]["visibility"] if visibility == "Experimental": print...
Grab the console output from Jenkins and save the content into a temp file ( g_temp_filename ). From the saved text file we can grab the names of failed tests.
def get_console_out(url_string): """ Grab the console output from Jenkins and save the content into a temp file (g_temp_filename). From the saved text file, we can grab the names of failed tests. Parameters ---------- url_string : str contains information on the jenkins job whos...
This method will remove data from the summary text file and the dictionary file for tests that occurs before the number of months specified by monthToKeep.
def trim_data_back_to(monthToKeep): """ This method will remove data from the summary text file and the dictionary file for tests that occurs before the number of months specified by monthToKeep. :param monthToKeep: :return: """ global g_failed_tests_info_dict current_time = time.time()...
Main program. Expect script name plus 7 inputs in the following order: - This script name 1. timestamp: time in s 2. jenkins_job_name ( JOB_NAME ) 3. build_id ( BUILD_ID ) 4. git hash ( GIT_COMMIT ) 5. node name ( NODE_NAME ) 6. unit test category ( JUnit PyUnit RUnit Hadoop ) 7. Jenkins URL ( JENKINS_URL ) 8. Text fil...
def main(argv): """ Main program. Expect script name plus 7 inputs in the following order: - This script name 1. timestamp: time in s 2. jenkins_job_name (JOB_NAME) 3. build_id (BUILD_ID) 4. git hash (GIT_COMMIT) 5. node name (NODE_NAME) 6. unit test category (JUnit, PyUnit, RUnit, ...
Entry point for the bindings module. It parses the command line arguments and verifies their correctness.: param language -- name of the target language ( used to show the command - line description ).: param output_dir -- folder where the bindings files will be generated. If the folder does not exist it will be create...
def init(language, output_dir, clear_dir=True): """ Entry point for the bindings module. It parses the command line arguments and verifies their correctness. :param language -- name of the target language (used to show the command-line description). :param output_dir -- folder where the bindings...
Print the provided string { msg } but only when the -- verbose option is on.: param msg String to print.: param pretty If on then pprint () will be used instead of the regular print function.
def vprint(msg, pretty=False): """ Print the provided string {msg}, but only when the --verbose option is on. :param msg String to print. :param pretty If on, then pprint() will be used instead of the regular print function. """ if not config["verbose"]: return if pretty: ...
Helper function that wraps msg to 120 - chars page width. All lines ( except maybe 1st ) will be prefixed with string { indent }. First line is prefixed only if { indent_first } is True.: param msg: string to indent: param indent: string that will be used for indentation: param indent_first: if True then the first line...
def wrap(msg, indent, indent_first=True): """ Helper function that wraps msg to 120-chars page width. All lines (except maybe 1st) will be prefixed with string {indent}. First line is prefixed only if {indent_first} is True. :param msg: string to indent :param indent: string that will be used fo...
Return the list of REST API endpoints. The data is enriched with the following fields: class_name: which back - end class handles this endpoint ( the class is derived from the URL ) ; ischema: input schema object ( input_schema is the schema s name ) oschema: output schema object ( output_schema is the schema s name ) ...
def endpoints(raw=False): """ Return the list of REST API endpoints. The data is enriched with the following fields: class_name: which back-end class handles this endpoint (the class is derived from the URL); ischema: input schema object (input_schema is the schema's name) oschema: output sche...
Return endpoints grouped by the class which handles them.
def endpoint_groups(): """Return endpoints, grouped by the class which handles them.""" groups = defaultdict(list) for e in endpoints(): groups[e["class_name"]].append(e) return groups
Return the list of H₂O schemas.
def schemas(raw=False): """ Return the list of H₂O schemas. :param raw: if True, then the complete response to .../schemas is returned (including the metadata) """ json = _request_or_exit("/3/Metadata/schemas") if raw: return json assert "schemas" in json, "Unexpected result from /3/Metadat...
Returns a dictionary of H₂O schemas indexed by their name.
def schemas_map(add_generics=False): """ Returns a dictionary of H₂O schemas, indexed by their name. """ m = {} for schema in schemas(): if schema["name"].startswith('AutoML'): continue # Generation code doesn't know how to deal with defaults for complex objects yet if schema["name"...
Return the dictionary of H₂O enums retrieved from data in schemas (). For each entry in the dictionary its key is the name of the enum and the value is the set of all enum values.
def enums(): """ Return the dictionary of H₂O enums, retrieved from data in schemas(). For each entry in the dictionary its key is the name of the enum, and the value is the set of all enum values. """ enumset = defaultdict(set) for schema in schemas(): for field in schema["fields"]: ...
Writes content to the given file. The file s directory will be created if needed.: param filename: name of the output file relative to the destination folder provided by the user: param content: iterable ( line - by - line ) that should be written to the file. Either a list or a generator. Each line will be appended wi...
def write_to_file(filename, content): """ Writes content to the given file. The file's directory will be created if needed. :param filename: name of the output file, relative to the "destination folder" provided by the user :param content: iterable (line-by-line) that should be written to the file. ...
Internal function: retrieve and return json data from the provided endpoint or die with an error message if the URL cannot be retrieved.
def _request_or_exit(endpoint): """ Internal function: retrieve and return json data from the provided endpoint, or die with an error message if the URL cannot be retrieved. """ if endpoint[0] == "/": endpoint = endpoint[1:] if endpoint in requests_memo: return requests_memo[endp...
Creates a new Amazon S3 client internally with specified credentials. There are no validations done to the credentials. Incorrect credentials are thus revealed with first S3 import call. secretKeyId Amazon S3 Secret Key ID ( provided by Amazon ) secretAccessKey Amazon S3 Secret Access Key ( provided by Amazon )
def set_s3_credentials(secret_key_id, secret_access_key): """Creates a new Amazon S3 client internally with specified credentials. There are no validations done to the credentials. Incorrect credentials are thus revealed with first S3 import call. secretKeyId Amazon S3 Secret Key ID (provided by Amazon...
Return the resulting H2OFrame containing the result ( s ) of aggregation ( s ) of the group by.
def get_frame(self): """ Return the resulting H2OFrame containing the result(s) of aggregation(s) of the group by. The number of rows denote the number of groups generated by the group by operation. The number of columns depend on the number of aggregations performed, the number of col...
This is a helper function to order all schemas according to their usage. For example if schema A uses schemas B and C then they should be reordered as { B C A }.: param schema: schema object that we are processing right now: param ordered_schemas: an OrderedDict of schemas that were already encountered. This is also th...
def add_schema_to_dependency_array(schema, ordered_schemas, schemas_map): """ This is a helper function to order all schemas according to their usage. For example, if schema A uses schemas B and C, then they should be reordered as {B, C, A}. :param schema: schema object that we are processing right no...
Set site domain and name.
def update_site_forward(apps, schema_editor): """Set site domain and name.""" Site = apps.get_model("sites", "Site") Site.objects.update_or_create( id=settings.SITE_ID, defaults={ "domain": "{{cookiecutter.domain_name}}", "name": "{{cookiecutter.project_name}}", ...
Example: opting out for 50 symbol - long [ a - z ] [ A - Z ] [ 0 - 9 ] string would yield log_2 (( 26 + 26 + 50 ) ^50 ) ~ = 334 bit strength.
def generate_random_string( length, using_digits=False, using_ascii_letters=False, using_punctuation=False ): """ Example: opting out for 50 symbol-long, [a-z][A-Z][0-9] string would yield log_2((26+26+50)^50) ~= 334 bit strength. """ if not using_sysrandom: return None ...
: param self: bot: param text: text of message: param user_ids: list of user_ids for creating group or one user_id for send to one person: param thread_id: thread_id
def send_message(self, text, user_ids, thread_id=None): """ :param self: bot :param text: text of message :param user_ids: list of user_ids for creating group or one user_id for send to one person :param thread_id: thread_id """ user_ids = _get_user_ids(self, user_ids) if not isinstance(...
: param media_id:: param self: bot: param text: text of message: param user_ids: list of user_ids for creating group or one user_id for send to one person: param thread_id: thread_id
def send_media(self, media_id, user_ids, text='', thread_id=None): """ :param media_id: :param self: bot :param text: text of message :param user_ids: list of user_ids for creating group or one user_id for send to one person :param thread_id: thread_id """ user_ids = _get_user_ids(self, ...
: param hashtag: hashtag: param self: bot: param text: text of message: param user_ids: list of user_ids for creating group or one user_id for send to one person: param thread_id: thread_id
def send_hashtag(self, hashtag, user_ids, text='', thread_id=None): """ :param hashtag: hashtag :param self: bot :param text: text of message :param user_ids: list of user_ids for creating group or one user_id for send to one person :param thread_id: thread_id """ user_ids = _get_user_id...
: param profile_user_id: profile_id: param self: bot: param text: text of message: param user_ids: list of user_ids for creating group or one user_id for send to one person: param thread_id: thread_id
def send_profile(self, profile_user_id, user_ids, text='', thread_id=None): """ :param profile_user_id: profile_id :param self: bot :param text: text of message :param user_ids: list of user_ids for creating group or one user_id for send to one person :param thread_id: thread_id """ prof...
Adds the default_data to data and dumps it to a json.
def json_data(self, data=None):
    """Merge ``self.default_data`` into *data* and return the JSON string.

    :param data: optional dict of request fields. The caller's dict is left
        untouched — a shallow copy is merged instead of mutating the argument
        (the previous implementation updated the caller's dict in place).
    :return: JSON-encoded string of ``data`` overlaid with ``default_data``;
        ``default_data`` wins on key collisions, exactly as before.
    """
    # Copy first to avoid the mutate-the-caller's-argument pitfall.
    payload = {} if data is None else dict(data)
    payload.update(self.default_data)
    return json.dumps(payload)
Input: user_ids - a list of user_id. Output: dictionary mapping user_id to stories data. Basically, for each user, outputs the same as self.get_user_reel does for a single user.
def get_users_reel(self, user_ids): """ Input: user_ids - a list of user_id Output: dictionary: user_id - stories data. Basically, for each user output the same as after self.get_user_reel """ url = 'feed/reels_media/' res = self.send_request( url, ...
Input - the list of reels jsons. They can be acquired by using the get_users_reel() or get_user_reel() methods.
def see_reels(self, reels): """ Input - the list of reels jsons They can be aquired by using get_users_reel() or get_user_reel() methods """ if not isinstance(reels, list): reels = [reels] story_seen = {} now = int(time.time()) for i, ...
Comments last user_id's medias
def comment_user(self, user_id, amount=None): """ Comments last user_id's medias """ if not self.check_user(user_id, filter_closed_acc=True): return False self.logger.info("Going to comment user_%s's feed:" % user_id) user_id = self.convert_to_user_id(user_id) medias = self.get_user_medias(u...
Sleep only if elapsed time since self.last[key] < self.delay[key].
def delay(self, key): """Sleep only if elapsed time since `self.last[key]` < `self.delay[key]`.""" last_action, target_delay = self.last[key], self.delays[key] elapsed_time = time.time() - last_action if elapsed_time < target_delay: t_remaining = target_delay - elapsed_time ...
Returns login and password stored in secret. txt.
def get_credentials(username=None): """Returns login and password stored in `secret.txt`.""" while not check_secret(): pass while True: try: with open(SECRET_FILE, "r") as f: lines = [line.strip().split(":", 2) for line in f.readlines()] except ValueError:...
Likes last user_id's medias
def like_user(self, user_id, amount=None, filtration=True): """ Likes last user_id's medias """ if filtration: if not self.check_user(user_id): return False self.logger.info("Liking user_%s's feed:" % user_id) user_id = self.convert_to_user_id(user_id) medias = self.get_user_medi...
Likes last medias from hashtag
def like_hashtag(self, hashtag, amount=None):
    """Like the most recent medias posted under *hashtag*.

    :param hashtag: hashtag to search for (without the leading '#').
    :param amount: optional cap on how many medias to fetch; forwarded
        unchanged to ``self.get_total_hashtag_medias``.
    :return: the result of ``self.like_medias`` on the fetched medias.
    """
    self.logger.info("Going to like media with hashtag #%s." % hashtag)
    return self.like_medias(self.get_total_hashtag_medias(hashtag, amount))
Filter bot from real users.
def check_not_bot(self, user_id): """ Filter bot from real users. """ self.small_delay() user_id = self.convert_to_user_id(user_id) if not user_id: return False if user_id in self.whitelist: return True if user_id in self.blacklist: return False user_info = self.get_...
Reads list from file. One line - one item. Returns the list if file items.
def read_list_from_file(file_path, quiet=False): """ Reads list from file. One line - one item. Returns the list if file items. """ try: if not check_if_file_exists(file_path, quiet=quiet): return [] with codecs.open(file_path, "r", encoding="utf-8") as f: ...
Gets tweets for a given user via the Twitter frontend API.
def get_tweets(user, pages=25): """Gets tweets for a given user, via the Twitter frontend API.""" url = f'https://twitter.com/i/profiles/show/{user}/timeline/tweets?include_available_features=1&include_entities=1&include_new_items_bar=true' headers = { 'Accept': 'application/json, text/javascript, ...
Add a specific enqueue time to the message.
def schedule(self, schedule_time): """Add a specific enqueue time to the message. :param schedule_time: The scheduled time to enqueue the message. :type schedule_time: ~datetime.datetime """ if not self.properties.message_id: self.properties.message_id = str(uuid.uui...
Complete the message.
def complete(self): """Complete the message. This removes the message from the queue. :raises: ~azure.servicebus.common.errors.MessageAlreadySettled if the message has been settled. :raises: ~azure.servicebus.common.errors.MessageLockExpired if message lock has already expired. ...
Move the message to the Dead Letter queue.
def dead_letter(self, description=None): """Move the message to the Dead Letter queue. The Dead Letter queue is a sub-queue that can be used to store messages that failed to process correctly, or otherwise require further inspection or processing. The queue can also be configured to sen...
Abandon the message.
def abandon(self): """Abandon the message. This message will be returned to the queue to be reprocessed. :raises: ~azure.servicebus.common.errors.MessageAlreadySettled if the message has been settled. :raises: ~azure.servicebus.common.errors.MessageLockExpired if message lock has alrea...