Dataset columns (name, dtype, min, max):

  content            string   lengths 1 to 1.05M characters
  input_ids          list     lengths 1 to 883k tokens
  ratio_char_token   float64  values 1 to 22.9
  token_count        int64    values 1 to 883k
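Each row below pairs a raw source file (content) with its token-ID list (input_ids, truncated in this dump), plus two derived columns. A minimal sketch of how such a row could be rebuilt follows; the choice of GPT-2's byte-level BPE is an assumption (the visible ids are consistent with that vocabulary: 198 is a newline, 1330 is " import"), and the helper name make_row is ours, not the dataset's.

# A minimal sketch, assuming a GPT-2 byte-level BPE tokenizer (the dump does
# not state which tokenizer was actually used) and the Hugging Face
# transformers library. `make_row` is a hypothetical helper.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")

def make_row(content: str) -> dict:
    # The two derived columns are plain functions of the first two.
    input_ids = tokenizer(content)["input_ids"]
    token_count = len(input_ids)
    return {
        "content": content,
        "input_ids": input_ids,
        "ratio_char_token": len(content) / token_count,
        "token_count": token_count,
    }

row = make_row("from __future__ import absolute_import\n")
print(row["token_count"], round(row["ratio_char_token"], 6))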
from __future__ import absolute_import

import six

from rest_framework.response import Response

from sentry.api.bases.project import ProjectEndpoint
from sentry.models import TagKey, TagKeyStatus
[ 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 198, 198, 11748, 2237, 198, 198, 6738, 1334, 62, 30604, 13, 26209, 1330, 18261, 198, 198, 6738, 1908, 563, 13, 15042, 13, 65, 1386, 13, 16302, 1330, 4935, 12915, 4122, 198, 6738, 1908, 5...
3.826923
52
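A quick sanity check on this first row, assuming ratio_char_token = len(content) / token_count: 3.826923 × 52 = 199.0, so the snippet above should be about 199 characters long, which is consistent with its length.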
from __future__ import print_function  # For Py2/3 compatibility
import async_eel
import random
import asyncio


loop = asyncio.get_event_loop()


async def print_num(n):
    """callback of js_random"""
    print('Got this from Javascript:', n)


if __name__ == '__main__':
    asyncio.run_coroutine_threadsafe(main(), loop)
    loop.run_forever()
[ 6738, 11593, 37443, 834, 1330, 3601, 62, 8818, 197, 2, 1114, 9485, 17, 14, 18, 17764, 198, 11748, 30351, 62, 68, 417, 198, 11748, 4738, 198, 11748, 30351, 952, 628, 198, 26268, 796, 30351, 952, 13, 1136, 62, 15596, 62, 26268, 3419, ...
2.837398
123
# coding=utf-8
"""
Access methods for indexing datasets & products.
"""

import logging

from datacube.config import LocalConfig
from datacube.drivers import index_driver_by_name, index_drivers

from .index import Index

_LOG = logging.getLogger(__name__)


def index_connect(local_config=None, application_name=None, validate_connection=True):
    # type: (LocalConfig, str, bool) -> Index
    """
    Create a Data Cube Index that can connect to a PostgreSQL server

    It contains all the required connection parameters, but doesn't actually
    check that the server is available.

    :param application_name: A short, alphanumeric name to identify this application.
    :param datacube.config.LocalConfig local_config: Config object to use. (optional)
    :param validate_connection: Validate database connection and schema immediately
    :rtype: datacube.index.index.Index
    :raises datacube.drivers.postgres._connections.IndexSetupError:
    """
    if local_config is None:
        local_config = LocalConfig.find()

    driver_name = local_config.get('index_driver', 'default')
    index_driver = index_driver_by_name(driver_name)
    if not index_driver:
        raise RuntimeError(
            "No index driver found for %r. %s available: %s" % (
                driver_name, len(index_drivers()), ', '.join(index_drivers())
            )
        )

    return index_driver.connect_to_index(local_config,
                                         application_name=application_name,
                                         validate_connection=validate_connection)
[ 2, 19617, 28, 40477, 12, 23, 198, 37811, 198, 15457, 5050, 329, 6376, 278, 40522, 1222, 3186, 13, 198, 37811, 198, 198, 11748, 18931, 198, 198, 6738, 4818, 330, 3266, 13, 11250, 1330, 10714, 16934, 198, 6738, 4818, 330, 3266, 13, 3670...
2.753927
573
#!/usr/bin/env python3
#
# load_message.py - takes a single email or mbox formatted
# file on stdin or in a file and reads it into the database.
#

import os
import sys

from optparse import OptionParser
from configparser import ConfigParser

import psycopg2

from lib.storage import ArchivesParserStorage
from lib.mbox import MailboxBreakupParser
from lib.exception import IgnorableException
from lib.log import log, opstatus
from lib.varnish import VarnishPurger

if __name__ == "__main__":
    optparser = OptionParser()
    optparser.add_option('-l', '--list', dest='list', help='Name of list to load message for')
    optparser.add_option('-d', '--directory', dest='directory', help='Load all messages in directory')
    optparser.add_option('-m', '--mbox', dest='mbox', help='Load all messages in mbox')
    optparser.add_option('-i', '--interactive', dest='interactive', action='store_true', help='Prompt after each message')
    optparser.add_option('-v', '--verbose', dest='verbose', action='store_true', help='Verbose output')
    optparser.add_option('--force-date', dest='force_date', help='Override date (used for dates that can\'t be parsed)')
    optparser.add_option('--filter-msgid', dest='filter_msgid', help='Only process message with given msgid')

    (opt, args) = optparser.parse_args()

    if (len(args)):
        print("No bare arguments accepted")
        optparser.print_usage()
        sys.exit(1)

    if not opt.list:
        print("List must be specified")
        optparser.print_usage()
        sys.exit(1)

    if opt.directory and opt.mbox:
        print("Can't specify both directory and mbox!")
        optparser.print_usage()
        sys.exit(1)

    if opt.force_date and (opt.directory or opt.mbox) and not opt.filter_msgid:
        print("Can't use force_date with directory or mbox - only individual messages")
        optparser.print_usage()
        sys.exit(1)

    if opt.filter_msgid and not (opt.directory or opt.mbox):
        print("filter_msgid makes no sense without directory or mbox!")
        optparser.print_usage()
        sys.exit(1)

    log.set(opt.verbose)

    cfg = ConfigParser()
    cfg.read('%s/archives.ini' % os.path.realpath(os.path.dirname(sys.argv[0])))
    try:
        connstr = cfg.get('db', 'connstr')
    except Exception:
        connstr = 'need_connstr'

    conn = psycopg2.connect(connstr)
    curs = conn.cursor()

    # Take an advisory lock to force serialization.
    # We could do this "properly" by reordering operations and using ON CONFLICT,
    # but concurrency is not that important and this is easier...
    try:
        curs.execute("SET statement_timeout='30s'")
        curs.execute("SELECT pg_advisory_xact_lock(8059944559669076)")
    except Exception as e:
        print(("Failed to wait on advisory lock: %s" % e))
        sys.exit(1)

    # Get the listid we're working on
    curs.execute("SELECT listid FROM lists WHERE listname=%(list)s", {
        'list': opt.list
    })
    r = curs.fetchall()
    if len(r) != 1:
        log.error("List %s not found" % opt.list)
        conn.close()
        sys.exit(1)
    listid = r[0][0]

    purges = set()

    if opt.directory:
        # Parse all files in directory
        for x in os.listdir(opt.directory):
            log.status("Parsing file %s" % x)
            with open(os.path.join(opt.directory, x)) as f:
                ap = ArchivesParserStorage()
                ap.parse(f)
                if opt.filter_msgid and not ap.is_msgid(opt.filter_msgid):
                    continue
                try:
                    ap.analyze(date_override=opt.force_date)
                except IgnorableException as e:
                    log_failed_message(listid, "directory", os.path.join(opt.directory, x), ap, e)
                    opstatus.failed += 1
                    continue
                ap.store(conn, listid)
                purges.update(ap.purges)
            if opt.interactive:
                print("Interactive mode, committing transaction")
                conn.commit()
                print("Proceed to next message with Enter, or input a period (.) to stop processing")
                x = input()
                if x == '.':
                    print("Ok, aborting!")
                    break
                print("---------------------------------")
    elif opt.mbox:
        if not os.path.isfile(opt.mbox):
            print("File %s does not exist" % opt.mbox)
            sys.exit(1)
        mboxparser = MailboxBreakupParser(opt.mbox)
        while not mboxparser.EOF:
            ap = ArchivesParserStorage()
            msg = next(mboxparser)
            if not msg:
                break
            ap.parse(msg)
            if opt.filter_msgid and not ap.is_msgid(opt.filter_msgid):
                continue
            try:
                ap.analyze(date_override=opt.force_date)
            except IgnorableException as e:
                log_failed_message(listid, "mbox", opt.mbox, ap, e)
                opstatus.failed += 1
                continue
            ap.store(conn, listid)
            purges.update(ap.purges)
        if mboxparser.returncode():
            log.error("Failed to parse mbox:")
            log.error(mboxparser.stderr_output())
            sys.exit(1)
    else:
        # Parse single message on stdin
        ap = ArchivesParserStorage()
        ap.parse(sys.stdin.buffer)
        try:
            ap.analyze(date_override=opt.force_date)
        except IgnorableException as e:
            log_failed_message(listid, "stdin", "", ap, e)
            conn.close()
            sys.exit(1)
        ap.store(conn, listid)
        purges.update(ap.purges)
        if opstatus.stored:
            log.log("Stored message with message-id %s" % ap.msgid)

    conn.commit()
    conn.close()
    opstatus.print_status()

    VarnishPurger(cfg).purge(purges)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 2, 198, 2, 3440, 62, 20500, 13, 9078, 532, 2753, 257, 2060, 3053, 393, 285, 3524, 39559, 198, 2, 2393, 319, 14367, 259, 393, 287, 257, 2393, 290, 9743, 340, 656, 262, 6831, 13,...
2.205099
2,667
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-03-10 14:30
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 2980, 515, 416, 37770, 352, 13, 1157, 13, 1959, 319, 12131, 12, 3070, 12, 940, 1478, 25, 1270, 198, 6738, 11593, 37443, 834, 1330, 28000, 1098, 62, 17201, 874, 198,...
2.753623
69
from dataset.baseset import BaseSet
import random, cv2
import numpy as np
[ 6738, 27039, 13, 65, 1386, 316, 1330, 7308, 7248, 201, 198, 11748, 4738, 11, 269, 85, 17, 201, 198, 11748, 299, 32152, 355, 45941, 201, 198, 201, 198, 201, 198, 201, 198, 201, 198, 201, 198, 201, 198, 201, 198, 201, 198, 201, 198,...
1.980769
52
# -*- coding: utf-8 -*-

import mock
from nose.tools import *  # noqa (PEP8 asserts)

import hmac
import hashlib
from StringIO import StringIO

from django.core.exceptions import ValidationError
from django.db import IntegrityError
import furl

from framework.auth import get_or_create_user
from framework.auth.core import Auth
from osf.models import OSFUser, AbstractNode
from addons.wiki.models import WikiVersion
from osf.exceptions import BlacklistedEmailError
from website import settings
from website.conferences import views
from website.conferences import utils, message
from website.util import api_url_for, web_url_for

from tests.base import OsfTestCase, fake
from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory


class TestProvisionNode(ContextTestCase):
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 11748, 15290, 198, 6738, 9686, 13, 31391, 1330, 1635, 220, 1303, 645, 20402, 357, 47, 8905, 23, 29348, 8, 198, 198, 11748, 289, 20285, 198, 11748, 12234, 8019, 198,...
3.553097
226
import socketserver
import socket
import sys
import threading
import json
import queue
import time
import datetime
import traceback


def run(config, handlers):
    server = Server(handlers)
    server.configure(config)
    return server
[ 11748, 37037, 18497, 198, 11748, 17802, 198, 11748, 25064, 198, 11748, 4704, 278, 198, 11748, 33918, 198, 11748, 16834, 198, 11748, 640, 198, 11748, 4818, 8079, 198, 11748, 12854, 1891, 198, 220, 220, 220, 220, 220, 220, 220, 220, 198, ...
3.194805
77
# -*- coding: ascii -*-
import sys
import json


# handles a zoom firmware
if __name__ == "__main__":
    if len(sys.argv) == 2:
        f = open(sys.argv[1], "rb")
        data = f.read()
        f.close()
        check(data)
[ 2, 532, 9, 12, 19617, 25, 355, 979, 72, 532, 9, 12, 201, 198, 11748, 25064, 201, 198, 11748, 33918, 201, 198, 201, 198, 201, 198, 2, 17105, 257, 19792, 18779, 201, 198, 361, 11593, 3672, 834, 6624, 366, 834, 12417, 834, 1298, 201,...
1.897638
127
# 6 a-zA-Z0-9
import random

source = ''
lower_char = [chr(x) for x in range(ord('a'), ord('z') + 1)]
upper_char = [chr(x) for x in range(ord('A'), ord('Z') + 1)]
number_char = [chr(x) for x in range(ord('0'), ord('9') + 1)]
source += "".join(lower_char)
source += "".join(upper_char)
source += "".join(number_char)
source += "_"
print(source)

# 20
while True:
    s = "".join(random.sample(source, 20))
    if '_' in s:
        print(s)
        break
[ 2, 718, 257, 12, 89, 32, 12, 57, 15, 12, 24, 198, 11748, 4738, 198, 198, 10459, 796, 10148, 198, 21037, 62, 10641, 796, 685, 354, 81, 7, 87, 8, 329, 2124, 287, 2837, 7, 585, 10786, 64, 33809, 2760, 10786, 89, 11537, 1343, 352, ...
2.242574
202
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from .models import GBIC, GBICType
from .views import GBICListViewSet

# Create your tests here.
[ 6738, 42625, 14208, 13, 9288, 1330, 6208, 20448, 198, 6738, 1334, 62, 30604, 13, 9288, 1330, 7824, 18453, 22810, 198, 6738, 764, 27530, 1330, 13124, 2149, 11, 13124, 2149, 6030, 198, 6738, 764, 33571, 1330, 13124, 2149, 8053, 7680, 7248, ...
3.693878
49
import inspect from typing import List, Union, Set, Any import numpy as np from fruits.cache import Cache, CoquantileCache from fruits.scope import force_input_shape, FitTransform from fruits.core.callback import AbstractCallback from fruits.signature.iss import SignatureCalculator, CachePlan from fruits.words.word import Word from fruits.sieving.abstract import FeatureSieve from fruits.preparation.abstract import DataPreparateur def branch(self, index: int = None): """Returns the currently selected branch or the branch with the given index. :rtype: FruitBranch """ if index is None: return self._branches[self._cbi] return self._branches[index] def branches(self) -> list: """Returns all branches of this Fruit object. :rtype: list """ return self._branches def switch_branch(self, index: int): """Switches to the branch with the given index. :param index: Integer in ``[0, 1, ..., len(self.branches())-1]`` :type index: int """ if not (0 <= index < len(self._branches)): raise IndexError("Index has to be in [0, len(self.branches()))") self._cbi = index def add(self, *objects: Union[FitTransform, Word, type]): """Adds one or multiple object(s) to the currently selected branch. :param objects: One or more objects of the following types: - :class:`~fruits.preparation.abstract.DataPreparateur` - :class:`~fruits.words.word.Word` - :class:`~fruits.sieving.abstract.FeatureSieve` :type objects: Union[FitTransform, Word] """ if len(self._branches) == 0: self.fork() self._branches[self._cbi].add(*objects) self._fitted = False def nfeatures(self) -> int: """Returns the total number of features of all branches combined. :rtype: int """ return sum([branch.nfeatures() for branch in self._branches]) def configure(self, **kwargs: Any): """Makes changes to the default configuration of a all branches if arguments differ from ``None``. :param kwargs: For possible options, have a look at :meth:`fruits.core.fruit.FruitBranch.configure`. :type kwargs: Any """ for branch in self._branches: branch.configure(**kwargs) def fit(self, X: np.ndarray): """Fits all branches to the given data. :param X: (Multidimensional) time series dataset as an array of three dimensions. Have a look at :meth:`~fruits.scope.force_input_shape`. :type X: np.ndarray """ for branch in self._branches: branch.fit(X) self._fitted = True def transform(self, X: np.ndarray, callbacks: List[AbstractCallback] = []) -> np.ndarray: """Returns a two dimensional array of all features from all branches this Fruit object contains. :param X: (Multidimensional) time series dataset as an array of three dimensions. Have a look at :meth:`~fruits.scope.force_input_shape`. :type X: np.ndarray :param callbacks: List of callbacks. To write your own callback, override the class :class:`~fruits.core.callback.AbstractCallback`., defaults to None :type callbacks: List[AbstractCallback], optional :rtype: np.ndarray :raises: RuntimeError if Fruit.fit wasn't called """ if not self._fitted: raise RuntimeError("Missing call of self.fit") result = np.zeros((X.shape[0], self.nfeatures())) index = 0 for branch in self._branches: for callback in callbacks: callback.on_next_branch() k = branch.nfeatures() result[:, index:index+k] = branch.transform(X, callbacks) index += k result = np.nan_to_num(result, copy=False, nan=0.0) return result def fit_transform(self, X: np.ndarray) -> np.ndarray: """Fits all branches to the given dataset and returns the transformed results of X from all branches. 
:param X: (Multidimensional) time series dataset :type X: np.ndarray :returns: Two dimensional feature array :rtype: np.ndarray """ self.fit(X) return self.transform(X) def summary(self) -> str: """Returns a summary of this object. The summary contains a summary for each FruitBranch in this Fruit object. :rtype: str """ summary = "{:=^80}".format(f"Summary of fruits.Fruit: '{self.name}'") summary += f"\nBranches: {len(self.branches())}" summary += f"\nFeatures: {self.nfeatures()}" for branch in self.branches(): summary += "\n\n" + branch.summary() summary += "\n{:=^80}".format(f"End of Summary") return summary def copy(self) -> "Fruit": """Creates a shallow copy of this Fruit object. This also creates shallow copies of all branches in this object. :rtype: Fruit """ copy_ = Fruit(self.name+" (Copy)") for branch in self._branches: copy_.fork(branch.copy()) return copy_ def deepcopy(self) -> "Fruit": """Creates a deep copy of this Fruit object. This also creates deep copies of all branches in this object. :rtype: Fruit """ copy_ = Fruit(self.name+" (Copy)") for branch in self._branches: copy_.fork(branch.deepcopy()) return copy_ class FruitBranch: """One branch of a Fruit object. A FruitBranch object extracts values from time series data that are somehow representative of the input data. The user can customize any of the following three steps. - Preparing data: Apply functions at the start of the extraction procedure. There are many so called :class:`~fruits.preparation.abstract.DataPreparateur` objects in fruits available for preprocessing. The preparateurs will be applied sequentially to the input data. - Calculating Iterated Sums: The preprocessed data is now used to calculate the iterated sums signature for different :class:`~fruits.words.word.Word` objects the user can specify. - Extracting Features: Each :class:`~fruits.sieving.abstract.FeatureSieve` added to the branch will be fitted on the iterated sums from the previous step. The branch then returns an array of numbers (the transformed results from those sieves), i.e. the features for each time series. """ def configure(self, mode: str = None, batch_size: int = None, fit_sample_size: Union[float, int] = None): """Makes changes to the default configuration of a fruit branch if arguments differ from ``None``. :param mode: See :meth:`fruits.signature.iss.SignatureCalculator.transform`, defaults to None :type mode: str, optional :param batch_size: See :meth:`~ruits.signature.iss.SignatureCalculator.transform`, defaults to None :type batch_size: int, optional :param fit_sample_size: Size of the random time series sample that is used for fitting. This is represented as a float which will be multiplied by ``X.shape[0]`` or ``1`` for one random time series., defaults to 1 :type fit_sample_size: Union[float, int] """ if mode is not None: self._calculator_options["mode"] = mode if batch_size is not None: self._calculator_options["batch_size"] = batch_size if fit_sample_size is not None: self._fit_sample_size = fit_sample_size def add_preparateur(self, preparateur: DataPreparateur): """Adds a preparateur to the branch. :type preparateur: DataPreparateur """ if not isinstance(preparateur, DataPreparateur): raise TypeError self._preparateurs.append(preparateur) self._fitted = False def get_preparateurs(self) -> List[DataPreparateur]: """Returns a list of all preparateurs added to the branch. 
:rtype: List[DataPreparateur] """ return self._preparateurs def clear_preparateurs(self): """Removes all preparateurs that were added to this branch.""" self._preparateurs = [] self._fitted = False def add_word(self, word: Word): """Adds a word to the branch. :type word: Word """ if not isinstance(word, Word): raise TypeError self._words.append(word) self._fitted = False def get_words(self) -> List[Word]: """Returns a list of all words in the branch. :rtype: List[Word] """ return self._words def clear_words(self): """Removes all words that were added to this branch.""" self._words = [] self._sieves_extended = [] self._fitted = False def add_sieve(self, sieve: FeatureSieve): """Appends a new feature sieve to the FruitBranch. :type sieve: FeatureSieve """ if not isinstance(sieve, FeatureSieve): raise TypeError self._sieves.append(sieve) self._fitted = False def get_sieves(self) -> List[FeatureSieve]: """Returns a list of all feature sieves added to the branch. :rtype: List[FeatureSieve] """ return self._sieves def clear_sieves(self): """Removes all feature sieves that were added to this branch.""" self._sieves = [] self._sieve_prerequisites = None self._sieves_extended = [] self._fitted = False def add(self, *objects: Union[FitTransform, Word, type]): """Adds one or multiple object(s) to the branch. :type objects: One or more objects of the following types: - :class:`~fruits.preparation.abstract.DataPreparateur` - :class:`~fruits.words.word.Word` - :class:`~fruits.sieving.abstract.FeatureSieve` """ objects_flattened = np.array(objects, dtype=object).flatten() for obj in objects_flattened: if inspect.isclass(obj): obj = obj() if isinstance(obj, DataPreparateur): self.add_preparateur(obj) elif isinstance(obj, Word): self.add_word(obj) elif isinstance(obj, FeatureSieve): self.add_sieve(obj) else: raise TypeError("Cannot add variable of type"+str(type(obj))) def clear(self): """Clears all settings, configurations and calculated results the branch has. After the branch is cleared, it has the same settings as a newly created FruitBranch object. """ self.clear_preparateurs() self.clear_words() self.clear_sieves() self._calculator_options = {"batch_size": 1, "mode": "single"} def nfeatures(self) -> int: """Returns the total number of features the current configuration produces. :rtype: int """ if self._calculator_options["mode"] == "extended": return ( sum([s.nfeatures() for s in self._sieves]) * CachePlan(self._words).n_iterated_sums( list(range(len(self._words))) ) ) else: return ( sum([s.nfeatures() for s in self._sieves]) * len(self._words) ) def fit(self, X: np.ndarray): """Fits the branch to the given dataset. What this action explicitly does depends on the FruitBranch configuration. :param X: (Multidimensional) time series dataset as an array of three dimensions. Have a look at :meth:`~fruits.scope.force_input_shape`. 
:type X: np.ndarray """ self._compile() self._get_cache(X) prepared_data = self._select_fit_sample(X) for prep in self._preparateurs: prep.fit(prepared_data) prepared_data = prep.transform(prepared_data, cache=self._cache) self._sieves_extended = [] iss_calculations = SignatureCalculator().transform( prepared_data, words=self._words, **self._calculator_options )[0] for iterated_data in iss_calculations: iterated_data = iterated_data.reshape(iterated_data.shape[0] * iterated_data.shape[1], iterated_data.shape[2]) sieves_copy = [sieve.copy() for sieve in self._sieves] for sieve in sieves_copy: sieve.fit(iterated_data[:, :]) self._sieves_extended.append(sieves_copy) self._fitted = True def transform(self, X: np.ndarray, callbacks: List[AbstractCallback] = []) -> np.ndarray: """Transforms the given time series dataset. The results are the calculated features for the different time series. :param X: (Multidimensional) time series dataset as an array of three dimensions. Have a look at :meth:`~fruits.scope.force_input_shape`. :type X: np.ndarray :param callbacks: List of callbacks. To write your own callback, override the class :class:`~fruits.core.callback.AbstractCallback`., defaults to [] :type callbacks: List[AbstractCallback], optional :rtype: np.ndarray :raises: RuntimeError if ``self.fit`` wasn't called """ if not self._fitted: raise RuntimeError("Missing call of self.fit") self._get_cache(X) prepared_data = force_input_shape(X) for prep in self._preparateurs: prepared_data = prep.transform(prepared_data, cache=self._cache) for callback in callbacks: callback.on_preparateur(prepared_data) for callback in callbacks: callback.on_preparation_end(prepared_data) sieved_data = np.zeros((prepared_data.shape[0], self.nfeatures())) k = 0 iss_calculations = SignatureCalculator().transform( prepared_data, words=self._words, **self._calculator_options )[0] for i, iterated_data in enumerate(iss_calculations): for callback in callbacks: callback.on_iterated_sum(iterated_data) for sieve in self._sieves_extended[i]: nf = sieve.nfeatures() new_features = nf * iterated_data.shape[1] for it in range(iterated_data.shape[1]): sieved_data[:, k+it*nf:k+(it+1)*nf] = sieve.transform( iterated_data[:, it, :], cache=self._cache, ) for callback in callbacks: callback.on_sieve(sieved_data[k:k+new_features]) k += new_features for callback in callbacks: callback.on_sieving_end(sieved_data) return sieved_data def fit_transform(self, X: np.ndarray) -> np.ndarray: """This function does the same that calling ``self.fit(X)`` and ``self.transform(X)`` consecutively does. :param X: (Multidimensional) time series dataset as an array of three dimensions. Have a look at `:meth:`~fruits.scope.force_input_shape`. :type X: np.ndarray :returns: Array of features. :rtype: np.ndarray """ self.fit(X) return self.transform(X) def summary(self) -> str: """Returns a summary of this object. The summary contains all added preparateurs, words and sieves. :rtype: str """ summary = "{:-^80}".format("fruits.FruitBranch") summary += f"\nNumber of features: {self.nfeatures()}" summary += f"\n\nPreparateurs ({len(self._preparateurs)}): " if len(self._preparateurs) == 0: summary += "-" else: summary += "\n\t+ " + \ "\n\t+ ".join([str(x) for x in self._preparateurs]) summary += f"\nIterators ({len(self._words)}): " if len(self._words) == 0: summary += "-" elif len(self._words) > 10: summary += "\n\t+ " + \ "\n\t+ ".join([str(x) for x in self._words[:9]]) summary += "\n\t..." 
else: summary += "\n\t+ " + \ "\n\t+ ".join([str(x) for x in self._words]) summary += f"\nSieves ({len(self._sieves)}): " if len(self._sieves) == 0: summary += "-" else: for x in self._sieves: lines = x.summary().split("\n") summary += "\n\t+ " + lines[0] summary += "\n\t " summary += "\n\t ".join(lines[1:]) return summary def copy(self) -> "FruitBranch": """Returns a shallow copy of this FruitBranch object. :returns: Copy of the branch with same settings but all calculations done erased. :rtype: FruitBranch """ copy_ = FruitBranch() for preparateur in self._preparateurs: copy_.add(preparateur) for iterator in self._words: copy_.add(iterator) for sieve in self._sieves: copy_.add(sieve) return copy_ def deepcopy(self) -> "FruitBranch": """Returns a deep copy of this FruitBranch object. :returns: Deepcopy of the branch with same settings but all calculations done erased. :rtype: FruitBranch """ copy_ = FruitBranch() for preparateur in self._preparateurs: copy_.add(preparateur.copy()) for iterator in self._words: copy_.add(iterator.copy()) for sieve in self._sieves: copy_.add(sieve.copy()) copy_._calculator_options = self._calculator_options.copy() return copy_
[ 11748, 10104, 198, 6738, 19720, 1330, 7343, 11, 4479, 11, 5345, 11, 4377, 198, 198, 11748, 299, 32152, 355, 45941, 198, 198, 6738, 15921, 13, 23870, 1330, 34088, 11, 1766, 40972, 576, 30562, 198, 6738, 15921, 13, 29982, 1330, 2700, 62, ...
2.217981
8,409
import os
import argparse
import subprocess
import socket
import sys

import click
from django.core.management import execute_from_command_line

from workoutizer.settings import WORKOUTIZER_DIR, WORKOUTIZER_DB_PATH, TRACKS_DIR
from workoutizer import __version__

BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SETUP_DIR = os.path.join(BASE_DIR, 'setup')
os.environ["DJANGO_SETTINGS_MODULE"] = "workoutizer.settings"

example_rpi_cmd = "wkz --setup_rpi vendor_id=091e product_id=4b48"
url_help = 'specify ip address and port pair, like: address:port'


cli.add_command(upgrade)
cli.add_command(version)
cli.add_command(init)
cli.add_command(setup_rpi)
cli.add_command(run)
cli.add_command(manage)
cli.add_command(wkz_as_service)

if __name__ == '__main__':
    cli()
[ 11748, 28686, 198, 11748, 1822, 29572, 198, 11748, 850, 14681, 198, 11748, 17802, 198, 11748, 25064, 198, 198, 11748, 3904, 198, 6738, 42625, 14208, 13, 7295, 13, 27604, 1330, 12260, 62, 6738, 62, 21812, 62, 1370, 198, 198, 6738, 17578, ...
2.661074
298
"""Provide trimming of input reads from Fastq or BAM files. """ import os import sys import tempfile from bcbio.utils import (file_exists, safe_makedir, replace_suffix, append_stem, is_pair, replace_directory, map_wrap) from bcbio.log import logger from bcbio.bam import fastq from bcbio.provenance import do from Bio.Seq import Seq from itertools import izip, repeat from bcbio.distributed.transaction import file_transaction from bcbio.pipeline import config_utils SUPPORTED_ADAPTERS = { "illumina": ["AACACTCTTTCCCT", "AGATCGGAAGAGCG"], "truseq": ["AGATCGGAAGAG"], "polya": ["AAAAAAAAAAAAA"], "nextera": ["AATGATACGGCGA", "CAAGCAGAAGACG"]} QUALITY_FLAGS = {5: ['"E"', '"&"'], 20: ['"T"', '"5"']} def trim_read_through(fastq_files, dirs, lane_config): """ for small insert sizes, the read length can be longer than the insert resulting in the reverse complement of the 3' adapter being sequenced. this takes adapter sequences and trims the only the reverse complement of the adapter MYSEQUENCEAAAARETPADA -> MYSEQUENCEAAAA (no polyA trim) """ quality_format = _get_quality_format(lane_config) to_trim = _get_sequences_to_trim(lane_config) out_files = _get_read_through_trimmed_outfiles(fastq_files, dirs) fixed_files = append_stem(out_files, ".fixed") if all(map(file_exists, fixed_files)): return fixed_files logger.info("Trimming %s from the 3' end of reads in %s using " "cutadapt." % (", ".join(to_trim), ", ".join(fastq_files))) cores = lane_config["algorithm"].get("num_cores", 1) out_files = _cutadapt_trim(fastq_files, quality_format, to_trim, out_files, cores) fixed_files = remove_short_reads(out_files, dirs, lane_config) return fixed_files def remove_short_reads(fastq_files, dirs, lane_config): """ remove reads from a single or pair of fastq files which fall below a length threshold (30 bases) """ min_length = int(lane_config["algorithm"].get("min_read_length", 20)) supplied_quality_format = _get_quality_format(lane_config) if supplied_quality_format == "illumina": quality_format = "fastq-illumina" else: quality_format = "fastq-sanger" if is_pair(fastq_files): fastq1, fastq2 = fastq_files out_files = fastq.filter_reads_by_length(fastq1, fastq2, quality_format, min_length) else: out_files = [fastq.filter_single_reads_by_length(fastq_files[0], quality_format, min_length)] map(os.remove, fastq_files) return out_files def _cutadapt_trim(fastq_files, quality_format, adapters, out_files, cores): """Trimming with cutadapt, using version installed with bcbio-nextgen. Uses the system executable to find the version next to our Anaconda Python. TODO: Could we use cutadapt as a library to avoid this? """ if quality_format == "illumina": quality_base = "64" else: quality_base = "33" # --times=2 tries twice remove adapters which will allow things like: # realsequenceAAAAAAadapter to remove both the poly-A and the adapter # this behavior might not be what we want; we could also do two or # more passes of cutadapt cutadapt = os.path.join(os.path.dirname(sys.executable), "cutadapt") base_cmd = [cutadapt, "--times=" + "2", "--quality-base=" + quality_base, "--quality-cutoff=5", "--format=fastq", "--minimum-length=0"] adapter_cmd = map(lambda x: "--adapter=" + x, adapters) base_cmd.extend(adapter_cmd) if all(map(file_exists, out_files)): return out_files with file_transaction(out_files) as tmp_out_files: if isinstance(tmp_out_files, basestring): tmp_out_files = [tmp_out_files] map(_run_cutadapt_on_single_file, izip(repeat(base_cmd), fastq_files, tmp_out_files)) return out_files
[ 37811, 15946, 485, 15797, 2229, 286, 5128, 9743, 422, 12549, 80, 393, 347, 2390, 3696, 13, 198, 37811, 198, 11748, 28686, 198, 11748, 25064, 198, 11748, 20218, 7753, 198, 198, 6738, 275, 21101, 952, 13, 26791, 1330, 357, 7753, 62, 1069,...
2.372174
1,725
from rest_framework.response import Response
from rest_framework.views import APIView
from django.shortcuts import get_object_or_404
from dashboard.models import projects
from .models import AnalysisConfig, SolverResults, SolverProgress, DockerLogs
from rest_framework.parsers import FormParser, JSONParser, MultiPartParser, FileUploadParser
from rest_framework import status
import docker
import os
import json
from zipfile import ZipFile
from django.http import HttpResponse
from threading import Thread
from time import sleep
from datetime import datetime


def streamDockerLog(container, project):
    for line in container.logs(stream=True):
        logs = get_object_or_404(DockerLogs, project=project)
        now = datetime.now()
        current_time = now.strftime("[%H:%M:%S]: ")
        logs.log = current_time + str(line.strip(), 'utf-8') + "\n" + logs.log
        logs.save()
[ 6738, 1334, 62, 30604, 13, 26209, 1330, 18261, 198, 6738, 1334, 62, 30604, 13, 33571, 1330, 3486, 3824, 769, 198, 6738, 42625, 14208, 13, 19509, 23779, 1330, 651, 62, 15252, 62, 273, 62, 26429, 198, 6738, 30415, 13, 27530, 1330, 4493, ...
3.147887
284
# coding: utf-8
"""``AppFS`` opener definition.
"""

from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from .base import Opener
from .errors import OpenerError
from ..subfs import ClosingSubFS
from .. import appfs
[ 2, 19617, 25, 3384, 69, 12, 23, 198, 37811, 15506, 4677, 10652, 15506, 21996, 6770, 13, 198, 37811, 198, 198, 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 198, 6738, 11593, 37443, 834, 1330, 3601, 62, 8818, 198, 6738, 11593, 37443, ...
3.5375
80
import numpy as np
from sklearn.metrics import fbeta_score, roc_curve, auc, confusion_matrix
from sklearn.decomposition import PCA
from sklearn import random_projection
from sklearn import svm
from sklearn.ensemble import IsolationForest
import matplotlib.pyplot as plt
from keras.layers import Dense, Input, Dropout
from keras.models import Model
from keras import regularizers
from keras.models import Sequential
from keras.optimizers import Adam
from keras.regularizers import l2
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import AdaBoostClassifier
import xgboost as xgb
[ 11748, 299, 32152, 355, 45941, 198, 6738, 1341, 35720, 13, 4164, 10466, 1330, 277, 31361, 62, 26675, 11, 686, 66, 62, 22019, 303, 11, 257, 1229, 11, 10802, 62, 6759, 8609, 198, 6738, 1341, 35720, 13, 12501, 296, 9150, 1330, 4217, 32, ...
3.483146
178
# encoding: utf-8
import unittest
import os
import sys
sys.path.append(os.getcwd())
from notifo import Notifo, send_message


if __name__ == '__main__':
    unittest.main()
[ 2, 21004, 25, 3384, 69, 12, 23, 198, 11748, 555, 715, 395, 198, 11748, 28686, 198, 11748, 25064, 198, 17597, 13, 6978, 13, 33295, 7, 418, 13, 1136, 66, 16993, 28955, 198, 6738, 407, 361, 78, 1330, 1892, 361, 78, 11, 3758, 62, 2050...
2.567164
67
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#

import numpy as np

import .Selection as Sel
import .Exploration as Exp
import .CatUtils as CU

#-----------------------------------------------------------------------------------------

#-----------------------------------------------------------------------------------------
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 198, 198, 11748, 299, 32152, 355, 45941, 198, 198, 11748, 764, 4653, 1564, 355, 15300, 198, 11748, 764, 18438, 6944,...
4.954545
66
from typing import List
from presidio_analyzer import EntityRecognizer, RecognizerResult, AnalysisExplanation
from presidio_analyzer.nlp_engine import NlpArtifacts

from hebsafeharbor.common.terms_recognizer import TermsRecognizer
[ 6738, 19720, 1330, 7343, 198, 6738, 906, 312, 952, 62, 38200, 9107, 1330, 20885, 6690, 2360, 7509, 11, 31517, 7509, 23004, 11, 14691, 3109, 11578, 341, 198, 6738, 906, 312, 952, 62, 38200, 9107, 13, 21283, 79, 62, 18392, 1330, 399, 34...
3.569231
65
# encoding: utf-8 # # Copyright (C) 2018 ycmd contributors # # This file is part of ycmd. # # ycmd is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ycmd is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ycmd. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import from __future__ import unicode_literals from __future__ import print_function from __future__ import division from hamcrest.core.base_matcher import BaseMatcher from hamcrest import ( assert_that, contains, contains_string, equal_to, has_entries, has_entry, matches_regexp ) from pprint import pprint import requests import os.path from ycmd.tests.clangd import ( IsolatedYcmd, SharedYcmd, PathToTestFile, RunAfterInitialized ) from ycmd.tests.test_utils import ( BuildRequest, ChunkMatcher, CombineRequest, LineColMatcher, LocationMatcher, ErrorMatcher, WithRetry, WaitUntilCompleterServerReady ) from ycmd.utils import ReadFile # This test is isolated to trigger objcpp hooks, rather than fetching completer # from cache. def Subcommands_GoTo_all_test(): tests = [ # Local::x -> definition/declaration of x { 'req': ( 'goto.cc', 23, 21 ), 'res': ( 'goto.cc', 4, 9 ) }, # Local::in_line -> definition/declaration of Local::in_line { 'req': ( 'goto.cc', 24, 26 ), 'res': ( 'goto.cc', 6, 10 ) }, # Local -> definition/declaration of Local { 'req': ( 'goto.cc', 24, 16 ), 'res': ( 'goto.cc', 2, 11 ) }, # Local::out_of_line -> definition of Local::out_of_line { 'req': ( 'goto.cc', 25, 27 ), 'res': ( 'goto.cc', 14, 13 ) }, # GoToDeclaration alternates between definition and declaration { 'req': ( 'goto.cc', 14, 13 ), 'res': ( 'goto.cc', 11, 10 ) }, { 'req': ( 'goto.cc', 11, 10 ), 'res': ( 'goto.cc', 14, 13 ) }, # test -> definition and declaration of test { 'req': ( 'goto.cc', 21, 5 ), 'res': ( 'goto.cc', 19, 5 ) }, { 'req': ( 'goto.cc', 19, 5 ), 'res': ( 'goto.cc', 21, 5 ) }, # Unicde { 'req': ( 'goto.cc', 34, 9 ), 'res': ( 'goto.cc', 32, 26 ) }, # Another_Unicde { 'req': ( 'goto.cc', 36, 17 ), 'res': ( 'goto.cc', 32, 54 ) }, { 'req': ( 'goto.cc', 36, 25 ), 'res': ( 'goto.cc', 32, 54 ) }, { 'req': ( 'goto.cc', 38, 3 ), 'res': ( 'goto.cc', 36, 28 ) }, # Expected failures { 'req': ( 'goto.cc', 13, 1 ), 'res': 'Cannot jump to location' }, { 'req': ( 'goto.cc', 16, 6 ), 'res': 'Cannot jump to location' }, ] for test in tests: for cmd in [ 'GoToDefinition', 'GoTo', 'GoToImprecise' ]: yield RunGoToTest_all, '', cmd, test def FixIt_Check_cpp11_Ins( results ): # First fixit # switch(A()) { // expected-error{{explicit conversion to}} assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( 'static_cast<int>(' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 16, 'column_num': 10 } ), 'end' : has_entries( { 'line_num': 16, 'column_num': 10 } ), } ), } ), has_entries( { 'replacement_text': equal_to( ')' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 16, 'column_num': 13 } ), 'end' : has_entries( { 'line_num': 16, 'column_num': 13 } ), } ), } ) ), 'location': has_entries( { 'line_num': 16, 
'column_num': 0 } ) } ) ) } ) ) def FixIt_Check_cpp11_InsMultiLine( results ): # Similar to FixIt_Check_cpp11_1 but inserts split across lines # assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( 'static_cast<int>(' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 26, 'column_num': 7 } ), 'end' : has_entries( { 'line_num': 26, 'column_num': 7 } ), } ), } ), has_entries( { 'replacement_text': equal_to( ')' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 28, 'column_num': 2 } ), 'end' : has_entries( { 'line_num': 28, 'column_num': 2 } ), } ), } ) ), 'location': has_entries( { 'line_num': 25, 'column_num': 14 } ) } ) ) } ) ) def FixIt_Check_cpp11_Del( results ): # Removal of :: assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 35, 'column_num': 7 } ), 'end' : has_entries( { 'line_num': 35, 'column_num': 9 } ), } ), } ) ), 'location': has_entries( { 'line_num': 35, 'column_num': 7 } ) } ) ) } ) ) def FixIt_Check_cpp11_Repl( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( 'foo' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 40, 'column_num': 6 } ), 'end' : has_entries( { 'line_num': 40, 'column_num': 9 } ), } ), } ) ), 'location': has_entries( { 'line_num': 40, 'column_num': 6 } ) } ) ) } ) ) def FixIt_Check_cpp11_DelAdd( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 48, 'column_num': 3 } ), 'end' : has_entries( { 'line_num': 48, 'column_num': 4 } ), } ), } ), has_entries( { 'replacement_text': equal_to( '~' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 48, 'column_num': 9 } ), 'end' : has_entries( { 'line_num': 48, 'column_num': 9 } ), } ), } ), ), 'location': has_entries( { 'line_num': 48, 'column_num': 3 } ) } ), has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '= default;' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 48, 'column_num': 15 } ), 'end' : has_entries( { 'line_num': 48, 'column_num': 17 } ), } ), } ), ), 'location': has_entries( { 'line_num': 48, 'column_num': 3 } ) } ), ) } ) ) def FixIt_Check_objc( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( 'id' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 5, 'column_num': 3 } ), 'end' : has_entries( { 'line_num': 5, 'column_num': 3 } ), } ), } ) ), 'location': has_entries( { 'line_num': 5, 'column_num': 3 } ) } ) ) } ) ) def FixIt_Check_objc_NoFixIt( results ): # and finally, a warning with no fixits assert_that( results, equal_to( { 'fixits': [] } ) ) def FixIt_Check_cpp11_MultiFirst( results ): assert_that( results, has_entries( { 'fixits': contains( # first fix-it at 54,16 has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( 'foo' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 16 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 19 } ), } ), } ) ), 'location': has_entries( { 'line_num': 54, 'column_num': 15 } ) } ), # second fix-it at 54,52 
has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 52 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 53 } ), } ), } ), has_entries( { 'replacement_text': equal_to( '~' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 58 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 58 } ), } ), } ), ), 'location': has_entries( { 'line_num': 54, 'column_num': 15 } ) } ), has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '= default;' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 64 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 67 } ), } ), } ) ), 'location': has_entries( { 'line_num': 54, 'column_num': 15 } ) } ), ) } ) ) def FixIt_Check_cpp11_MultiSecond( results ): assert_that( results, has_entries( { 'fixits': contains( # first fix-it at 54,16 has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( 'foo' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 16 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 19 } ), } ), } ) ), 'location': has_entries( { 'line_num': 54, 'column_num': 51 } ) } ), # second fix-it at 54,52 has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 52 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 53 } ), } ), } ), has_entries( { 'replacement_text': equal_to( '~' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 58 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 58 } ), } ), } ), ), 'location': has_entries( { 'line_num': 54, 'column_num': 51 } ) } ), has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '= default;' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 54, 'column_num': 64 } ), 'end' : has_entries( { 'line_num': 54, 'column_num': 67 } ), } ), } ) ), 'location': has_entries( { 'line_num': 54, 'column_num': 51 } ) } ), ) } ) ) def FixIt_Check_unicode_Ins( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'chunks': contains( has_entries( { 'replacement_text': equal_to( '=' ), 'range': has_entries( { 'start': has_entries( { 'line_num': 21, 'column_num': 9 } ), 'end' : has_entries( { 'line_num': 21, 'column_num': 11 } ), } ), } ) ), 'location': has_entries( { 'line_num': 21, 'column_num': 16 } ) } ) ) } ) ) def FixIt_Check_cpp11_Note( results ): assert_that( results, has_entries( { 'fixits': contains( # First note: put parens around it has_entries( { 'text': contains_string( 'parentheses around the assignment' ), 'chunks': contains( ChunkMatcher( '(', LineColMatcher( 59, 8 ), LineColMatcher( 59, 8 ) ), ChunkMatcher( ')', LineColMatcher( 61, 12 ), LineColMatcher( 61, 12 ) ) ), 'location': LineColMatcher( 60, 1 ), } ), # Second note: change to == has_entries( { 'text': contains_string( '==' ), 'chunks': contains( ChunkMatcher( '==', LineColMatcher( 60, 8 ), LineColMatcher( 60, 9 ) ) ), 'location': LineColMatcher( 60, 1 ), } ), # Unresolved, requires /resolve_fixit request has_entries( { 'text': 'Extract subexpression to variable', 'resolve': True, 'command': has_entries( { 'command': 'clangd.applyTweak' } ) } ) ) } ) ) def FixIt_Check_cpp11_SpellCheck( results ): assert_that( results, has_entries( { 'fixits': contains( # Change to 
SpellingIsNotMyStrongPoint has_entries( { 'text': contains_string( "change 'SpellingIsNotMyStringPiont' to " "'SpellingIsNotMyStrongPoint'" ), 'chunks': contains( ChunkMatcher( 'SpellingIsNotMyStrongPoint', LineColMatcher( 72, 9 ), LineColMatcher( 72, 35 ) ) ), 'location': LineColMatcher( 72, 9 ), } ) ) } ) ) def FixIt_Check_cuda( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'text': contains_string( "change 'int' to 'void'" ), 'chunks': contains( ChunkMatcher( 'void', LineColMatcher( 3, 12 ), LineColMatcher( 3, 15 ) ) ), 'location': LineColMatcher( 3, 12 ), } ) ) } ) ) def FixIt_Check_SubexprExtract_Resolved( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'text': 'Extract subexpression to variable', 'chunks': contains( ChunkMatcher( 'auto dummy = foo(i + 3);\n ', LineColMatcher( 84, 3 ), LineColMatcher( 84, 3 ) ), ChunkMatcher( 'dummy', LineColMatcher( 84, 10 ), LineColMatcher( 84, 22 ) ), ) } ) ) } ) ) def FixIt_Check_RawStringReplace_Resolved( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'text': 'Convert to raw string', 'chunks': contains( ChunkMatcher( 'R"(\\\\r\\asd\n\\v)"', LineColMatcher( 80, 19 ), LineColMatcher( 80, 36 ) ), ) } ) ) } ) ) def FixIt_Check_MacroExpand_Resolved( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'text': "Expand macro 'DECLARE_INT'", 'chunks': contains( ChunkMatcher( 'int i', LineColMatcher( 83, 3 ), LineColMatcher( 83, 17 ) ), ) } ) ) } ) ) def FixIt_Check_AutoExpand_Resolved( results ): assert_that( results, has_entries( { 'fixits': contains( has_entries( { 'text': "Expand auto type", 'chunks': contains( ChunkMatcher( 'const char *', LineColMatcher( 80, 1 ), LineColMatcher( 80, 6 ) ), ) } ) ) } ) )
[ 2, 21004, 25, 3384, 69, 12, 23, 198, 2, 198, 2, 15069, 357, 34, 8, 2864, 331, 28758, 20420, 198, 2, 198, 2, 770, 2393, 318, 636, 286, 331, 28758, 13, 198, 2, 198, 2, 331, 28758, 318, 1479, 3788, 25, 345, 460, 17678, 4163, 340,...
1.981642
8,171
""" Config class containing all the settings for running sentiment scoring tool """ import jsonpickle
[ 37811, 198, 16934, 1398, 7268, 477, 262, 6460, 329, 2491, 15598, 9689, 2891, 198, 37811, 198, 198, 11748, 33918, 27729, 293 ]
4.857143
21
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Testing Invert op in DE
"""
import numpy as np

import mindspore.dataset as ds
import mindspore.dataset.transforms.py_transforms
import mindspore.dataset.vision.py_transforms as F
import mindspore.dataset.vision.c_transforms as C
from mindspore import log as logger
from util import visualize_list, save_and_check_md5, diff_mse

DATA_DIR = "../data/dataset/testImageNetData/train/"

GENERATE_GOLDEN = False


def test_invert_py(plot=False):
    """
    Test Invert python op
    """
    logger.info("Test Invert Python op")

    # Original Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_original = mindspore.dataset.transforms.py_transforms.Compose([F.Decode(),
                                                                              F.Resize((224, 224)),
                                                                              F.ToTensor()])

    ds_original = data_set.map(operations=transforms_original, input_columns="image")

    ds_original = ds_original.batch(512)

    for idx, (image, _) in enumerate(ds_original):
        if idx == 0:
            images_original = np.transpose(image.asnumpy(), (0, 2, 3, 1))
        else:
            images_original = np.append(images_original,
                                        np.transpose(image.asnumpy(), (0, 2, 3, 1)),
                                        axis=0)

    # Color Inverted Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_invert = mindspore.dataset.transforms.py_transforms.Compose([F.Decode(),
                                                                            F.Resize((224, 224)),
                                                                            F.Invert(),
                                                                            F.ToTensor()])

    ds_invert = data_set.map(operations=transforms_invert, input_columns="image")

    ds_invert = ds_invert.batch(512)

    for idx, (image, _) in enumerate(ds_invert):
        if idx == 0:
            images_invert = np.transpose(image.asnumpy(), (0, 2, 3, 1))
        else:
            images_invert = np.append(images_invert,
                                      np.transpose(image.asnumpy(), (0, 2, 3, 1)),
                                      axis=0)

    num_samples = images_original.shape[0]
    mse = np.zeros(num_samples)
    for i in range(num_samples):
        mse[i] = np.mean((images_invert[i] - images_original[i]) ** 2)
    logger.info("MSE= {}".format(str(np.mean(mse))))

    if plot:
        visualize_list(images_original, images_invert)


def test_invert_c(plot=False):
    """
    Test Invert Cpp op
    """
    logger.info("Test Invert cpp op")

    # Original Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_original = [C.Decode(), C.Resize(size=[224, 224])]

    ds_original = data_set.map(operations=transforms_original, input_columns="image")

    ds_original = ds_original.batch(512)

    for idx, (image, _) in enumerate(ds_original):
        if idx == 0:
            images_original = image.asnumpy()
        else:
            images_original = np.append(images_original,
                                        image.asnumpy(),
                                        axis=0)

    # Invert Images
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transform_invert = [C.Decode(), C.Resize(size=[224, 224]), C.Invert()]

    ds_invert = data_set.map(operations=transform_invert, input_columns="image")

    ds_invert = ds_invert.batch(512)

    for idx, (image, _) in enumerate(ds_invert):
        if idx == 0:
            images_invert = image.asnumpy()
        else:
            images_invert = np.append(images_invert,
                                      image.asnumpy(),
                                      axis=0)

    if plot:
        visualize_list(images_original, images_invert)

    num_samples = images_original.shape[0]
    mse = np.zeros(num_samples)
    for i in range(num_samples):
        mse[i] = diff_mse(images_invert[i], images_original[i])
    logger.info("MSE= {}".format(str(np.mean(mse))))


def test_invert_py_c(plot=False):
    """
    Test Invert Cpp op and python op
    """
    logger.info("Test Invert cpp and python op")

    # Invert Images in cpp
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
    data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224))],
                            input_columns=["image"])

    ds_c_invert = data_set.map(operations=C.Invert(), input_columns="image")

    ds_c_invert = ds_c_invert.batch(512)

    for idx, (image, _) in enumerate(ds_c_invert):
        if idx == 0:
            images_c_invert = image.asnumpy()
        else:
            images_c_invert = np.append(images_c_invert,
                                        image.asnumpy(),
                                        axis=0)

    # invert images in python
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
    data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224))],
                            input_columns=["image"])

    transforms_p_invert = mindspore.dataset.transforms.py_transforms.Compose([lambda img: img.astype(np.uint8),
                                                                              F.ToPIL(),
                                                                              F.Invert(),
                                                                              np.array])

    ds_p_invert = data_set.map(operations=transforms_p_invert, input_columns="image")

    ds_p_invert = ds_p_invert.batch(512)

    for idx, (image, _) in enumerate(ds_p_invert):
        if idx == 0:
            images_p_invert = image.asnumpy()
        else:
            images_p_invert = np.append(images_p_invert,
                                        image.asnumpy(),
                                        axis=0)

    num_samples = images_c_invert.shape[0]
    mse = np.zeros(num_samples)
    for i in range(num_samples):
        mse[i] = diff_mse(images_p_invert[i], images_c_invert[i])
    logger.info("MSE= {}".format(str(np.mean(mse))))
    if plot:
        visualize_list(images_c_invert, images_p_invert, visualize_mode=2)


def test_invert_one_channel():
    """
    Test Invert cpp op with one channel image
    """
    logger.info("Test Invert C Op With One Channel Images")

    c_op = C.Invert()

    try:
        data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)
        data_set = data_set.map(operations=[C.Decode(), C.Resize((224, 224)),
                                            lambda img: np.array(img[:, :, 0])],
                                input_columns=["image"])

        data_set.map(operations=c_op, input_columns="image")

    except RuntimeError as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "The shape" in str(e)


def test_invert_md5_py():
    """
    Test Invert python op with md5 check
    """
    logger.info("Test Invert python op with md5 check")

    # Generate dataset
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_invert = mindspore.dataset.transforms.py_transforms.Compose([F.Decode(),
                                                                            F.Invert(),
                                                                            F.ToTensor()])

    data = data_set.map(operations=transforms_invert, input_columns="image")
    # Compare with expected md5 from images
    filename = "invert_01_result_py.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)


def test_invert_md5_c():
    """
    Test Invert cpp op with md5 check
    """
    logger.info("Test Invert cpp op with md5 check")

    # Generate dataset
    data_set = ds.ImageFolderDataset(dataset_dir=DATA_DIR, shuffle=False)

    transforms_invert = [C.Decode(),
                         C.Resize(size=[224, 224]),
                         C.Invert(),
                         F.ToTensor()]

    data = data_set.map(operations=transforms_invert, input_columns="image")
    # Compare with expected md5 from images
    filename = "invert_01_result_c.npz"
    save_and_check_md5(data, filename, generate_golden=GENERATE_GOLDEN)


if __name__ == "__main__":
    test_invert_py(plot=False)
    test_invert_c(plot=False)
    test_invert_py_c(plot=False)
    test_invert_one_channel()
    test_invert_md5_py()
    test_invert_md5_c()
[ 2, 15069, 12131, 43208, 21852, 1766, 1539, 12052, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 198...
1.996522
4,600
from parameterized import parameterized
from numpy.testing import TestCase

from .. import candy
[ 6738, 11507, 1143, 1330, 11507, 1143, 198, 6738, 299, 32152, 13, 33407, 1330, 6208, 20448, 198, 198, 6738, 11485, 1330, 18550, 628 ]
4.454545
22
import time
import subprocess
import os

print os.uname()
if not os.uname()[0].startswith("Darw"):
	import pygame
	pygame.mixer.init()

# Plays a song
[ 11748, 640, 198, 11748, 850, 14681, 198, 11748, 28686, 198, 198, 4798, 28686, 13, 403, 480, 3419, 198, 361, 407, 28686, 13, 403, 480, 3419, 58, 15, 4083, 9688, 2032, 342, 7203, 32708, 86, 1, 2599, 198, 197, 11748, 12972, 6057, 198, ...
2.52459
61
from __future__ import absolute_import, unicode_literals, print_function
import mock
import unittest
import d43_aws_tools as aws_tools
from boto3.dynamodb.conditions import Attr
[ 6738, 11593, 37443, 834, 1330, 4112, 62, 11748, 11, 28000, 1098, 62, 17201, 874, 11, 3601, 62, 8818, 198, 11748, 15290, 198, 11748, 555, 715, 395, 198, 11748, 288, 3559, 62, 8356, 62, 31391, 355, 3253, 82, 62, 31391, 198, 6738, 275, ...
3.178571
56
from dash import Dash, html, dcc
import plotly.express as px
import pandas as pd

app = Dash(__name__)
server = app.server

# assume you have a "long-form" data frame
# see https://plotly.com/python/px-arguments/ for more options
df = pd.DataFrame({
    "Fruit": ["Apples", "Oranges", "Bananas", "Apples", "Oranges", "Bananas"],
    "Amount": [4, 1, 2, 2, 4, 5],
    "City": ["SF", "SF", "SF", "Montreal", "Montreal", "Montreal"]
})

fig = px.bar(df, x="Fruit", y="Amount", color="City", barmode="group")

app.layout = html.Div(children=[
    html.H1(children='Hello Dash'),

    html.Div(children='''
        Dash: A web application framework for your data.
    '''),

    dcc.Graph(
        id='example-graph',
        figure=fig
    )
])

if __name__ == '__main__':
    app.run_server(debug=True)
[ 6738, 14470, 1330, 16189, 11, 27711, 11, 288, 535, 198, 11748, 7110, 306, 13, 42712, 355, 279, 87, 198, 11748, 19798, 292, 355, 279, 67, 198, 198, 1324, 796, 16189, 7, 834, 3672, 834, 8, 198, 15388, 796, 598, 13, 15388, 198, 198, ...
2.45092
326
# pypi import six # local from ...lib import db as lib_db from ...lib import utils from ...model import objects as model_objects from ...model import utils as model_utils from . import formhandling # ============================================================================== def decode_args(getcreate_args): """ support for Python2/3 """ if six.PY3: for (k, v) in list(getcreate_args.items()): if isinstance(v, bytes): getcreate_args[k] = v.decode("utf8") return getcreate_args # standardized mapping for `model_utils.DomainsChallenged` to a formStash DOMAINS_CHALLENGED_FIELDS = { "http-01": "domain_names_http01", "dns-01": "domain_names_dns01", } def parse_AcmeAccountSelection( request, formStash, account_key_option=None, allow_none=None, require_contact=None, ): """ :param formStash: an instance of `pyramid_formencode_classic.FormStash` :param account_key_option: :param allow_none: :param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic """ account_key_pem = None account_key_pem_md5 = None dbAcmeAccount = None is_global_default = None # handle the explicit-option acmeAccountSelection = _AcmeAccountSelection() if account_key_option == "account_key_file": # this will handle form validation and raise errors. parser = AcmeAccountUploadParser(formStash) # this will have: `contact`, `private_key_cycle`, `private_key_technology` parser.require_upload(require_contact=require_contact) # update our object acmeAccountSelection.selection = "upload" acmeAccountSelection.upload_parsed = parser return acmeAccountSelection else: if account_key_option == "account_key_global_default": acmeAccountSelection.selection = "global_default" account_key_pem_md5 = formStash.results["account_key_global_default"] is_global_default = True elif account_key_option == "account_key_existing": acmeAccountSelection.selection = "existing" account_key_pem_md5 = formStash.results["account_key_existing"] elif account_key_option == "account_key_reuse": acmeAccountSelection.selection = "reuse" account_key_pem_md5 = formStash.results["account_key_reuse"] elif account_key_option == "none": if not allow_none: # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form( "This form does not support no AcmeAccount selection." 
) # note the lowercase "none"; this is an explicit "no item" selection # only certain routes allow this acmeAccountSelection.selection = "none" account_key_pem_md5 = None return acmeAccountSelection else: formStash.fatal_form( message="Invalid `account_key_option`", ) if not account_key_pem_md5: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=account_key_option, message="You did not provide a value" ) dbAcmeAccount = lib_db.get.get__AcmeAccount__by_pemMd5( request.api_context, account_key_pem_md5, is_active=True ) if not dbAcmeAccount: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=account_key_option, message="The selected AcmeAccount is not enrolled in the system.", ) if is_global_default and not dbAcmeAccount.is_global_default: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=account_key_option, message="The selected AcmeAccount is not the current default.", ) acmeAccountSelection.AcmeAccount = dbAcmeAccount return acmeAccountSelection # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form("There was an error validating your form.") def parse_PrivateKeySelection(request, formStash, private_key_option=None): private_key_pem = None private_key_pem_md5 = None PrivateKey = None # :class:`model.objects.PrivateKey` # handle the explicit-option privateKeySelection = _PrivateKeySelection() if private_key_option == "private_key_file": # this will handle form validation and raise errors. parser = _PrivateKeyUploadParser(formStash) parser.require_upload() # update our object privateKeySelection.selection = "upload" privateKeySelection.upload_parsed = parser privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["upload"] ) return privateKeySelection else: if private_key_option == "private_key_existing": privateKeySelection.selection = "existing" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["existing"] ) private_key_pem_md5 = formStash.results["private_key_existing"] elif private_key_option == "private_key_reuse": privateKeySelection.selection = "reuse" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["reuse"] ) private_key_pem_md5 = formStash.results["private_key_reuse"] elif private_key_option in ( "private_key_generate", "private_key_for_account_key", ): dbPrivateKey = lib_db.get.get__PrivateKey__by_id(request.api_context, 0) if not dbPrivateKey: formStash.fatal_field( field=private_key_option, message="Could not load the placeholder PrivateKey.", ) privateKeySelection.PrivateKey = dbPrivateKey if private_key_option == "private_key_generate": privateKeySelection.selection = "generate" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy["generate"] ) elif private_key_option == "private_key_for_account_key": privateKeySelection.selection = "private_key_for_account_key" privateKeySelection.private_key_strategy__requested = ( model_utils.PrivateKeySelection_2_PrivateKeyStrategy[ "private_key_for_account_key" ] ) return privateKeySelection else: # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form("Invalid `private_key_option`") if not private_key_pem_md5: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=private_key_option, 
message="You did not provide a value" ) dbPrivateKey = lib_db.get.get__PrivateKey__by_pemMd5( request.api_context, private_key_pem_md5, is_active=True ) if not dbPrivateKey: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field=private_key_option, message="The selected PrivateKey is not enrolled in the system.", ) privateKeySelection.PrivateKey = dbPrivateKey return privateKeySelection # `formStash.fatal_form()` will raise `FormInvalid()` formStash.fatal_form("There was an error validating your form.") def form_key_selection(request, formStash, require_contact=None): """ :param formStash: an instance of `pyramid_formencode_classic.FormStash` :param require_contact: ``True`` if required; ``False`` if not; ``None`` for conditional logic """ acmeAccountSelection = parse_AcmeAccountSelection( request, formStash, account_key_option=formStash.results["account_key_option"], require_contact=require_contact, ) if acmeAccountSelection.selection == "upload": key_create_args = acmeAccountSelection.upload_parsed.getcreate_args key_create_args["event_type"] = "AcmeAccount__insert" key_create_args[ "acme_account_key_source_id" ] = model_utils.AcmeAccountKeySource.from_string("imported") (dbAcmeAccount, _is_created,) = lib_db.getcreate.getcreate__AcmeAccount( request.api_context, **key_create_args ) acmeAccountSelection.AcmeAccount = dbAcmeAccount privateKeySelection = parse_PrivateKeySelection( request, formStash, private_key_option=formStash.results["private_key_option"], ) if privateKeySelection.selection == "upload": key_create_args = privateKeySelection.upload_parsed.getcreate_args key_create_args["event_type"] = "PrivateKey__insert" key_create_args[ "private_key_source_id" ] = model_utils.PrivateKeySource.from_string("imported") key_create_args["private_key_type_id"] = model_utils.PrivateKeyType.from_string( "standard" ) ( dbPrivateKey, _is_created, ) = lib_db.getcreate.getcreate__PrivateKey__by_pem_text( request.api_context, **key_create_args ) privateKeySelection.PrivateKey = dbPrivateKey elif privateKeySelection.selection == "generate": dbPrivateKey = lib_db.get.get__PrivateKey__by_id(request.api_context, 0) if not dbPrivateKey: formStash.fatal_field( field="private_key_option", message="Could not load the placeholder PrivateKey for autogeneration.", ) privateKeySelection.PrivateKey = dbPrivateKey return (acmeAccountSelection, privateKeySelection) def form_domains_challenge_typed(request, formStash, http01_only=False): domains_challenged = model_utils.DomainsChallenged() domain_names_all = [] try: # 1: iterate over the submitted domains by segment for (target_, source_) in DOMAINS_CHALLENGED_FIELDS.items(): submitted_ = formStash.results.get(source_) if submitted_: # this function checks the domain names match a simple regex # it will raise a `ValueError("invalid domain")` on the first invalid domain submitted_ = utils.domains_from_string(submitted_) if submitted_: domain_names_all.extend(submitted_) domains_challenged[target_] = submitted_ # 2: ensure there are domains if not domain_names_all: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="no domain names submitted", ) # 3: ensure there is no overlap domain_names_all_set = set(domain_names_all) if len(domain_names_all) != len(domain_names_all_set): # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="a domain name can only be associated to one challenge 
type", ) # 4: maybe we only want http01 domains submitted? if http01_only: for (k, v) in domains_challenged.items(): if k == "http-01": continue if v: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="only http-01 domains are accepted by this form", ) except ValueError as exc: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="Error_Main", message="invalid domain names detected" ) return domains_challenged def form_single_domain_challenge_typed(request, formStash, challenge_type="http-01"): domains_challenged = model_utils.DomainsChallenged() # this function checks the domain names match a simple regex domain_names = utils.domains_from_string(formStash.results["domain_name"]) if not domain_names: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field(field="domain_name", message="Found no domain names") if len(domain_names) != 1: # `formStash.fatal_field()` will raise `FormFieldInvalid(FormInvalid)` formStash.fatal_field( field="domain_name", message="This endpoint currently supports only 1 domain name", ) domains_challenged[challenge_type] = domain_names return domains_challenged
[ 2, 279, 4464, 72, 198, 11748, 2237, 198, 198, 2, 1957, 198, 6738, 2644, 8019, 1330, 20613, 355, 9195, 62, 9945, 198, 6738, 2644, 8019, 1330, 3384, 4487, 198, 6738, 2644, 19849, 1330, 5563, 355, 2746, 62, 48205, 198, 6738, 2644, 19849,...
2.278546
5,859
#
# Copyright 2020-2021 Xilinx, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from math import *
import random
[ 2, 198, 2, 15069, 12131, 1238, 2481, 1395, 346, 28413, 11, 3457, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351...
3.697674
172
item1 = 'phone'
item1_price = 100
item1_quantity = 5
item1_price_total = item1_price * item1_quantity

print(type(item1)) # str
print(type(item1_price)) # int
print(type(item1_quantity)) # int
print(type(item1_price_total)) # int

# output:
# <class 'str'>
# <class 'int'>
# <class 'int'>
# <class 'int'>
[ 9186, 16, 11639, 4862, 6, 198, 9186, 16, 62, 20888, 796, 1802, 198, 9186, 16, 62, 40972, 414, 796, 642, 198, 9186, 16, 62, 20888, 62, 23350, 796, 2378, 16, 62, 20888, 1635, 2378, 16, 62, 40972, 414, 198, 198, 4798, 7, 4906, 7, 9...
2.04
175
_base_ = "./FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_01_02MasterChefCan.py" OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/16_36WoodBlock" DATASETS = dict(TRAIN=("ycbv_036_wood_block_train_pbr",))
[ 62, 8692, 62, 796, 366, 19571, 37535, 7934, 25836, 62, 16, 13, 20, 12512, 36734, 88, 3838, 7156, 2433, 62, 2949, 786, 29531, 62, 46384, 3314, 49, 62, 2601, 541, 42731, 62, 21373, 24928, 16, 62, 35, 4246, 486, 62, 11187, 35, 89, 62...
2.023669
169
from .columns import OC from .paging import get_page, select_page, process_args from .results import serialize_bookmark, unserialize_bookmark, Page, Paging __all__ = [ 'OC', 'get_page', 'select_page', 'serialize_bookmark', 'unserialize_bookmark', 'Page', 'Paging', 'process_args' ]
[ 198, 6738, 764, 28665, 82, 1330, 24775, 198, 6738, 764, 79, 3039, 1330, 651, 62, 7700, 11, 2922, 62, 7700, 11, 1429, 62, 22046, 198, 6738, 764, 43420, 1330, 11389, 1096, 62, 2070, 4102, 11, 555, 46911, 1096, 62, 2070, 4102, 11, 7873...
2.46875
128
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Simple model for image classification.

The model consists of multiple
conv/locally_connected/wide_conv/low_rank_locally_connected layers followed
by a fully connected layer. Changes to the model architecture can be made by
modifying the simple_model_config.py file.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import copy
import os

import tensorflow.compat.v1 as tf

from low_rank_local_connectivity import layers
from low_rank_local_connectivity import utils

MOMENTUM = 0.9
EPS = 1e-5
[ 2, 19617, 28, 40477, 12, 23, 198, 2, 15069, 33160, 383, 3012, 4992, 46665, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, ...
3.67619
315
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries # SPDX-License-Identifier: MIT import time import board import busio from adafruit_icm20x import ICM20948 cycles = 200 i2c = busio.I2C(board.SCL, board.SDA) icm = ICM20948(i2c) # Cycle between two data rates # Best viewed in the Mu serial plotter where you can see how # the data rate affects the resolution of the data while True: icm.gyro_data_rate_divisor = 0 # minimum print("Data Rate:", icm.gyro_data_rate) time.sleep(2) for i in range(cycles): print(icm.gyro) icm.gyro_data_rate_divisor = 255 # maximum print("Data Rate:", icm.gyro_data_rate) time.sleep(2) for i in range(cycles): print(icm.gyro)
[ 2, 30628, 55, 12, 8979, 15269, 8206, 25, 33448, 10846, 4763, 329, 1215, 1878, 4872, 20171, 198, 2, 30628, 55, 12, 34156, 12, 33234, 7483, 25, 17168, 198, 198, 11748, 640, 198, 11748, 3096, 198, 11748, 1323, 952, 198, 6738, 512, 1878, ...
2.486207
290
a = int(input()) while a: for x in range(a-1): out = '*' + ' ' * (a-x-2) + '*' + ' ' * (a-x-2) + '*' print(out.center(2*a-1)) print('*' * (2 * a - 1)) for x in range(a-1): out = '*' + ' ' * x + '*' + ' ' * x + '*' print(out.center(2*a-1)) a = int(input())
[ 64, 796, 493, 7, 15414, 28955, 198, 4514, 257, 25, 198, 220, 220, 220, 329, 2124, 287, 2837, 7, 64, 12, 16, 2599, 198, 220, 220, 220, 220, 220, 220, 220, 503, 796, 705, 9, 6, 1343, 705, 705, 1635, 357, 64, 12, 87, 12, 17, 8,...
1.717514
177
import datetime import json import os from pathlib import Path from types import SimpleNamespace from typing import List from typing import NamedTuple, Union, Optional, Callable from uuid import uuid3, NAMESPACE_DNS from dateutil.parser import parse _VIDEO_SUFFIXES = [".mkv", ".mp4"] _IMAGE_SUFFIXES = [".jpg"] _PERMITTED_EXTENSIONS = _VIDEO_SUFFIXES + _IMAGE_SUFFIXES def get_sorted_paths(path: Path) -> List[Path]: return sorted(Path(path).iterdir(), key=os.path.getmtime) def format_timestamp_for_go(timestamp: Union[datetime.datetime, str]) -> str: if isinstance(timestamp, str): timestamp = parse(timestamp) us = timestamp.strftime("%f") tz_raw = timestamp.strftime("%z") tz = "{}:{}".format(tz_raw[0:3], tz_raw[3:]) return timestamp.strftime(f"%Y-%m-%dT%H:%M:%S.{us}00{tz}") def parse_paths(paths: List[Path], tzinfo: datetime.tzinfo, parse_method: Callable) -> List[PathDetails]: return [ y for y in [parse_method(path=x, tzinfo=tzinfo) for x in paths if x is not None] if y is not None ] def build_event_for_some_path_details(some_path_details: List[PathDetails], path: Path): if len(some_path_details) != 4: raise ValueError( f"expected some_path_details to be 4 long (and related); instead it was {len(some_path_details)} long" ) event_ids = list(set([x.event_id for x in some_path_details])) if len(event_ids) != 1: raise ValueError( f"expected all PathDetails to have a common event_id; instead they were {event_ids}" ) camera_ids = list(set([x.camera_id for x in some_path_details])) if len(camera_ids) != 1: raise ValueError( f"expected all PathDetails to have a common camera_id; instead they were {camera_ids}" ) camera_names = list(set([x.camera_name for x in some_path_details])) if len(camera_names) != 1: raise ValueError( f"expected all PathDetails to have a common camera_name; instead they were {camera_names}" ) high_res_image_paths = list( set([x.path for x in some_path_details if x.is_image and not x.is_lowres]) ) if len(high_res_image_paths) != 1: raise ValueError( f"expected to find 1 high_res_image_path from PathDetails; instead found {high_res_image_paths}" ) low_res_image_paths = list( set([x.path for x in some_path_details if x.is_image and x.is_lowres]) ) if len(low_res_image_paths) != 1: raise ValueError( f"expected to find 1 low_res_image_path from PathDetails; instead found {low_res_image_paths}" ) high_res_video_paths = list( set([x.path for x in some_path_details if not x.is_image and not x.is_lowres]) ) if len(high_res_video_paths) != 1: raise ValueError( f"expected to find 1 high_res_video_path from PathDetails; instead found {high_res_video_paths}" ) low_res_video_paths = list( set([x.path for x in some_path_details if not x.is_image and x.is_lowres]) ) if len(low_res_video_paths) != 1: raise ValueError( f"expected to find 1 low_res_video_path from PathDetails; instead found {low_res_video_paths}" ) timestamp = sorted([x.timestamp for x in some_path_details])[0] high_res_image_path = high_res_image_paths[0] low_res_image_path = low_res_image_paths[0] high_res_video_path = high_res_video_paths[0] low_res_video_path = low_res_video_paths[0] # in Go: # eventId := uuid.NewSHA1( # uuid.NameSpaceDNS, # []byte(fmt.Sprintf("%v, %v, %v, %v, %v", timestamp, highResImagePath, lowResImagePath, highResVideoPath, lowResVideoPath)), # ) event_id = uuid3( NAMESPACE_DNS, f"{format_timestamp_for_go(timestamp)}, {high_res_image_path}, {low_res_image_path}, {high_res_video_path}, {low_res_video_path}", ) return Event( event_id=str(event_id), timestamp=timestamp, camera_name=camera_names[0], 
high_res_image_path=str(path / high_res_image_path), low_res_image_path=str(path / low_res_image_path), high_res_video_path=str(path / high_res_video_path), low_res_video_path=str(path / low_res_video_path), ) def relate_path_details( some_path_details: List[PathDetails], get_key_methods: List[Callable] ) -> List[List[PathDetails]]: some_path_details_by_key = {} for path_details in some_path_details: keys = [x(path_details) for x in get_key_methods] for key in keys: some_path_details_by_key.setdefault(key, []) some_path_details_by_key[key] += [path_details] viable_some_path_details_by_key = { k: v for k, v in some_path_details_by_key.items() if len(v) == 4 } deduplicated_path_details = [] for some_path_details in viable_some_path_details_by_key.values(): if some_path_details not in deduplicated_path_details: deduplicated_path_details += [some_path_details] return deduplicated_path_details def build_events_for_related_path_details( related_path_details: List[List[PathDetails]], path: Path ) -> List[Event]: events: List[Event] = [] for some_path_details in related_path_details: events += [ build_event_for_some_path_details( some_path_details=some_path_details, path=path ) ] sorted_events = sorted(events, key=lambda x: x.timestamp) for event in sorted_events: event.timestamp = format_timestamp_for_go(timestamp=event.timestamp) return sorted_events def build_json_lines_from_events(events: List[Event]) -> str: return "\n".join( [ json.dumps( { "event_id": x.event_id, "timestamp": x.timestamp, "camera_name": x.camera_name, "high_res_image_path": x.high_res_image_path, "low_res_image_path": x.low_res_image_path, "high_res_video_path": x.high_res_video_path, "low_res_video_path": x.low_res_video_path, } ) for x in events ] ) def write_to_file(path: Path, data: str): with open(str(path), "w") as f: f.write(data) def rebuild_event_store(root_path: Path, tzinfo: datetime.tzinfo, json_path: Path, parse_method: Callable, get_key_methods: List[Callable]): print(f"getting sorted paths from {root_path}...") sorted_paths = get_sorted_paths(path=root_path) print(f"got {len(sorted_paths)} sorted paths") print("parsing sorted paths...") some_path_details = parse_paths(paths=sorted_paths, tzinfo=tzinfo, parse_method=parse_method) print(f"got {len(some_path_details)} parsed paths") print("relating parsed paths...") related_path_details = relate_path_details(some_path_details=some_path_details, get_key_methods=get_key_methods) print(f"got {len(related_path_details)} related paths") print("building events...") events = build_events_for_related_path_details( related_path_details=related_path_details, path=root_path ) print(f"built {len(events)} events") print("building json lines...") json_lines = build_json_lines_from_events(events=events) print(f"built {len(json_lines)} bytes") print(f"writing to {json_path}") write_to_file(path=json_path, data=json_lines) print("done.")
[ 11748, 4818, 8079, 198, 11748, 33918, 198, 11748, 28686, 198, 6738, 3108, 8019, 1330, 10644, 198, 6738, 3858, 1330, 17427, 36690, 10223, 198, 6738, 19720, 1330, 7343, 198, 6738, 19720, 1330, 34441, 51, 29291, 11, 4479, 11, 32233, 11, 4889...
2.264364
3,359
# Generated by Django 2.2.1 on 2019-09-27 14:23 from django.db import migrations, models
[ 2, 2980, 515, 416, 37770, 362, 13, 17, 13, 16, 319, 13130, 12, 2931, 12, 1983, 1478, 25, 1954, 198, 198, 6738, 42625, 14208, 13, 9945, 1330, 15720, 602, 11, 4981, 628 ]
2.84375
32
#!/usr/bin/env python3 # This Source Code Form is subject to the terms of the MIT # License. If a copy of the same was not distributed with this # file, You can obtain one at # https://github.com/akhilpandey95/scholarlyimpact/blob/master/LICENSE. import os import csv import glob import json import requests import subprocess import numpy as np import pandas as pd from tqdm import tqdm from ast import literal_eval from fp.fp import FreeProxy from torrequest import TorRequest from scholarly import scholarly from collections import Counter, OrderedDict from operator import attrgetter # class definition for Rate limiting # function for obtaining the citations using the dimensions web url def get_gs_citations_web(title): """ Use the google scholar web URL and requests API to obtain the citations for a given title of a scholarly article Parameters ---------- arg1 | title: str The title of a scholarly article Returns ------- Dictionary dict """ while True: try: # call the lumproxy object scholarly.use_lum_proxy() # make the query query = scholarly.search_pubs(title) # come out break except Exception as e: # come out and try again break # return the response dict return next(query) # function for assigning new IP address def assign_new_ip(text=False): """ Reset the identity using TorRequest Parameters ---------- arg1 [OPTIONAL]| text: bool A boolean flag to return the IP address tuple (old, morphed) Returns ------- boolean True/False """ try: # pass the hashed password req = TorRequest(password='scholarly_password') # return the ip address normal_identity = requests.get('http://ipecho.net/plain') # reset the identity using Tor req.reset_identity() # make a request now morphed_identity = req.get('http://ipecho.net/plain') # return the status depending on the flag if morphed_identity != normal_identity: if text == True: # return the ip address pairs as a tuple return (normal_identity.text, morphed_identity.text) else: return True else: # return just the status return False except: return False # function for assigning a new proxy def set_new_proxy(text=True): """ Reset the identity using FreeProxy Parameters ---------- arg1 [OPTIONAL]| text: bool A boolean flag to return the IP address tuple (old, morphed) Returns ------- Address fp.fp.FreeProxy """ while True: # call the freeproxy object proxy = FreeProxy(rand=True, timeout=1).get() # allocate the proxy address to scholarly proxy_works = scholarly.use_proxy(http=proxy, https=proxy) # check it the ip address works if proxy_works: # come out break # print the ip address depending on the text argument if text: # print the working ip print("Working proxy:", proxy) # return the proxy details return proxy # function for connecting tor to scholarly def scholarly_init_connection(): """ Bind TorRequest to Scholarly service Parameters ---------- No arguments Returns ------- Nothing """ while True: # assign new tor identity ips = assign_new_ip(text=True) # use the tor request for scholarly tor_req = scholarly.use_tor(tor_sock_port=9050, \ tor_control_port=9051, \ tor_pw="scholarly_password") if tor_req: # come out of the loop, when successful break # print the tor identity print("Working Tor identity:", ips[1]) # function for restarting the system tor service def restart_tor_system_service(text=False): """ Use the os module to restart the tor service Parameters ---------- arg1 [OPTIONAL]| text: bool A boolean flag to return the status of the command Returns ------- Boolean bool """ # subprocess command for stopping the tor service tor_stop = 
subprocess.Popen(['service', 'tor', 'stop']) # subprocess command for restarting the tor service tor_restart = subprocess.Popen(['service', 'tor', 'restart']) # subprocess command for restarting the tor service tor_status = subprocess.Popen(['service', 'tor', 'status'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) # if the label is set to true then print the output if text: for output in tor_status.stdout.readlines(): print(output.strip()) # pipe out the stdout, stderr for the subprocess stdout, stderr = tor_status.communicate() if len(stderr) > 0: # return False return False else: # return true if successful return True def get_articleInfo(title): """ Use the google scholar web URL and requests API to obtain the citations for a given title of a scholarly article Parameters ---------- arg1 | title: str The title of a scholarly article Returns ------- Dictionary dict """ while True: try: # init the connection with scholarly and tor scholarly_init_connection() # search for the query search_query = scholarly.search_pubs(title) # print success print("Got the results of the query") # come out of the loop break except Exception as e: # print error message print("Attempt Failed, patching new tor identity") # restart the system tor service restart_tor_system_service(text=False) # assign new connection again scholarly_init_connection() # obtain the bib entry of the scholarly article pub = next(search_query) # return the bib entry return pub if __name__ == '__main__': # iterate over the length length_of_file = len(open('paper_titles.txt').readlines()) # place the contents of the list into a file alt_list = open('paper_titles.txt').readlines() # iterate over the length of the file # write the results to a file for i in tqdm(range(length_of_file)): alt_info = open('paper_titles.txt', 'r+') cit_info = open('citations_gs.csv', 'a') cit_info.write(str(alt_list[i].strip( ).split('\t')[0]) + ',' + str(get_articleInfo(alt_list[i].strip().split('\t')[1]))) cit_info.write('\n') cit_info.close() alt_info.seek(0) alt_info.truncate() alt_info.writelines(alt_list[i+1:]) alt_info.close()
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 198, 2, 770, 8090, 6127, 5178, 318, 2426, 284, 262, 2846, 286, 262, 17168, 198, 2, 13789, 13, 1002, 257, 4866, 286, 262, 976, 373, 407, 9387, 351, 428, 198, 2, 2393, 11, 921, ...
2.463627
2,873
#Copyright ReportLab Europe Ltd. 2000-2012 #see license.txt for license details #history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfgen/pathobject.py __version__=''' $Id$ ''' __doc__=""" PDFPathObject is an efficient way to draw paths on a Canvas. Do not instantiate directly, obtain one from the Canvas instead. Progress Reports: 8.83, 2000-01-13, gmcm: created from pdfgen.py """ from reportlab.pdfgen import pdfgeom from reportlab.lib.rl_accel import fp_str
[ 2, 15269, 6358, 17822, 2031, 12052, 13, 4751, 12, 6999, 198, 2, 3826, 5964, 13, 14116, 329, 5964, 3307, 198, 2, 23569, 2638, 1378, 2503, 13, 13116, 23912, 13, 1073, 13, 2724, 14, 37157, 12, 8800, 14, 1177, 66, 14259, 13, 37157, 14, ...
2.970588
170
import wikipedia import re import TCPclient as client WORDS = ["WIKIPEDIA","SEARCH","INFORMATION"]
[ 11748, 47145, 11151, 198, 11748, 302, 198, 11748, 23633, 16366, 355, 5456, 198, 198, 45359, 5258, 796, 14631, 54, 18694, 4061, 1961, 3539, 2430, 5188, 31315, 2430, 1268, 35036, 8973, 628, 628 ]
3.21875
32
import random print("Title : Eat, Drink, And Be Sick") noun = [] for i in range(4): n = input("Enter noun : ") noun.append(n) plural = [] for i in range(6): pn = input("Enter plural noun : ") plural.append(pn) adjective = [] for i in range(2): a = input("Enter adjective : ") adjective.append(a) adverb = input("Enter adverb : ") letter = input("Enter any letter : ") body_part = input("Enter any body part : ") print("An inspector from the Department of Health and ", random.choice(noun) , " Services paid a surprise visit to our " , random.choice(adjective) , " school cafeteria.") print("The lunch special, prepared by our " , random.choice(adjective) , "dietician, was spaghetti and " , random.choice(noun) , " balls with a choice of either a " , random.choice(noun) , " salad or French " , random.choice(plural) , ".") print("The inspector found the meat-" , random.choice(plural) , " to be overcooked and discovered a live " , random.choice(noun) , " in the fries,causing him to have a " + body_part + " ache.") print("In response, he threw up all over his " , random.choice(plural) , ".") print("In his report, the inspector " + adverb + " recommended that the school cafeteria serve only nutritious " , random.choice(plural) , " as well as low-calorie " , random.choice(plural) , " and that all of the saturated " , random.choice(plural) , " be eliminated.") print("He rated the cafeteria a " + letter + "-minus.")
[ 11748, 4738, 198, 4798, 7203, 19160, 1058, 27574, 11, 32906, 11, 843, 1355, 32181, 4943, 198, 77, 977, 796, 17635, 198, 1640, 1312, 287, 2837, 7, 19, 2599, 198, 220, 220, 220, 299, 796, 5128, 7203, 17469, 23227, 1058, 366, 8, 198, 2...
3.222717
449
import numpy

# Load the feature matrix, transpose it, and write it back out.
x = numpy.loadtxt('/home/srallaba/mgc_spaces/arctic_a0404.mgc')
with open('/home/srallaba/mgc/transposed/arctic_a0404.mgc', 'w') as g:
    numpy.savetxt(g, numpy.transpose(x))
[ 11748, 299, 32152, 220, 198, 70, 796, 1280, 10786, 14, 11195, 14, 27891, 439, 15498, 14, 11296, 66, 14, 7645, 29813, 14, 283, 11048, 62, 64, 15, 26429, 13, 11296, 66, 41707, 86, 11537, 198, 87, 796, 220, 299, 32152, 13, 2220, 14116,...
2.076087
92
from zone_api.core.zone_manager import ZoneManager from zone_api import platform_encapsulator as pe from zone_api.core.zone import Zone from zone_api.core.zone_event import ZoneEvent from zone_api.core.devices.dimmer import Dimmer from zone_api.core.devices.switch import Fan, Light, Switch from zone_api.core.devices.illuminance_sensor import IlluminanceSensor from zone_api.core.devices.motion_sensor import MotionSensor from zone_api.core.actions.turn_on_switch import TurnOnSwitch from zone_api_test.core.device_test import DeviceTest ILLUMINANCE_THRESHOLD_IN_LUX = 8 INVALID_ITEM_NAME = 'invalid item name'
[ 6738, 6516, 62, 15042, 13, 7295, 13, 11340, 62, 37153, 1330, 13035, 13511, 198, 6738, 6516, 62, 15042, 1330, 3859, 62, 12685, 1686, 8927, 355, 613, 198, 198, 6738, 6516, 62, 15042, 13, 7295, 13, 11340, 1330, 13035, 198, 6738, 6516, 62...
3.19171
193
from rest_framework.response import Response
from rest_framework.views import APIView
from django_redis import get_redis_connection
from goods.models import SKU
from decimal import Decimal
from rest_framework.generics import CreateAPIView,ListAPIView
from rest_framework.mixins import ListModelMixin
from orders.serializers import OrderShowSerializer, OrderSaveSerializer, OrderListSerializer, CommentSerializers, \
    CommentSaveSerializers, CommentShowSerializers
from users.models import User
from orders.models import OrderInfo,OrderGoods
from orders.utils import PageNum
from rest_framework.filters import OrderingFilter
[ 6738, 1334, 62, 30604, 13, 26209, 1330, 18261, 198, 6738, 1334, 62, 30604, 13, 33571, 1330, 3486, 3824, 769, 198, 6738, 42625, 14208, 62, 445, 271, 1330, 651, 62, 445, 271, 62, 38659, 198, 6738, 7017, 13, 27530, 1330, 14277, 52, 198, ...
3.590164
183
import argparse import errno import logging import os import platform import signal import sys from collections import OrderedDict from contextlib import closing from distutils.version import StrictVersion from functools import partial from gettext import gettext from itertools import chain from pathlib import Path from time import sleep from typing import List import requests from socks import __version__ as socks_version from websocket import __version__ as websocket_version import streamlink.logger as logger from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version from streamlink.cache import Cache from streamlink.exceptions import FatalPluginError from streamlink.plugin import Plugin, PluginOptions from streamlink.stream import StreamIO, StreamProcess from streamlink.utils.named_pipe import NamedPipe from streamlink_cli.argparser import build_parser from streamlink_cli.compat import DeprecatedPath, is_win32, stdout from streamlink_cli.console import ConsoleOutput, ConsoleUserInputRequester from streamlink_cli.constants import CONFIG_FILES, DEFAULT_STREAM_METADATA, LOG_DIR, PLUGIN_DIRS, STREAM_SYNONYMS from streamlink_cli.output import FileOutput, Output, PlayerOutput from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url ACCEPTABLE_ERRNO = (errno.EPIPE, errno.EINVAL, errno.ECONNRESET) try: ACCEPTABLE_ERRNO += (errno.WSAECONNABORTED,) except AttributeError: pass # Not windows QUIET_OPTIONS = ("json", "stream_url", "subprocess_cmdline", "quiet") args = None console: ConsoleOutput = None output: Output = None plugin: Plugin = None stream_fd: StreamIO = None streamlink: Streamlink = None log = logging.getLogger("streamlink.cli") def check_file_output(filename, force): """Checks if file already exists and ask the user if it should be overwritten if it does.""" log.debug("Checking file output") if os.path.isfile(filename) and not force: if sys.stdin.isatty(): answer = console.ask(f"File {filename} already exists! Overwrite it? [y/N] ") if answer.lower() != "y": sys.exit() else: log.error(f"File {filename} already exists, use --force to overwrite it.") sys.exit() return FileOutput(filename) def create_output(formatter: Formatter): """Decides where to write the stream. Depending on arguments it can be one of these: - The stdout pipe - A subprocess' stdin pipe - A named pipe that the subprocess reads from - A regular file """ if (args.output or args.stdout) and (args.record or args.record_and_pipe): console.exit("Cannot use record options with other file output options.") if args.output: if args.output == "-": out = FileOutput(fd=stdout) else: out = check_file_output(formatter.filename(args.output, args.fs_safe_rules), args.force) elif args.stdout: out = FileOutput(fd=stdout) elif args.record_and_pipe: record = check_file_output(formatter.filename(args.record_and_pipe, args.fs_safe_rules), args.force) out = FileOutput(fd=stdout, record=record) else: http = namedpipe = record = None if not args.player: console.exit("The default player (VLC) does not seem to be " "installed. 
You must specify the path to a player " "executable with --player.") if args.player_fifo: try: namedpipe = NamedPipe() except OSError as err: console.exit(f"Failed to create pipe: {err}") elif args.player_http: http = create_http_server() if args.record: record = check_file_output(formatter.filename(args.record, args.fs_safe_rules), args.force) log.info(f"Starting player: {args.player}") out = PlayerOutput( args.player, args=args.player_args, quiet=not args.verbose_player, kill=not args.player_no_close, namedpipe=namedpipe, http=http, record=record, title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url ) return out def create_http_server(*_args, **_kwargs): """Creates a HTTP server listening on a given host and port. If host is empty, listen on all available interfaces, and if port is 0, listen on a random high port. """ try: http = HTTPServer() http.bind(*_args, **_kwargs) except OSError as err: console.exit(f"Failed to create HTTP server: {err}") return http def output_stream_http(plugin, initial_streams, formatter: Formatter, external=False, port=0): """Continuously output the stream over HTTP.""" global output if not external: if not args.player: console.exit("The default player (VLC) does not seem to be " "installed. You must specify the path to a player " "executable with --player.") server = create_http_server() player = output = PlayerOutput( args.player, args=args.player_args, filename=server.url, quiet=not args.verbose_player, title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url ) try: log.info(f"Starting player: {args.player}") if player: player.open() except OSError as err: console.exit(f"Failed to start player: {args.player} ({err})") else: server = create_http_server(host=None, port=port) player = None log.info("Starting server, access with one of:") for url in server.urls: log.info(" " + url) for req in iter_http_requests(server, player): user_agent = req.headers.get("User-Agent") or "unknown player" log.info(f"Got HTTP request from {user_agent}") stream_fd = prebuffer = None while not stream_fd and (not player or player.running): try: streams = initial_streams or fetch_streams(plugin) initial_streams = None for stream_name in (resolve_stream_name(streams, s) for s in args.stream): if stream_name in streams: stream = streams[stream_name] break else: log.info("Stream not available, will re-fetch streams in 10 sec") sleep(10) continue except PluginError as err: log.error(f"Unable to fetch new streams: {err}") continue try: log.info(f"Opening stream: {stream_name} ({type(stream).shortname()})") stream_fd, prebuffer = open_stream(stream) except StreamError as err: log.error(err) if stream_fd and prebuffer: log.debug("Writing stream to player") read_stream(stream_fd, server, prebuffer, formatter) server.close(True) player.close() server.close() def output_stream_passthrough(stream, formatter: Formatter): """Prepares a filename to be passed to the player.""" global output filename = f'"{stream_to_url(stream)}"' output = PlayerOutput( args.player, args=args.player_args, filename=filename, call=True, quiet=not args.verbose_player, title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url ) try: log.info(f"Starting player: {args.player}") output.open() except OSError as err: console.exit(f"Failed to start player: {args.player} ({err})") return False return True def open_stream(stream): """Opens a stream and reads 8192 bytes from it. 
This is useful to check if a stream actually has data before opening the output. """ global stream_fd # Attempts to open the stream try: stream_fd = stream.open() except StreamError as err: raise StreamError(f"Could not open stream: {err}") # Read 8192 bytes before proceeding to check for errors. # This is to avoid opening the output unnecessarily. try: log.debug("Pre-buffering 8192 bytes") prebuffer = stream_fd.read(8192) except OSError as err: stream_fd.close() raise StreamError(f"Failed to read data from stream: {err}") if not prebuffer: stream_fd.close() raise StreamError("No data returned from stream") return stream_fd, prebuffer def output_stream(stream, formatter: Formatter): """Open stream, create output and finally write the stream to output.""" global output success_open = False for i in range(args.retry_open): try: stream_fd, prebuffer = open_stream(stream) success_open = True break except StreamError as err: log.error(f"Try {i + 1}/{args.retry_open}: Could not open stream {stream} ({err})") if not success_open: console.exit(f"Could not open stream {stream}, tried {args.retry_open} times, exiting") output = create_output(formatter) try: output.open() except OSError as err: if isinstance(output, PlayerOutput): console.exit(f"Failed to start player: {args.player} ({err})") else: console.exit(f"Failed to open output: {output.filename} ({err})") with closing(output): log.debug("Writing stream to output") read_stream(stream_fd, output, prebuffer, formatter) return True def read_stream(stream, output, prebuffer, formatter: Formatter, chunk_size=8192): """Reads data from stream and then writes it to the output.""" is_player = isinstance(output, PlayerOutput) is_http = isinstance(output, HTTPServer) is_fifo = is_player and output.namedpipe show_progress = ( isinstance(output, FileOutput) and output.fd is not stdout and (sys.stdout.isatty() or args.force_progress) ) show_record_progress = ( hasattr(output, "record") and isinstance(output.record, FileOutput) and output.record.fd is not stdout and (sys.stdout.isatty() or args.force_progress) ) stream_iterator = chain( [prebuffer], iter(partial(stream.read, chunk_size), b"") ) if show_progress: stream_iterator = progress( stream_iterator, prefix=os.path.basename(output.filename) ) elif show_record_progress: stream_iterator = progress( stream_iterator, prefix=os.path.basename(output.record.filename) ) try: for data in stream_iterator: # We need to check if the player process still exists when # using named pipes on Windows since the named pipe is not # automatically closed by the player. if is_win32 and is_fifo: output.player.poll() if output.player.returncode is not None: log.info("Player closed") break try: output.write(data) except OSError as err: if is_player and err.errno in ACCEPTABLE_ERRNO: log.info("Player closed") elif is_http and err.errno in ACCEPTABLE_ERRNO: log.info("HTTP connection closed") else: console.exit(f"Error when writing to output: {err}, exiting") break except OSError as err: console.exit(f"Error when reading from stream: {err}, exiting") finally: stream.close() log.info("Stream ended") def handle_stream(plugin, streams, stream_name): """Decides what to do with the selected stream. 
Depending on arguments it can be one of these: - Output internal command-line - Output JSON represenation - Continuously output the stream over HTTP - Output stream data to selected output """ stream_name = resolve_stream_name(streams, stream_name) stream = streams[stream_name] # Print internal command-line if this stream # uses a subprocess. if args.subprocess_cmdline: if isinstance(stream, StreamProcess): try: cmdline = stream.cmdline() except StreamError as err: console.exit(err) console.msg(cmdline) else: console.exit("The stream specified cannot be translated to a command") # Print JSON representation of the stream elif args.json: console.msg_json( stream, metadata=plugin.get_metadata() ) elif args.stream_url: try: console.msg(stream.to_url()) except TypeError: console.exit("The stream specified cannot be translated to a URL") # Output the stream else: # Find any streams with a '_alt' suffix and attempt # to use these in case the main stream is not usable. alt_streams = list(filter(lambda k: stream_name + "_alt" in k, sorted(streams.keys()))) file_output = args.output or args.stdout formatter = get_formatter(plugin) for stream_name in [stream_name] + alt_streams: stream = streams[stream_name] stream_type = type(stream).shortname() if stream_type in args.player_passthrough and not file_output: log.info(f"Opening stream: {stream_name} ({stream_type})") success = output_stream_passthrough(stream, formatter) elif args.player_external_http: return output_stream_http(plugin, streams, formatter, external=True, port=args.player_external_http_port) elif args.player_continuous_http and not file_output: return output_stream_http(plugin, streams, formatter) else: log.info(f"Opening stream: {stream_name} ({stream_type})") success = output_stream(stream, formatter) if success: break def fetch_streams(plugin): """Fetches streams using correct parameters.""" return plugin.streams(stream_types=args.stream_types, sorting_excludes=args.stream_sorting_excludes) def fetch_streams_with_retry(plugin, interval, count): """Attempts to fetch streams repeatedly until some are returned or limit hit.""" try: streams = fetch_streams(plugin) except PluginError as err: log.error(err) streams = None if not streams: log.info(f"Waiting for streams, retrying every {interval} second(s)") attempts = 0 while not streams: sleep(interval) try: streams = fetch_streams(plugin) except FatalPluginError: raise except PluginError as err: log.error(err) if count > 0: attempts += 1 if attempts >= count: break return streams def resolve_stream_name(streams, stream_name): """Returns the real stream name of a synonym.""" if stream_name in STREAM_SYNONYMS and stream_name in streams: for name, stream in streams.items(): if stream is streams[stream_name] and name not in STREAM_SYNONYMS: return name return stream_name def format_valid_streams(plugin, streams): """Formats a dict of streams. Filters out synonyms and displays them next to the stream they point to. Streams are sorted according to their quality (based on plugin.stream_weight). """ delimiter = ", " validstreams = [] for name, stream in sorted(streams.items(), key=lambda stream: plugin.stream_weight(stream[0])): if name in STREAM_SYNONYMS: continue synonyms = list(filter(synonymfilter, streams.keys())) if len(synonyms) > 0: joined = delimiter.join(synonyms) name = f"{name} ({joined})" validstreams.append(name) return delimiter.join(validstreams) def handle_url(): """The URL handler. Attempts to resolve the URL to a plugin and then attempts to fetch a list of available streams. 
Proceeds to handle stream if user specified a valid one, otherwise output list of valid streams. """ try: plugin = streamlink.resolve_url(args.url) setup_plugin_options(streamlink, plugin) log.info(f"Found matching plugin {plugin.module} for URL {args.url}") if args.retry_max or args.retry_streams: retry_streams = 1 retry_max = 0 if args.retry_streams: retry_streams = args.retry_streams if args.retry_max: retry_max = args.retry_max streams = fetch_streams_with_retry(plugin, retry_streams, retry_max) else: streams = fetch_streams(plugin) except NoPluginError: console.exit(f"No plugin can handle URL: {args.url}") except PluginError as err: console.exit(err) if not streams: console.exit(f"No playable streams found on this URL: {args.url}") if args.default_stream and not args.stream and not args.json: args.stream = args.default_stream if args.stream: validstreams = format_valid_streams(plugin, streams) for stream_name in args.stream: if stream_name in streams: log.info(f"Available streams: {validstreams}") handle_stream(plugin, streams, stream_name) return err = f"The specified stream(s) '{', '.join(args.stream)}' could not be found" if args.json: console.msg_json( plugin=plugin.module, metadata=plugin.get_metadata(), streams=streams, error=err ) else: console.exit(f"{err}.\n Available streams: {validstreams}") elif args.json: console.msg_json( plugin=plugin.module, metadata=plugin.get_metadata(), streams=streams ) elif args.stream_url: try: console.msg(streams[list(streams)[-1]].to_manifest_url()) except TypeError: console.exit("The stream specified cannot be translated to a URL") else: validstreams = format_valid_streams(plugin, streams) console.msg(f"Available streams: {validstreams}") def print_plugins(): """Outputs a list of all plugins Streamlink has loaded.""" pluginlist = list(streamlink.get_plugins().keys()) pluginlist_formatted = ", ".join(sorted(pluginlist)) if args.json: console.msg_json(pluginlist) else: console.msg(f"Loaded plugins: {pluginlist_formatted}") def load_plugins(dirs: List[Path], showwarning: bool = True): """Attempts to load plugins from a list of directories.""" for directory in dirs: if directory.is_dir(): success = streamlink.load_plugins(str(directory)) if success and type(directory) is DeprecatedPath: log.info(f"Loaded plugins from deprecated path, see CLI docs for how to migrate: {directory}") elif showwarning: log.warning(f"Plugin path {directory} does not exist or is not a directory!") def setup_args(parser: argparse.ArgumentParser, config_files: List[Path] = None, ignore_unknown: bool = False): """Parses arguments.""" global args arglist = sys.argv[1:] # Load arguments from config files configs = [f"@{config_file}" for config_file in config_files or []] args, unknown = parser.parse_known_args(configs + arglist) if unknown and not ignore_unknown: msg = gettext("unrecognized arguments: %s") parser.error(msg % " ".join(unknown)) # Force lowercase to allow case-insensitive lookup if args.stream: args.stream = [stream.lower() for stream in args.stream] if not args.url and args.url_param: args.url = args.url_param def setup_http_session(): """Sets the global HTTP settings, such as proxy and headers.""" if args.http_proxy: streamlink.set_option("http-proxy", args.http_proxy) if args.https_proxy: streamlink.set_option("https-proxy", args.https_proxy) if args.http_cookie: streamlink.set_option("http-cookies", dict(args.http_cookie)) if args.http_header: streamlink.set_option("http-headers", dict(args.http_header)) if args.http_query_param: 
streamlink.set_option("http-query-params", dict(args.http_query_param)) if args.http_ignore_env: streamlink.set_option("http-trust-env", False) if args.http_no_ssl_verify: streamlink.set_option("http-ssl-verify", False) if args.http_disable_dh: streamlink.set_option("http-disable-dh", True) if args.http_ssl_cert: streamlink.set_option("http-ssl-cert", args.http_ssl_cert) if args.http_ssl_cert_crt_key: streamlink.set_option("http-ssl-cert", tuple(args.http_ssl_cert_crt_key)) if args.http_timeout: streamlink.set_option("http-timeout", args.http_timeout) def setup_plugins(extra_plugin_dir=None): """Loads any additional plugins.""" load_plugins(PLUGIN_DIRS, showwarning=False) if extra_plugin_dir: load_plugins([Path(path).expanduser() for path in extra_plugin_dir]) def setup_streamlink(): """Creates the Streamlink session.""" global streamlink streamlink = Streamlink({"user-input-requester": ConsoleUserInputRequester(console)}) def setup_options(): """Sets Streamlink options.""" if args.interface: streamlink.set_option("interface", args.interface) if args.ipv4: streamlink.set_option("ipv4", args.ipv4) if args.ipv6: streamlink.set_option("ipv6", args.ipv6) if args.ringbuffer_size: streamlink.set_option("ringbuffer-size", args.ringbuffer_size) if args.mux_subtitles: streamlink.set_option("mux-subtitles", args.mux_subtitles) if args.hds_live_edge: streamlink.set_option("hds-live-edge", args.hds_live_edge) if args.hls_live_edge: streamlink.set_option("hls-live-edge", args.hls_live_edge) if args.hls_playlist_reload_attempts: streamlink.set_option("hls-playlist-reload-attempts", args.hls_playlist_reload_attempts) if args.hls_playlist_reload_time: streamlink.set_option("hls-playlist-reload-time", args.hls_playlist_reload_time) if args.hls_segment_ignore_names: streamlink.set_option("hls-segment-ignore-names", args.hls_segment_ignore_names) if args.hls_segment_key_uri: streamlink.set_option("hls-segment-key-uri", args.hls_segment_key_uri) if args.hls_audio_select: streamlink.set_option("hls-audio-select", args.hls_audio_select) if args.hls_start_offset: streamlink.set_option("hls-start-offset", args.hls_start_offset) if args.hls_duration: streamlink.set_option("hls-duration", args.hls_duration) if args.hls_live_restart: streamlink.set_option("hls-live-restart", args.hls_live_restart) if args.rtmp_rtmpdump: streamlink.set_option("rtmp-rtmpdump", args.rtmp_rtmpdump) elif args.rtmpdump: streamlink.set_option("rtmp-rtmpdump", args.rtmpdump) if args.rtmp_proxy: streamlink.set_option("rtmp-proxy", args.rtmp_proxy) # deprecated if args.hds_segment_attempts: streamlink.set_option("hds-segment-attempts", args.hds_segment_attempts) if args.hds_segment_threads: streamlink.set_option("hds-segment-threads", args.hds_segment_threads) if args.hds_segment_timeout: streamlink.set_option("hds-segment-timeout", args.hds_segment_timeout) if args.hds_timeout: streamlink.set_option("hds-timeout", args.hds_timeout) if args.hls_segment_attempts: streamlink.set_option("hls-segment-attempts", args.hls_segment_attempts) if args.hls_segment_threads: streamlink.set_option("hls-segment-threads", args.hls_segment_threads) if args.hls_segment_timeout: streamlink.set_option("hls-segment-timeout", args.hls_segment_timeout) if args.hls_timeout: streamlink.set_option("hls-timeout", args.hls_timeout) if args.http_stream_timeout: streamlink.set_option("http-stream-timeout", args.http_stream_timeout) if args.rtmp_timeout: streamlink.set_option("rtmp-timeout", args.rtmp_timeout) # generic stream- arguments take precedence over deprecated 
stream-type arguments if args.stream_segment_attempts: streamlink.set_option("stream-segment-attempts", args.stream_segment_attempts) if args.stream_segment_threads: streamlink.set_option("stream-segment-threads", args.stream_segment_threads) if args.stream_segment_timeout: streamlink.set_option("stream-segment-timeout", args.stream_segment_timeout) if args.stream_timeout: streamlink.set_option("stream-timeout", args.stream_timeout) if args.ffmpeg_ffmpeg: streamlink.set_option("ffmpeg-ffmpeg", args.ffmpeg_ffmpeg) if args.ffmpeg_verbose: streamlink.set_option("ffmpeg-verbose", args.ffmpeg_verbose) if args.ffmpeg_verbose_path: streamlink.set_option("ffmpeg-verbose-path", args.ffmpeg_verbose_path) if args.ffmpeg_fout: streamlink.set_option("ffmpeg-fout", args.ffmpeg_fout) if args.ffmpeg_video_transcode: streamlink.set_option("ffmpeg-video-transcode", args.ffmpeg_video_transcode) if args.ffmpeg_audio_transcode: streamlink.set_option("ffmpeg-audio-transcode", args.ffmpeg_audio_transcode) if args.ffmpeg_copyts: streamlink.set_option("ffmpeg-copyts", args.ffmpeg_copyts) if args.ffmpeg_start_at_zero: streamlink.set_option("ffmpeg-start-at-zero", args.ffmpeg_start_at_zero) streamlink.set_option("subprocess-errorlog", args.subprocess_errorlog) streamlink.set_option("subprocess-errorlog-path", args.subprocess_errorlog_path) streamlink.set_option("locale", args.locale) def setup_plugin_args(session, parser): """Sets Streamlink plugin options.""" plugin_args = parser.add_argument_group("Plugin options") for pname, plugin in session.plugins.items(): defaults = {} group = plugin_args.add_argument_group(pname.capitalize()) for parg in plugin.arguments: if not parg.is_global: group.add_argument(parg.argument_name(pname), **parg.options) defaults[parg.dest] = parg.default else: pargdest = parg.dest for action in parser._actions: # find matching global argument if pargdest != action.dest: continue defaults[pargdest] = action.default # add plugin to global argument plugins = getattr(action, "plugins", []) plugins.append(pname) setattr(action, "plugins", plugins) plugin.options = PluginOptions(defaults) def setup_plugin_options(session, plugin): """Sets Streamlink plugin options.""" pname = plugin.module required = OrderedDict({}) for parg in plugin.arguments: if parg.options.get("help") == argparse.SUPPRESS: continue value = getattr(args, parg.dest if parg.is_global else parg.namespace_dest(pname)) session.set_plugin_option(pname, parg.dest, value) if not parg.is_global: if parg.required: required[parg.name] = parg # if the value is set, check to see if any of the required arguments are not set if parg.required or value: try: for rparg in plugin.arguments.requires(parg.name): required[rparg.name] = rparg except RuntimeError: log.error(f"{pname} plugin has a configuration error and the arguments cannot be parsed") break if required: for req in required.values(): if not session.get_plugin_option(pname, req.dest): prompt = f"{req.prompt or f'Enter {pname} {req.name}'}: " session.set_plugin_option( pname, req.dest, console.askpass(prompt) if req.sensitive else console.ask(prompt) ) def log_current_versions(): """Show current installed versions""" if not logger.root.isEnabledFor(logging.DEBUG): return # macOS if sys.platform == "darwin": os_version = f"macOS {platform.mac_ver()[0]}" # Windows elif sys.platform == "win32": os_version = f"{platform.system()} {platform.release()}" # Linux / other else: os_version = platform.platform() log.debug(f"OS: {os_version}") log.debug(f"Python: {platform.python_version()}") 
log.debug(f"Streamlink: {streamlink_version}") log.debug(f"Requests({requests.__version__}), " f"Socks({socks_version}), " f"Websocket({websocket_version})")
[ 11748, 1822, 29572, 198, 11748, 11454, 3919, 198, 11748, 18931, 198, 11748, 28686, 198, 11748, 3859, 198, 11748, 6737, 198, 11748, 25064, 198, 6738, 17268, 1330, 14230, 1068, 35, 713, 198, 6738, 4732, 8019, 1330, 9605, 198, 6738, 1233, 26...
2.341086
12,727
from io import BytesIO from io import StringIO import json from bson.dbref import DBRef import datetime from bson import json_util import logging import base64 jsonCode ={ "building":{ "Essae Vaishnavi Solitaire": { "id": "B1", "division": { "SS": { "id": "D1", "dept":{ "Semicon":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "RND":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "Mobile":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } }, "TTEC": { "id": "D2", "dept":{ "TTEC-AL":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "TTEC-SL":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "TTEC-DL":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "TTEC-CI":{ "id":"DEP4", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } } } }, "Fortune Summit": { "id": "B2", "division": { "TMSC": { "id": "D1", "dept":{ "Medical":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "RND":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "Imaging":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } }, "tmc": { "id": "D2", "dept":{ "tmc-1":{ "id":"DEP1", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "tmc-2":{ "id":"DEP2", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } }, "tmc-3":{ "id":"DEP3", "floor":{"0":"0", "1":"1", "2":"2", "3":"3", "4":"4", "5":"5", "6":"6" } } } } } } } } #Create and configure logger logging.basicConfig(filename="server.log", format='%(asctime)s %(message)s', filemode='a') #Creating an object logger=logging.getLogger() #Setting the threshold of logger to DEBUG logger.setLevel(logging.DEBUG) import pymongo uri = "mongodb://218ffa09-0ee0-4-231-b9ee:zTV4cwDG0vM49J2GFsw72JzwOD79Bv3dPU8fbVLb5pbh3p0CmTBYcvhrFKTjtl1s7hgYSfRbMOrsVve6hfvhag==@218ffa09-0ee0-4-231-b9ee.documents.azure.com:10255/?ssl=true&replicaSet=globaldb" client = pymongo.MongoClient(uri) print("Obtained the client") mydb = client.test #Clear DB only for testing
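With the client in hand, a document like `jsonCode` would typically be written to a collection before tests run. A minimal sketch, assuming a collection named `buildings` (the collection name is an assumption, not from the original snippet):

# Hypothetical: persist the building layout and read part of it back.
result = mydb.buildings.insert_one(jsonCode)
logger.info("Inserted document with _id %s", result.inserted_id)
print(mydb.buildings.find_one({"building.Essae Vaishnavi Solitaire.id": "B1"}))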
[ 6738, 33245, 1330, 2750, 4879, 9399, 198, 6738, 33245, 1330, 10903, 9399, 198, 11748, 33918, 198, 6738, 275, 1559, 13, 67, 4679, 69, 1330, 20137, 8134, 198, 11748, 4818, 8079, 198, 6738, 275, 1559, 1330, 33918, 62, 22602, 198, 11748, 18...
1.188848
6,492
# python3
# coding=utf-8
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for dags.bq_to_cm_dag."""

import unittest

from airflow.contrib.hooks import bigquery_hook
from airflow.models import baseoperator
from airflow.models import dag
from airflow.models import variable
import mock

from gps_building_blocks.cloud.utils import cloud_auth

from dags import bq_to_cm_dag
from plugins.pipeline_plugins.hooks import monitoring_hook

_DAG_NAME = bq_to_cm_dag._DAG_NAME

AIRFLOW_VARIABLES = {
    'dag_name': _DAG_NAME,
    f'{_DAG_NAME}_schedule': '@once',
    f'{_DAG_NAME}_retries': 0,
    f'{_DAG_NAME}_retry_delay': 3,
    f'{_DAG_NAME}_is_retry': True,
    f'{_DAG_NAME}_is_run': True,
    f'{_DAG_NAME}_enable_run_report': False,
    f'{_DAG_NAME}_enable_monitoring': True,
    f'{_DAG_NAME}_enable_monitoring_cleanup': False,
    'monitoring_data_days_to_live': 50,
    'monitoring_dataset': 'test_monitoring_dataset',
    'monitoring_table': 'test_monitoring_table',
    'monitoring_bq_conn_id': 'test_monitoring_conn',
    'bq_dataset_id': 'test_dataset',
    'bq_table_id': 'test_table',
    'cm_profile_id': 'cm_profile_id',
    'cm_service_account': 'cm_service_account'
}

if __name__ == '__main__':
  unittest.main()
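The test class itself is elided in this row. Suites like this usually stub `variable.Variable.get` so the DAG module reads `AIRFLOW_VARIABLES` instead of a live metastore; a hedged sketch of that pattern (the class name and the `create_dag()` entry point are hypothetical, not from the original file):

class BqToCmDagTest(unittest.TestCase):  # hypothetical name

  def test_dag_parses_with_stubbed_variables(self):
    # Route every Variable.get through the in-memory dict above.
    with mock.patch.object(
        variable.Variable, 'get',
        side_effect=lambda key, *args, **kwargs: AIRFLOW_VARIABLES[key]):
      test_dag = bq_to_cm_dag.create_dag()  # hypothetical entry point
      self.assertIsInstance(test_dag, dag.DAG)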
[ 2, 21015, 18, 198, 2, 19617, 28, 40477, 12, 23, 198, 2, 15069, 12131, 3012, 11419, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2...
2.631893
671
from django.core.management.base import BaseCommand

import logging
import re

from talentmap_api.common.xml_helpers import XMLloader, strip_extra_spaces, parse_boolean, parse_date, get_nested_tag
from talentmap_api.language.models import Language, Proficiency
from talentmap_api.position.models import Grade, Skill, Position, CapsuleDescription, SkillCone
from talentmap_api.organization.models import Organization, Post, TourOfDuty, Location, Country
[ 6738, 42625, 14208, 13, 7295, 13, 27604, 13, 8692, 1330, 7308, 21575, 198, 198, 11748, 18931, 198, 11748, 302, 198, 198, 6738, 7401, 8899, 62, 15042, 13, 11321, 13, 19875, 62, 16794, 364, 1330, 23735, 29356, 11, 10283, 62, 26086, 62, ...
3.612403
129
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
    Unit tests for gluon.recfile
"""
import unittest
import os
import shutil
import uuid

from .fix_path import fix_sys_path

fix_sys_path(__file__)

from gluon import recfile

if __name__ == '__main__':
    unittest.main()
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 37811, 198, 220, 220, 220, 11801, 5254, 329, 1278, 84, 261, 13, 8344, 7753, 198, 37811, 198, 11748, 555, 715, 39...
2.442478
113
import json
import os
import subprocess

from dotenv import load_dotenv
from subprocess import check_output, Popen, PIPE

load_dotenv()

# Accessing variables.
CLIENT_ID = os.environ.get('CLIENT_ID')
CLIENT_SECRET = os.environ.get('CLIENT_SECRET')
USERNAME = os.environ.get('USERNAME')
BUCKET_NAME = os.environ.get('BUCKET_NAME')

# NOTE: get_lambda_functions() is not defined in this snippet; see the sketch below.
lambda_functions = get_lambda_functions()

# Push the environment variables to every Lambda function's configuration.
for lambda_function in lambda_functions:
    function_name = lambda_function['FunctionName']
    subprocess.run([
        "aws", "lambda", "update-function-configuration",
        "--function-name", f"{function_name}",
        "--environment",
        f"Variables={{CLIENT_ID={CLIENT_ID},CLIENT_SECRET={CLIENT_SECRET},USERNAME={USERNAME},BUCKET_NAME={BUCKET_NAME}}}"
    ])
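`get_lambda_functions()` is used above but never defined. Given that `check_output` and `json` are already imported, it was presumably a thin wrapper over the AWS CLI; a sketch of such a helper (the implementation is an assumption):

def get_lambda_functions():
    # Assumed helper: list all Lambda functions via the AWS CLI.
    # `aws lambda list-functions` returns JSON with a top-level "Functions"
    # list (pagination is ignored here for brevity).
    output = check_output(["aws", "lambda", "list-functions"])
    return json.loads(output)["Functions"]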
[ 11748, 33918, 198, 11748, 28686, 198, 11748, 850, 14681, 198, 198, 6738, 16605, 24330, 1330, 3440, 62, 26518, 24330, 198, 6738, 850, 14681, 1330, 2198, 62, 22915, 11, 8099, 268, 11, 350, 4061, 36, 198, 198, 2220, 62, 26518, 24330, 3419,...
2.659498
279
# -*- coding: utf-8 -*-

###########################################################################
## Python code generated with wxFormBuilder (version Aug 8 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################

import wx
import wx.xrc

###########################################################################
## Class MyFrame1
###########################################################################
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 220, 198, 198, 29113, 29113, 7804, 21017, 198, 2235, 11361, 2438, 7560, 351, 266, 87, 8479, 32875, 357, 9641, 2447, 220, 807, 2864, 8, 198, 2235, 2638, 1378, 2503, 13, 49345,...
5.068627
102
""" dependencies.contrib.celery --------------------------- This module implements injectable Celery task. :copyright: (c) 2016-2020 by dry-python team. :license: BSD, see LICENSE for more details. """ from _dependencies.contrib.celery import shared_task from _dependencies.contrib.celery import task __all__ = ["shared_task", "task"]
[ 37811, 198, 45841, 3976, 13, 3642, 822, 13, 7015, 88, 198, 22369, 6329, 198, 198, 1212, 8265, 23986, 8677, 540, 15248, 1924, 4876, 13, 198, 198, 25, 22163, 4766, 25, 357, 66, 8, 1584, 12, 42334, 416, 5894, 12, 29412, 1074, 13, 198, ...
3.356436
101
"""Miscellaneous utility functions.""" from functools import reduce from PIL import Image import numpy as np from matplotlib.colors import rgb_to_hsv, hsv_to_rgb def compose(*funcs): """Compose arbitrarily many functions, evaluated left to right. Reference: https://mathieularose.com/function-composition-in-python/ """ # return lambda x: reduce(lambda v, f: f(v), funcs, x) if funcs: return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs) else: raise ValueError('Composition of empty sequence not supported.') def letterbox_image(image, size): '''resize image with unchanged aspect ratio using padding''' iw, ih = image.size w, h = size scale = min(w/iw, h/ih) nw = int(iw*scale) nh = int(ih*scale) image = image.resize((nw,nh), Image.BICUBIC) new_image = Image.new('RGB', size, (128,128,128)) new_image.paste(image, ((w-nw)//2, (h-nh)//2)) return new_image def get_random_data(annotation_line, input_shape, random=True, max_boxes=20, jitter=.3, hue=.1, sat=1.5, val=1.5, proc_img=True): '''random preprocessing for real-time data augmentation''' line = annotation_line.split() image = Image.open(line[0]) iw, ih = image.size h, w = input_shape box = np.array([np.array(list(map(int,box.split(',')))) for box in line[1:]]) if not random: # resize image scale = min(w/iw, h/ih) nw = int(iw*scale) nh = int(ih*scale) dx = (w-nw)//2 dy = (h-nh)//2 image_data=0 if proc_img: image = image.resize((nw,nh), Image.BICUBIC) new_image = Image.new('RGB', (w,h), (128,128,128)) new_image.paste(image, (dx, dy)) image_data = np.array(new_image)/255. # correct boxes box_data = np.zeros((max_boxes,5)) if len(box)>0: np.random.shuffle(box) if len(box)>max_boxes: box = box[:max_boxes] box[:, [0,2]] = box[:, [0,2]]*scale + dx box[:, [1,3]] = box[:, [1,3]]*scale + dy box_data[:len(box)] = box return image_data, box_data # resize image new_ar = w/h * rand(1-jitter,1+jitter)/rand(1-jitter,1+jitter) scale = rand(.25, 2) if new_ar < 1: nh = int(scale*h) nw = int(nh*new_ar) else: nw = int(scale*w) nh = int(nw/new_ar) image = image.resize((nw,nh), Image.BICUBIC) # place image dx = int(rand(0, w-nw)) dy = int(rand(0, h-nh)) new_image = Image.new('RGB', (w,h), (128,128,128)) new_image.paste(image, (dx, dy)) image = new_image # flip image or not flip = rand()<.5 if flip: image = image.transpose(Image.FLIP_LEFT_RIGHT) # distort image hue = rand(-hue, hue) sat = rand(1, sat) if rand()<.5 else 1/rand(1, sat) val = rand(1, val) if rand()<.5 else 1/rand(1, val) x = rgb_to_hsv(np.array(image)/255.) 
x[..., 0] += hue x[..., 0][x[..., 0]>1] -= 1 x[..., 0][x[..., 0]<0] += 1 x[..., 1] *= sat x[..., 2] *= val x[x>1] = 1 x[x<0] = 0 image_data = hsv_to_rgb(x) # numpy array, 0 to 1 # correct boxes box_data = np.zeros((max_boxes,5)) if len(box)>0: np.random.shuffle(box) box[:, [0,2]] = box[:, [0,2]]*nw/iw + dx box[:, [1,3]] = box[:, [1,3]]*nh/ih + dy if flip: box[:, [0,2]] = w - box[:, [2,0]] box[:, 0:2][box[:, 0:2]<0] = 0 box[:, 2][box[:, 2]>w] = w box[:, 3][box[:, 3]>h] = h box_w = box[:, 2] - box[:, 0] box_h = box[:, 3] - box[:, 1] box = box[np.logical_and(box_w>1, box_h>1)] # discard invalid box if len(box)>max_boxes: box = box[:max_boxes] box_data[:len(box)] = box return image_data, box_data def get_random_data2(annotation_line, input_shape, random=True, max_boxes=20, jitter=.3, hue=.1, sat=1.5, val=1.5, proc_img=True): '''random preprocessing for real-time data augmentation''' line = annotation_line.split() image = Image.open(line[0]) w, h = image.size #13 14 dx, dy = input_shape box = np.array([np.array(list(map(int,box.split(',')))) for box in line[1:]]) x_min = w x_max = 0 y_min = h y_max = 0 for bbox in box: x_min = min(x_min, bbox[0]) y_min = min(y_min, bbox[1]) x_max = max(x_max, bbox[2]) y_max = max(y_max, bbox[3]) name = bbox[4] # d_to_left = x_min d_to_right = w - x_max d_to_top = y_min d_to_bottom = h - y_max # crop_x_min = int(x_min - rand(0, d_to_left)) crop_y_min = int(y_min - rand(0, d_to_top)) crop_x_max = int(x_max + rand(0, d_to_right)) crop_y_max = int(y_max + rand(0, d_to_bottom)) # crop_x_min = max(0, crop_x_min) crop_y_min = max(0, crop_y_min) crop_x_max = min(w, crop_x_max) crop_y_max = min(h, crop_y_max) cropped = image.crop((crop_x_min, crop_y_min, crop_x_max, crop_y_max)) # (left, upper, right, lower) new_image = Image.new('RGB', (w,h), (128,128,128)) new_image.paste(cropped, (dx, dy)) image_data = np.array(new_image)/255. box_data = np.zeros((max_boxes,5)) if len(box)>0: np.random.shuffle(box) if len(box)>max_boxes: box = box[:max_boxes] box[:,0] = box[:,0]-crop_y_min box[:,1] = box[:,1]-crop_y_min box[:,2] = box[:,2]-crop_x_min box[:,3] = box[:,3]-crop_y_min box_data[:len(box)] = box return image_data, box_data
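Both `get_random_data` variants call a `rand` helper that does not appear in this excerpt. A uniform-sampling shim consistent with its call sites (`rand()`, `rand(a, b)`), along the lines of the upstream keras-yolo3 utility:

def rand(a=0, b=1):
    # Uniform sample in [a, b); matches the rand() and rand(a, b) call sites above.
    return np.random.rand() * (b - a) + a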
[ 37811, 31281, 25673, 10361, 5499, 526, 15931, 198, 198, 6738, 1257, 310, 10141, 1330, 4646, 198, 198, 6738, 350, 4146, 1330, 7412, 198, 11748, 299, 32152, 355, 45941, 198, 6738, 2603, 29487, 8019, 13, 4033, 669, 1330, 46140, 62, 1462, 6...
1.989158
2,767
import logging
import pathlib

logging.basicConfig(level=logging.INFO)

# Dirs
ROOT_DIR = pathlib.Path(__file__).parent.absolute()
DUMP_DIR = ROOT_DIR / 'dumps'
[ 11748, 18931, 198, 11748, 3108, 8019, 198, 198, 6404, 2667, 13, 35487, 16934, 7, 5715, 28, 6404, 2667, 13, 10778, 8, 198, 198, 2, 360, 17062, 198, 13252, 2394, 62, 34720, 796, 3108, 8019, 13, 15235, 7, 834, 7753, 834, 737, 8000, 13,...
2.639344
61
import re

from pyquery import PyQuery as pq

from .. import utils
from .constants import RANKINGS_SCHEME, RANKINGS_URL
from six.moves.urllib.error import HTTPError
[ 11748, 302, 198, 6738, 12972, 22766, 1330, 9485, 20746, 355, 279, 80, 198, 6738, 11485, 1330, 3384, 4487, 198, 6738, 764, 9979, 1187, 1330, 371, 15154, 20754, 62, 50, 3398, 3620, 36, 11, 371, 15154, 20754, 62, 21886, 198, 6738, 2237, ...
3.037037
54
import importlib
from ditk import logging
from collections import OrderedDict
from functools import wraps
import ding

'''
Overview:
    `hpc_wrapper` is the wrapper for functions which are supported by hpc. If a function is wrapped by it,
    we will search for its hpc type and return the function implemented by hpc.
    We will use the following code as a sample to introduce `hpc_wrapper`:
    ```
    @hpc_wrapper(shape_fn=shape_fn_dntd, namedtuple_data=True, include_args=[0,1,2,3],
                 include_kwargs=['data', 'gamma', 'v_min', 'v_max'], is_cls_method=False)
    def dist_nstep_td_error(
            data: namedtuple,
            gamma: float,
            v_min: float,
            v_max: float,
            n_atom: int,
            nstep: int = 1,
    ) -> torch.Tensor:
        ...
    ```
Parameters:
    - shape_fn (:obj:`function`): a function which returns the shape needed by the hpc function. In fact, it
        returns all args that the hpc function needs.
    - namedtuple_data (:obj:`bool`): If True, when the hpc function is called, it will be called as
        hpc_function(*namedtuple). If False, namedtuple data will remain its `namedtuple` type.
    - include_args (:obj:`list`): a list of indexes of the args that need to be set in the hpc function. As shown
        in the sample, include_args=[0,1,2,3], which means `data`, `gamma`, `v_min` and `v_max` will be set in
        the hpc function.
    - include_kwargs (:obj:`list`): a list of keys of the kwargs that need to be set in the hpc function. As shown
        in the sample, include_kwargs=['data', 'gamma', 'v_min', 'v_max'], which means `data`, `gamma`, `v_min`
        and `v_max` will be set in the hpc function.
    - is_cls_method (:obj:`bool`): If True, it means the function we wrap is a method of a class. `self` will be
        put into args. We will get rid of `self` in args. Besides, we will use its classname as its fn_name.
        If False, it means the function is a simple method.
Q&A:
    - Q: Do `include_args` and `include_kwargs` need to be set at the same time?
    - A: Yes. `include_args` and `include_kwargs` can deal with all types of input, such as
        (data, gamma, v_min=v_min, v_max=v_max) and (data, gamma, v_min, v_max).
    - Q: What is `hpc_fns`?
    - A: Here we show a normal `hpc_fns`:
        ```
        hpc_fns = {
            'fn_name1': {
                'runtime_name1': hpc_fn1,
                'runtime_name2': hpc_fn2,
                ...
            },
            ...
        }
        ```
        Besides, `per_fn_limit` means the max length of `hpc_fns[fn_name]`. When a new function comes, the oldest
        function will be popped from `hpc_fns[fn_name]`.
'''

hpc_fns = {}
per_fn_limit = 3
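The docstring above specifies the wrapper's contract, but the decorator body lies outside this excerpt. A simplified sketch of a registry-backed wrapper under those rules (everything beyond `hpc_fns`, `per_fn_limit`, and the documented parameters is an assumption):

def hpc_wrapper(shape_fn=None, namedtuple_data=False,
                include_args=None, include_kwargs=None, is_cls_method=False):
    # Simplified sketch only: the real dispatch builds hpc kernel args via
    # shape_fn and caches compiled kernels keyed by runtime shape.
    def decorate(fn):
        fn_name = fn.__name__

        @wraps(fn)
        def wrapper(*args, **kwargs):
            runtime_fns = hpc_fns.setdefault(fn_name, OrderedDict())
            # Evict the oldest kernel once the per-function cache is full.
            while len(runtime_fns) > per_fn_limit:
                runtime_fns.popitem(last=False)
            # Without a registered hpc kernel, fall back to the plain function.
            return fn(*args, **kwargs)

        return wrapper

    return decorate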
[ 11748, 1330, 8019, 198, 6738, 288, 270, 74, 1330, 18931, 198, 6738, 17268, 1330, 14230, 1068, 35, 713, 198, 6738, 1257, 310, 10141, 1330, 27521, 198, 11748, 44852, 198, 7061, 6, 198, 29064, 25, 198, 220, 220, 220, 4600, 71, 14751, 62,...
2.316195
1,167
# Return the count of int(s) in passed array.
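A one-line implementation matching the comment above (the function name is an assumption; `bool` is excluded deliberately since it subclasses `int`):

def count_ints(arr):
    # Count elements that are ints, ignoring booleans.
    return sum(1 for item in arr
               if isinstance(item, int) and not isinstance(item, bool))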
[ 2, 13615, 262, 954, 286, 493, 7, 82, 8, 287, 3804, 7177, 13, 220 ]
3.214286
14
import re from zlib import crc32 from ..utils import snake_to_camel_case CORE_TYPES = ( 0xbc799737, # boolFalse#bc799737 = Bool; 0x997275b5, # boolTrue#997275b5 = Bool; 0x3fedd339, # true#3fedd339 = True; 0x1cb5c415, # vector#1cb5c415 {t:Type} # [ t ] = Vector t; ) # https://github.com/telegramdesktop/tdesktop/blob/4bf66cb6e93f3965b40084771b595e93d0b11bcd/Telegram/SourceFiles/codegen/scheme/codegen_scheme.py#L57-L62 WHITELISTED_MISMATCHING_IDS = { # 0 represents any layer 0: {'ipPortSecret', 'accessPointRule', 'help.configSimple'}, 77: {'channel'}, 78: {'channel'} } def _from_line(line, is_function, layer): match = re.match( r'^([\w.]+)' # 'name' r'(?:#([0-9a-fA-F]+))?' # '#optionalcode' r'(?:\s{?\w+:[\w\d<>#.?!]+}?)*' # '{args:.0?type}' r'\s=\s' # ' = ' r'([\w\d<>#.?]+);$', # '<result.type>;' line ) if match is None: # Probably "vector#1cb5c415 {t:Type} # [ t ] = Vector t;" raise ValueError('Cannot parse TLObject {}'.format(line)) args_match = re.findall( r'({)?' r'(\w+)' r':' r'([\w\d<>#.?!]+)' r'}?', line ) return TLObject( fullname=match.group(1), object_id=match.group(2), result=match.group(3), is_function=is_function, layer=layer, args=[TLArg(name, arg_type, brace != '') for brace, name, arg_type in args_match] ) def parse_tl(file_path, layer, ignore_core=False): """This method yields TLObjects from a given .tl file.""" with open(file_path, encoding='utf-8') as file: is_function = False for line in file: comment_index = line.find('//') if comment_index != -1: line = line[:comment_index] line = line.strip() if not line: continue match = re.match('---(\w+)---', line) if match: following_types = match.group(1) is_function = following_types == 'functions' continue try: result = _from_line(line, is_function, layer=layer) if not ignore_core or result.id not in CORE_TYPES: yield result except ValueError as e: if 'vector#1cb5c415' not in str(e): raise def find_layer(file_path): """Finds the layer used on the specified scheme.tl file.""" layer_regex = re.compile(r'^//\s*LAYER\s*(\d+)$') with open(file_path, encoding='utf-8') as file: for line in file: match = layer_regex.match(line) if match: return int(match.group(1))
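A hedged usage sketch for the two parsers above, assuming a local `scheme.tl` file (note that `TLObject`/`TLArg`, referenced by `_from_line`, are defined elsewhere in the package):

# Hypothetical usage: find the layer, then count function definitions.
layer = find_layer('scheme.tl')
objects = list(parse_tl('scheme.tl', layer, ignore_core=True))
print(layer, sum(1 for o in objects if o.is_function), 'functions')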
[ 11748, 302, 198, 6738, 1976, 8019, 1330, 1067, 66, 2624, 198, 198, 6738, 11485, 26791, 1330, 17522, 62, 1462, 62, 66, 17983, 62, 7442, 198, 198, 34, 6965, 62, 9936, 47, 1546, 796, 357, 198, 220, 220, 220, 657, 87, 15630, 22, 39647, ...
1.88822
1,494
"Actions for compiling resx files" load( "@io_bazel_rules_dotnet//dotnet/private:providers.bzl", "DotnetResourceInfo", ) def emit_resx_core( dotnet, name = "", src = None, identifier = None, out = None, customresgen = None): """The function adds an action that compiles a single .resx file into .resources file. Returns [DotnetResourceInfo](api.md#dotnetresourceinfo). Args: dotnet: [DotnetContextInfo](api.md#dotnetcontextinfo). name: name of the file to generate. src: The .resx source file that is transformed into .resources file. Only `.resx` files are permitted. identifier: The logical name for the resource; the name that is used to load the resource. The default is the basename of the file name (no subfolder). out: An alternative name of the output file (if name should not be used). customresgen: custom resgen program to use. Returns: DotnetResourceInfo: [DotnetResourceInfo](api.md#dotnetresourceinfo). """ if name == "" and out == None: fail("either name or out must be set") if not out: result = dotnet.actions.declare_file(name + ".resources") else: result = dotnet.actions.declare_file(out) args = _make_runner_arglist(dotnet, src, result, customresgen.files_to_run.executable.path) # We use the command to extrace shell path and force runfiles creation resolve = dotnet._ctx.resolve_tools(tools = [customresgen]) inputs = src.files.to_list() if type(src) == "Target" else [src] dotnet.actions.run( inputs = inputs + resolve[0].to_list(), tools = customresgen.default_runfiles.files, outputs = [result], executable = customresgen.files_to_run, arguments = [args], env = {"RUNFILES_MANIFEST_FILE": customresgen.files_to_run.runfiles_manifest.path}, mnemonic = "CoreResxCompile", input_manifests = resolve[1], progress_message = ( "Compiling resoources" + dotnet.label.package + ":" + dotnet.label.name ), ) return DotnetResourceInfo( name = name, result = result, identifier = identifier, )
[ 1, 32, 2733, 329, 33393, 581, 87, 3696, 1, 198, 198, 2220, 7, 198, 220, 220, 220, 44212, 952, 62, 65, 41319, 62, 38785, 62, 26518, 3262, 1003, 26518, 3262, 14, 19734, 25, 15234, 4157, 13, 65, 48274, 1600, 198, 220, 220, 220, 366, ...
2.545558
878
# Owner(s): ["oncall: jit"]

import torch
import os
import sys
from torch.testing._internal.jit_utils import JitTestCase

# Make the helper files in test/ importable
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(pytorch_test_dir)

if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")
[ 2, 23853, 7, 82, 2599, 14631, 261, 13345, 25, 474, 270, 8973, 198, 198, 11748, 28034, 198, 11748, 28686, 198, 11748, 25064, 198, 6738, 28034, 13, 33407, 13557, 32538, 13, 45051, 62, 26791, 1330, 449, 270, 14402, 20448, 198, 198, 2, 68...
2.408867
203
import gym
import pix_sample_arena
import time
import pybullet as p
import pybullet_data
import cv2

if __name__ == "__main__":
    env = gym.make("pix_sample_arena-v0")
    x = 0
    while True:
        p.stepSimulation()
        time.sleep(100)
[ 11748, 11550, 198, 11748, 279, 844, 62, 39873, 62, 533, 2616, 198, 11748, 640, 198, 11748, 12972, 15065, 1616, 355, 279, 198, 11748, 12972, 15065, 1616, 62, 7890, 198, 11748, 269, 85, 17, 198, 198, 361, 11593, 3672, 834, 6624, 366, 83...
2.326923
104
from datetime import time

import pytest

from i3_battery_block_vgg.timeparser import __parse_time_manually
from i3_battery_block_vgg.timeparser import parse_time
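Only the imports survive in this row. A representative pytest case for `parse_time` might look like this (the "HH:MM[:SS]" semantics are assumed from the function name, not taken from the original tests):

@pytest.mark.parametrize("given, expected", [
    ("12:30", time(hour=12, minute=30)),
    ("12:30:45", time(hour=12, minute=30, second=45)),
])
def test_parse_time(given, expected):
    # Hypothetical expectation: parse_time maps "HH:MM[:SS]" to datetime.time.
    assert parse_time(given) == expected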
[ 6738, 4818, 8079, 1330, 640, 198, 198, 11748, 12972, 9288, 198, 198, 6738, 1312, 18, 62, 65, 16296, 62, 9967, 62, 85, 1130, 13, 2435, 48610, 1330, 11593, 29572, 62, 2435, 62, 805, 935, 198, 6738, 1312, 18, 62, 65, 16296, 62, 9967, ...
3
55
# RUN: %PYTHON% %s 2>&1 | FileCheck %s from __future__ import annotations import mlir import pycde from pycde import (Input, Output, Parameter, module, externmodule, generator, types, dim) from circt.dialects import comb, hw class Coefficients: def __init__(self, coeff): self.coeff = coeff poly = Polynomial() poly.graph() # CHECK-LABEL: digraph "top" # CHECK: label="top"; # CHECK: [shape=record,label="{hw.constant\ni32\n\nvalue: 23 : i32}"]; poly.print() # CHECK-LABEL: hw.module @top() -> (%y: i32) # CHECK: [[REG0:%.+]] = "pycde.PolynomialCompute"(%c23_i32) {instanceName = "example", opNames = ["x"], parameters = {coefficients = {coeff = [62, 42, 6]}, module_name = "PolyComputeForCoeff_62_42_6", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32 # CHECK: [[REG1:%.+]] = "pycde.PolynomialCompute"([[REG0]]) {instanceName = "example2", opNames = ["x"], parameters = {coefficients = {coeff = [62, 42, 6]}, module_name = "PolyComputeForCoeff_62_42_6", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32 # CHECK: [[REG2:%.+]] = "pycde.PolynomialCompute"([[REG0]]) {instanceName = "example2", opNames = ["x"], parameters = {coefficients = {coeff = [1, 2, 3, 4, 5]}, module_name = "PolyComputeForCoeff_1_2_3_4_5", unused_parameter = true}, resultNames = ["y"]} : (i32) -> i32 # CHECK: [[REG3:%.+]] = "pycde.CoolPolynomialCompute"(%c23_i32) {coefficients = [4, 42], opNames = ["x"], parameters = {}, resultNames = ["y"]} : (i32) -> i32 # CHECK: hw.output [[REG0]] : i32 poly.generate() poly.print() # CHECK-LABEL: hw.module @top # CHECK: %example.y = hw.instance "example" @PolyComputeForCoeff_62_42_6(%c23_i32) {parameters = {}} : (i32) -> i32 # CHECK: %example2.y = hw.instance "example2" @PolyComputeForCoeff_62_42_6(%example.y) {parameters = {}} : (i32) -> i32 # CHECK: %example2.y_0 = hw.instance "example2" @PolyComputeForCoeff_1_2_3_4_5(%example.y) {parameters = {}} : (i32) -> i32 # CHECK: %pycde.CoolPolynomialCompute.y = hw.instance "pycde.CoolPolynomialCompute" @supercooldevice(%c23_i32) {coefficients = [4, 42], parameters = {}} : (i32) -> i32 # CHECK-LABEL: hw.module @PolyComputeForCoeff_62_42_6(%x: i32) -> (%y: i32) # CHECK: hw.constant 62 # CHECK: hw.constant 42 # CHECK: hw.constant 6 # CHECK-LABEL: hw.module @PolyComputeForCoeff_1_2_3_4_5(%x: i32) -> (%y: i32) # CHECK: hw.constant 1 # CHECK: hw.constant 2 # CHECK: hw.constant 3 # CHECK: hw.constant 4 # CHECK: hw.constant 5 # CHECK-NOT: hw.module @pycde.PolynomialCompute print("\n\n=== Verilog ===") # CHECK-LABEL: === Verilog === poly.print_verilog() # CHECK-LABEL: module PolyComputeForCoeff_62_42_6( # CHECK: input [31:0] x, # CHECK: output [31:0] y);
[ 2, 32494, 25, 4064, 47, 56, 4221, 1340, 4, 4064, 82, 362, 29, 5, 16, 930, 9220, 9787, 4064, 82, 198, 198, 6738, 11593, 37443, 834, 1330, 37647, 198, 198, 11748, 25962, 343, 198, 198, 11748, 12972, 66, 2934, 198, 6738, 12972, 66, 2...
2.288721
1,188
#! /usr/bin/env python
import arc
import sys
import os

# wait for all the background threads to finish before we destroy the objects they may use
import atexit

# arc.Logger.getRootLogger().addDestination(arc.LogStream(sys.stderr))
# arc.Logger.getRootLogger().setThreshold(arc.DEBUG)

# run the example (the example() function itself is elided in this excerpt)
example()
[ 2, 0, 1220, 14629, 14, 8800, 14, 24330, 21015, 198, 11748, 10389, 198, 11748, 25064, 198, 11748, 28686, 628, 198, 2, 4043, 329, 477, 262, 4469, 14390, 284, 5461, 878, 356, 4117, 262, 5563, 484, 743, 779, 198, 11748, 379, 37023, 198, ...
3.191919
99
# coding: utf-8 # # Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Domain objects and functions that manage rights for various user actions.""" import logging from constants import constants from core.domain import activity_services from core.domain import role_services from core.domain import subscription_services from core.domain import user_services from core.platform import models import feconf import utils current_user_services = models.Registry.import_current_user_services() (collection_models, exp_models,) = models.Registry.import_models([ models.NAMES.collection, models.NAMES.exploration ]) # IMPORTANT: Ensure that all changes to how these cmds are interpreted preserve # backward-compatibility with previous exploration snapshots in the datastore. # Do not modify the definitions of CMD keys that already exist. CMD_CREATE_NEW = 'create_new' CMD_CHANGE_ROLE = 'change_role' CMD_CHANGE_EXPLORATION_STATUS = 'change_exploration_status' CMD_CHANGE_COLLECTION_STATUS = 'change_collection_status' CMD_CHANGE_PRIVATE_VIEWABILITY = 'change_private_viewability' CMD_RELEASE_OWNERSHIP = 'release_ownership' CMD_UPDATE_FIRST_PUBLISHED_MSEC = 'update_first_published_msec' ACTIVITY_STATUS_PRIVATE = constants.ACTIVITY_STATUS_PRIVATE ACTIVITY_STATUS_PUBLIC = constants.ACTIVITY_STATUS_PUBLIC ROLE_OWNER = 'owner' ROLE_EDITOR = 'editor' ROLE_TRANSLATOR = 'translator' ROLE_VIEWER = 'viewer' ROLE_NONE = 'none' ROLE_ADMIN = 'admin' ROLE_MODERATOR = 'moderator' def get_activity_rights_from_model(activity_rights_model, activity_type): """Constructs an ActivityRights object from the given activity rights model. Args: activity_rights_model: ActivityRightsModel. Activity rights from the datastore. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Returns: ActivityRights. The rights object created from the model. """ return ActivityRights( activity_rights_model.id, activity_rights_model.owner_ids, activity_rights_model.editor_ids, activity_rights_model.translator_ids, activity_rights_model.viewer_ids, community_owned=activity_rights_model.community_owned, cloned_from=( activity_rights_model.cloned_from if activity_type == constants.ACTIVITY_TYPE_EXPLORATION else None), status=activity_rights_model.status, viewable_if_private=activity_rights_model.viewable_if_private, first_published_msec=activity_rights_model.first_published_msec ) def _save_activity_rights( committer_id, activity_rights, activity_type, commit_message, commit_cmds): """Saves an ExplorationRights or CollectionRights domain object to the datastore. Args: committer_id: str. ID of the committer. activity_rights: ActivityRights. The rights object for the given activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION commit_message: str. Descriptive message for the commit. commit_cmds: list(dict). 
A list of commands describing what kind of commit was done. """ activity_rights.validate() if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: model_cls = exp_models.ExplorationRightsModel elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: model_cls = collection_models.CollectionRightsModel model = model_cls.get(activity_rights.id, strict=False) model.owner_ids = activity_rights.owner_ids model.editor_ids = activity_rights.editor_ids model.viewer_ids = activity_rights.viewer_ids model.translator_ids = activity_rights.translator_ids model.community_owned = activity_rights.community_owned model.status = activity_rights.status model.viewable_if_private = activity_rights.viewable_if_private model.first_published_msec = activity_rights.first_published_msec model.commit(committer_id, commit_message, commit_cmds) def _update_exploration_summary(activity_rights): """Updates the exploration summary for the activity associated with the given rights object. The ID of rights object is the same as the ID of associated activity. Args: activity_rights: ActivityRights. The rights object for the given activity. """ # TODO(msl): get rid of inline imports by refactoring code. from core.domain import exp_services exp_services.update_exploration_summary( activity_rights.id, None) def _update_collection_summary(activity_rights): """Updates the collection summary for the given activity associated with the given rights object. The ID of rights object is the same as the ID of associated activity. Args: activity_rights: ActivityRights. The rights object for the given activity. """ from core.domain import collection_services collection_services.update_collection_summary( activity_rights.id, None) def _update_activity_summary(activity_type, activity_rights): """Updates the activity summary for the given activity associated with the given rights object. The ID of rights object is the same as the ID of associated activity. Args: activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION activity_rights: ActivityRights. The rights object for the given activity. """ if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: _update_exploration_summary(activity_rights) elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: _update_collection_summary(activity_rights) def update_activity_first_published_msec( activity_type, activity_id, first_published_msec): """Updates the first_published_msec field for the given activity. The caller is responsible for ensuring that this value is not already set before updating it. Args: activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION activity_id: str. ID of the activity. first_published_msec: float. First publication time in milliseconds since the Epoch. """ activity_rights = _get_activity_rights(activity_type, activity_id) commit_cmds = [{ 'cmd': CMD_UPDATE_FIRST_PUBLISHED_MSEC, 'old_first_published_msec': activity_rights.first_published_msec, 'new_first_published_msec': first_published_msec }] activity_rights.first_published_msec = first_published_msec _save_activity_rights( feconf.SYSTEM_COMMITTER_ID, activity_rights, activity_type, 'set first published time in msec', commit_cmds) def create_new_exploration_rights(exploration_id, committer_id): """Creates a new exploration rights object and saves it to the datastore. Subscribes the committer to the new exploration. Args: exploration_id: str. ID of the exploration. 
committer_id: str. ID of the committer. """ exploration_rights = ActivityRights( exploration_id, [committer_id], [], [], []) commit_cmds = [{'cmd': CMD_CREATE_NEW}] exp_models.ExplorationRightsModel( id=exploration_rights.id, owner_ids=exploration_rights.owner_ids, editor_ids=exploration_rights.editor_ids, translator_ids=exploration_rights.translator_ids, viewer_ids=exploration_rights.viewer_ids, community_owned=exploration_rights.community_owned, status=exploration_rights.status, viewable_if_private=exploration_rights.viewable_if_private, first_published_msec=exploration_rights.first_published_msec, ).commit(committer_id, 'Created new exploration', commit_cmds) subscription_services.subscribe_to_exploration( committer_id, exploration_id) def get_exploration_rights(exploration_id, strict=True): """Retrieves the rights for this exploration from the datastore. Args: exploration_id: str. ID of the exploration. strict: bool. Whether to raise an error if there is no exploration matching the given ID. Returns: ActivityRights. The rights object for the given exploration. Raises: EntityNotFoundError. The exploration with ID exploration_id was not found in the datastore. """ model = exp_models.ExplorationRightsModel.get( exploration_id, strict=strict) if model is None: return None return get_activity_rights_from_model( model, constants.ACTIVITY_TYPE_EXPLORATION) def get_multiple_exploration_rights_by_ids(exp_ids): """Returns a list of ActivityRights objects for given exploration ids. Args: exp_ids: list(str). List of exploration ids. Returns: list(ActivityRights or None). List of rights object containing ActivityRights object for existing exploration or None. """ exp_rights_models = exp_models.ExplorationRightsModel.get_multi( exp_ids) exp_models_list = [] for model in exp_rights_models: if model is None: exp_models_list.append(None) else: exp_models_list.append( get_activity_rights_from_model( model, constants.ACTIVITY_TYPE_EXPLORATION)) return exp_models_list def is_exploration_private(exploration_id): """Returns whether exploration is private. Args: exploration_id: str. ID of the exploration. Returns: bool. Whether the exploration is private or not. """ exploration_rights = get_exploration_rights(exploration_id) return exploration_rights.status == ACTIVITY_STATUS_PRIVATE def is_exploration_public(exploration_id): """Returns whether exploration is public. Args: exploration_id: str. ID of the exploration. Returns: bool. Whether the exploration is public. """ exploration_rights = get_exploration_rights(exploration_id) return exploration_rights.status == ACTIVITY_STATUS_PUBLIC def is_exploration_cloned(exploration_id): """Returns whether the exploration is a clone of another exploration. Args: exploration_id: str. ID of the exploration. Returns: bool. Whether the exploration is a clone of another exploration. """ exploration_rights = get_exploration_rights(exploration_id) return bool(exploration_rights.cloned_from) def create_new_collection_rights(collection_id, committer_id): """Creates a new collection rights object and saves it to the datastore. Subscribes the committer to the new collection. Args: collection_id: str. ID of the collection. committer_id: str. ID of the committer. 
""" collection_rights = ActivityRights( collection_id, [committer_id], [], [], []) commit_cmds = [{'cmd': CMD_CREATE_NEW}] collection_models.CollectionRightsModel( id=collection_rights.id, owner_ids=collection_rights.owner_ids, editor_ids=collection_rights.editor_ids, translator_ids=collection_rights.translator_ids, viewer_ids=collection_rights.viewer_ids, community_owned=collection_rights.community_owned, status=collection_rights.status, viewable_if_private=collection_rights.viewable_if_private, first_published_msec=collection_rights.first_published_msec ).commit(committer_id, 'Created new collection', commit_cmds) subscription_services.subscribe_to_collection(committer_id, collection_id) def get_collection_rights(collection_id, strict=True): """Retrieves the rights for this collection from the datastore. Args: collection_id: str. ID of the collection. strict: bool. Whether to raise an error if ID is not found. Returns: ActivityRights. The rights object for the collection. Raises: EntityNotFoundError. The collection with ID collection_id is not found in the datastore. """ model = collection_models.CollectionRightsModel.get( collection_id, strict=strict) if model is None: return None return get_activity_rights_from_model( model, constants.ACTIVITY_TYPE_COLLECTION) def get_collection_owner_names(collection_id): """Retrieves the owners for this collection from the datastore. Args: collection_id: str. ID of the collection. Returns: list(str). Human-readable usernames (or truncated email addresses) of owners for this collection. """ collection_rights = get_collection_rights(collection_id) return user_services.get_human_readable_user_ids( collection_rights.owner_ids) def is_collection_private(collection_id): """Returns whether the collection is private. Args: collection_id: str. ID of the collection. Returns: bool. Whether the collection is private. """ collection_rights = get_collection_rights(collection_id) return collection_rights.status == ACTIVITY_STATUS_PRIVATE def is_collection_public(collection_id): """Returns whether the collection is public. Args: collection_id: str. ID of the collection. Returns: bool. Whether the collection is public. """ collection_rights = get_collection_rights(collection_id) return collection_rights.status == ACTIVITY_STATUS_PUBLIC def _get_activity_rights(activity_type, activity_id): """Retrieves the rights object for the given activity based on its type. Args: activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION activity_id: str. ID of the activity. Returns: ActivityRights. The rights object associated with the given activity. Raises: Exception. activity_type provided is unknown. """ if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: return get_exploration_rights(activity_id, strict=False) elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: return get_collection_rights(activity_id, strict=False) else: raise Exception( 'Cannot get activity rights for unknown activity type: %s' % ( activity_type)) def check_can_access_activity(user, activity_rights): """Checks whether the user can access given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: AcitivityRights or None. Rights object for the given activity. Returns: bool. Whether the given activity can be accessed by the given user. 
""" if activity_rights is None: return False elif activity_rights.is_published(): return bool( role_services.ACTION_PLAY_ANY_PUBLIC_ACTIVITY in user.actions) elif activity_rights.is_private(): return bool( (role_services.ACTION_PLAY_ANY_PRIVATE_ACTIVITY in user.actions) or activity_rights.is_viewer(user.user_id) or activity_rights.is_owner(user.user_id) or activity_rights.is_editor(user.user_id) or activity_rights.is_translator(user.user_id) or activity_rights.viewable_if_private) def check_can_edit_activity(user, activity_rights): """Checks whether the user can edit given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the given user can edit this activity. """ if activity_rights is None: return False if role_services.ACTION_EDIT_OWNED_ACTIVITY not in user.actions: return False if (activity_rights.is_owner(user.user_id) or activity_rights.is_editor(user.user_id)): return True if (activity_rights.community_owned or (role_services.ACTION_EDIT_ANY_ACTIVITY in user.actions)): return True if (activity_rights.is_published() and (role_services.ACTION_EDIT_ANY_PUBLIC_ACTIVITY in user.actions)): return True return False def check_can_translate_activity(user, activity_rights): """Checks whether the user can translate given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the given user can translate this activity. """ if activity_rights is None: return False if role_services.ACTION_EDIT_OWNED_ACTIVITY not in user.actions: return False if (activity_rights.is_owner(user.user_id) or activity_rights.is_editor(user.user_id) or activity_rights.is_translator(user.user_id)): return True if (activity_rights.community_owned or (role_services.ACTION_EDIT_ANY_ACTIVITY in user.actions)): return True if (activity_rights.is_published() and (role_services.ACTION_EDIT_ANY_PUBLIC_ACTIVITY in user.actions)): return True return False def check_can_delete_activity(user, activity_rights): """Checks whether the user can delete given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can delete given activity. """ if activity_rights is None: return False if role_services.ACTION_DELETE_ANY_ACTIVITY in user.actions: return True elif (activity_rights.is_private() and (role_services.ACTION_DELETE_OWNED_PRIVATE_ACTIVITY in user.actions) and activity_rights.is_owner(user.user_id)): return True elif (activity_rights.is_published() and (role_services.ACTION_DELETE_ANY_PUBLIC_ACTIVITY in user.actions)): return True return False def check_can_modify_activity_roles(user, activity_rights): """Checks whether the user can modify roles for given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can modify roles for given activity. 
""" if activity_rights is None: return False if (activity_rights.community_owned or activity_rights.cloned_from): return False if (role_services.ACTION_MODIFY_ROLES_FOR_ANY_ACTIVITY in user.actions): return True if (role_services.ACTION_MODIFY_ROLES_FOR_OWNED_ACTIVITY in user.actions): if activity_rights.is_owner(user.user_id): return True return False def check_can_release_ownership(user, activity_rights): """Checks whether the user can release ownership for given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can release ownership for given activity. """ if activity_rights is None: return False if activity_rights.is_private(): return False return check_can_modify_activity_roles( user, activity_rights) def check_can_publish_activity(user, activity_rights): """Checks whether the user can publish given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can publish given activity. """ if activity_rights is None: return False if activity_rights.cloned_from: return False if activity_rights.is_published(): return False if role_services.ACTION_PUBLISH_ANY_ACTIVITY in user.actions: return True if role_services.ACTION_PUBLISH_OWNED_ACTIVITY in user.actions: if activity_rights.is_owner(user.user_id): return True return False def check_can_unpublish_activity(user, activity_rights): """Checks whether the user can unpublish given activity. Args: user: UserActionsInfo. Object having user_id, role and actions for given user. activity_rights: ActivityRights or None. Rights object for the given activity. Returns: bool. Whether the user can unpublish given activity. """ if activity_rights is None: return False if activity_rights.community_owned: return False if activity_rights.is_published(): if role_services.ACTION_UNPUBLISH_ANY_PUBLIC_ACTIVITY in user.actions: return True return False def _assign_role( committer, assignee_id, new_role, activity_id, activity_type): """Assigns a new role to the user. Args: committer: UserActionsInfo. UserActionInfo object for the user who is performing the action. assignee_id: str. ID of the user whose role is being changed. new_role: str. The name of the new role: One of ROLE_OWNER ROLE_EDITOR ROLE_TRANSLATOR ROLE_VIEWER activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Raises: Exception. The committer does not have rights to modify a role. Exception. The user already owns the activity. Exception. The user can already edit the activity. Exception. The user can already translate the activity. Exception. The activity is already publicly editable. Exception. The activity is already publicly translatable. Exception. The user can already view the activity. Exception. The activity is already publicly viewable. Exception. The role is invalid. """ committer_id = committer.user_id activity_rights = _get_activity_rights(activity_type, activity_id) if not check_can_modify_activity_roles(committer, activity_rights): logging.error( 'User %s tried to allow user %s to be a(n) %s of activity %s ' 'but was refused permission.' 
% ( committer_id, assignee_id, new_role, activity_id)) raise Exception( 'UnauthorizedUserException: Could not assign new role.') assignee_username = user_services.get_username(assignee_id) old_role = ROLE_NONE if new_role == ROLE_OWNER: if activity_rights.is_owner(assignee_id): raise Exception('This user already owns this %s.' % activity_type) activity_rights.owner_ids.append(assignee_id) if assignee_id in activity_rights.viewer_ids: activity_rights.viewer_ids.remove(assignee_id) old_role = ROLE_VIEWER if assignee_id in activity_rights.editor_ids: activity_rights.editor_ids.remove(assignee_id) old_role = ROLE_EDITOR if assignee_id in activity_rights.translator_ids: activity_rights.translator_ids.remove(assignee_id) old_role = ROLE_TRANSLATOR elif new_role == ROLE_EDITOR: if (activity_rights.is_editor(assignee_id) or activity_rights.is_owner(assignee_id)): raise Exception( 'This user already can edit this %s.' % activity_type) if activity_rights.community_owned: raise Exception( 'Community-owned %ss can be edited by anyone.' % activity_type) activity_rights.editor_ids.append(assignee_id) if assignee_id in activity_rights.translator_ids: activity_rights.translator_ids.remove(assignee_id) old_role = ROLE_TRANSLATOR if assignee_id in activity_rights.viewer_ids: activity_rights.viewer_ids.remove(assignee_id) old_role = ROLE_VIEWER elif new_role == ROLE_TRANSLATOR: if (activity_rights.is_editor(assignee_id) or activity_rights.is_translator(assignee_id) or activity_rights.is_owner(assignee_id)): raise Exception( 'This user already can translate this %s.' % activity_type) if activity_rights.community_owned: raise Exception( 'Community-owned %ss can be translated by anyone.' % activity_type) activity_rights.translator_ids.append(assignee_id) if assignee_id in activity_rights.viewer_ids: activity_rights.viewer_ids.remove(assignee_id) old_role = ROLE_VIEWER elif new_role == ROLE_VIEWER: if (activity_rights.is_owner(assignee_id) or activity_rights.is_editor(assignee_id) or activity_rights.is_viewer(assignee_id)): raise Exception( 'This user already can view this %s.' % activity_type) if activity_rights.status != ACTIVITY_STATUS_PRIVATE: raise Exception( 'Public %ss can be viewed by anyone.' % activity_type) activity_rights.viewer_ids.append(assignee_id) else: raise Exception('Invalid role: %s' % new_role) commit_message = 'Changed role of %s from %s to %s' % ( assignee_username, old_role, new_role) commit_cmds = [{ 'cmd': CMD_CHANGE_ROLE, 'assignee_id': assignee_id, 'old_role': old_role, 'new_role': new_role }] _save_activity_rights( committer_id, activity_rights, activity_type, commit_message, commit_cmds) _update_activity_summary(activity_type, activity_rights) def _release_ownership_of_activity(committer, activity_id, activity_type): """Releases ownership of the given activity to the community. Args: committer: UserActionsInfo. UserActionsInfo object for the user who is performing the action. activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Raise: Exception. The committer does not have release rights. """ committer_id = committer.user_id activity_rights = _get_activity_rights(activity_type, activity_id) if not check_can_release_ownership(committer, activity_rights): logging.error( 'User %s tried to release ownership of %s %s but was ' 'refused permission.' % (committer_id, activity_type, activity_id)) raise Exception( 'The ownership of this %s cannot be released.' 
% activity_type) activity_rights.community_owned = True activity_rights.owner_ids = [] activity_rights.editor_ids = [] activity_rights.viewer_ids = [] commit_cmds = [{ 'cmd': CMD_RELEASE_OWNERSHIP, }] _save_activity_rights( committer_id, activity_rights, activity_type, '%s ownership released to the community.' % activity_type, commit_cmds) _update_activity_summary(activity_type, activity_rights) def _change_activity_status( committer_id, activity_id, activity_type, new_status, commit_message): """Changes the status of the given activity. Args: committer_id: str. ID of the user who is performing the update action. activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION new_status: str. The new status of the activity. commit_message: str. The human-written commit message for this change. """ activity_rights = _get_activity_rights(activity_type, activity_id) old_status = activity_rights.status activity_rights.status = new_status if activity_type == constants.ACTIVITY_TYPE_EXPLORATION: cmd_type = CMD_CHANGE_EXPLORATION_STATUS elif activity_type == constants.ACTIVITY_TYPE_COLLECTION: cmd_type = CMD_CHANGE_COLLECTION_STATUS commit_cmds = [{ 'cmd': cmd_type, 'old_status': old_status, 'new_status': new_status }] if new_status != ACTIVITY_STATUS_PRIVATE: activity_rights.viewer_ids = [] if activity_rights.first_published_msec is None: activity_rights.first_published_msec = ( utils.get_current_time_in_millisecs()) _save_activity_rights( committer_id, activity_rights, activity_type, commit_message, commit_cmds) _update_activity_summary(activity_type, activity_rights) def _publish_activity(committer, activity_id, activity_type): """Publishes the given activity. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Raises: Exception. The committer does not have rights to publish the activity. """ committer_id = committer.user_id activity_rights = _get_activity_rights(activity_type, activity_id) if not check_can_publish_activity(committer, activity_rights): logging.error( 'User %s tried to publish %s %s but was refused ' 'permission.' % (committer_id, activity_type, activity_id)) raise Exception('This %s cannot be published.' % activity_type) _change_activity_status( committer_id, activity_id, activity_type, ACTIVITY_STATUS_PUBLIC, '%s published.' % activity_type) def _unpublish_activity(committer, activity_id, activity_type): """Unpublishes the given activity. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. activity_id: str. ID of the activity. activity_type: str. The type of activity. Possible values: constants.ACTIVITY_TYPE_EXPLORATION constants.ACTIVITY_TYPE_COLLECTION Raises: Exception. The committer does not have rights to unpublish the activity. """ committer_id = committer.user_id activity_rights = _get_activity_rights(activity_type, activity_id) if not check_can_unpublish_activity(committer, activity_rights): logging.error( 'User %s tried to unpublish %s %s but was refused ' 'permission.' % (committer_id, activity_type, activity_id)) raise Exception('This %s cannot be unpublished.' % activity_type) _change_activity_status( committer_id, activity_id, activity_type, ACTIVITY_STATUS_PRIVATE, '%s unpublished.' 
% activity_type) activity_services.remove_featured_activity(activity_type, activity_id) # Rights functions for activities. def assign_role_for_exploration( committer, exploration_id, assignee_id, new_role): """Assigns a user to the given role and subscribes the assignee to future exploration updates. The caller should ensure that assignee_id corresponds to a valid user in the system. Args: committer: UserActionsInfo. The UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. assignee_id: str. ID of the user whose role is being changed. new_role: str. The name of the new role: One of ROLE_OWNER ROLE_EDITOR ROLE_TRANSLATOR Raises: Exception. This could potentially throw an exception from _assign_role. """ _assign_role( committer, assignee_id, new_role, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) if new_role in [ROLE_OWNER, ROLE_EDITOR, ROLE_TRANSLATOR]: subscription_services.subscribe_to_exploration( assignee_id, exploration_id) def release_ownership_of_exploration(committer, exploration_id): """Releases ownership of the given exploration to the community. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. Raises: Exception. This could potentially throw an exception from _release_ownership_of_activity. """ _release_ownership_of_activity( committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) def set_private_viewability_of_exploration( committer, exploration_id, viewable_if_private): """Sets the viewable_if_private attribute for the given exploration's rights object. If viewable_if_private is True, this allows a private exploration to be viewed by anyone with the link. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. viewable_if_private: bool. Whether the exploration should be made viewable (by anyone with the link). Raises: Exception. The committer does not have the permission to perform change action. Exception. If the viewable_if_private property is already as desired. """ committer_id = committer.user_id exploration_rights = get_exploration_rights(exploration_id) # The user who can publish activity can change its private viewability. if not check_can_publish_activity(committer, exploration_rights): logging.error( 'User %s tried to change private viewability of exploration %s ' 'but was refused permission.' % (committer_id, exploration_id)) raise Exception( 'The viewability status of this exploration cannot be changed.') old_viewable_if_private = exploration_rights.viewable_if_private if old_viewable_if_private == viewable_if_private: raise Exception( 'Trying to change viewability status of this exploration to %s, ' 'but that is already the current value.' % viewable_if_private) exploration_rights.viewable_if_private = viewable_if_private commit_cmds = [{ 'cmd': CMD_CHANGE_PRIVATE_VIEWABILITY, 'old_viewable_if_private': old_viewable_if_private, 'new_viewable_if_private': viewable_if_private, }] commit_message = ( 'Made exploration viewable to anyone with the link.' if viewable_if_private else 'Made exploration viewable only to invited playtesters.') _save_activity_rights( committer_id, exploration_rights, constants.ACTIVITY_TYPE_EXPLORATION, commit_message, commit_cmds) _update_exploration_summary(exploration_rights) def publish_exploration(committer, exploration_id): """Publishes the given exploration. It is the responsibility of the caller to check that the exploration is valid prior to publication. 
Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. Raises: Exception. This could potentially throw an exception from _publish_activity. """ _publish_activity( committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) def unpublish_exploration(committer, exploration_id): """Unpublishes the given exploration. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. exploration_id: str. ID of the exploration. Raises: Exception. This could potentially throw an exception from _unpublish_activity. """ _unpublish_activity( committer, exploration_id, constants.ACTIVITY_TYPE_EXPLORATION) # Rights functions for collections. def assign_role_for_collection( committer, collection_id, assignee_id, new_role): """Assign the given user to the given role and subscribes the assignee to future collection updates. The caller should ensure that assignee_id corresponds to a valid user in the system. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. assignee_id: str. ID of the user whose role is being changed. new_role: str. The name of the new role: One of ROLE_OWNER ROLE_EDITOR Raises: Exception. This could potentially throw an exception from _assign_role. """ _assign_role( committer, assignee_id, new_role, collection_id, constants.ACTIVITY_TYPE_COLLECTION) if new_role in [ROLE_OWNER, ROLE_EDITOR]: subscription_services.subscribe_to_collection( assignee_id, collection_id) def release_ownership_of_collection(committer, collection_id): """Releases ownership of the given collection to the community. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. Raises: Exception. This could potentially throw an exception from _release_ownership_of_activity. """ _release_ownership_of_activity( committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION) def publish_collection(committer, collection_id): """Publishes the given collection. It is the responsibility of the caller to check that the collection is valid prior to publication. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. Raises: Exception. This could potentially throw an exception from _publish_activity. """ _publish_activity( committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION) def unpublish_collection(committer, collection_id): """Unpublishes the given collection. Args: committer: UserActionsInfo. UserActionsInfo object for the committer. collection_id: str. ID of the collection. Raises: Exception. This could potentially throw an exception from _unpublish_activity. """ _unpublish_activity( committer, collection_id, constants.ACTIVITY_TYPE_COLLECTION)
[ 2, 19617, 25, 3384, 69, 12, 23, 198, 2, 198, 2, 15069, 1946, 383, 9385, 544, 46665, 13, 1439, 6923, 33876, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743...
2.625711
15,130
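The rights-manager excerpt above threads every mutation through a list of commit commands. A minimal standalone sketch of that pattern (the ActivityRights stand-in, the change_status helper, and the status constants are illustrative only, not Oppia's real storage layer):

ACTIVITY_STATUS_PRIVATE = 'private'
ACTIVITY_STATUS_PUBLIC = 'public'

class ActivityRights:
    """Hypothetical stand-in for Oppia's rights object."""
    def __init__(self):
        self.status = ACTIVITY_STATUS_PRIVATE
        self.viewer_ids = ['viewer-1']

def change_status(rights, new_status):
    # Record the transition as a commit command, mirroring what
    # _change_activity_status does above.
    commit_cmds = [{
        'cmd': 'change_activity_status',
        'old_status': rights.status,
        'new_status': new_status,
    }]
    rights.status = new_status
    if new_status != ACTIVITY_STATUS_PRIVATE:
        # Explicit viewer grants are only meaningful while private.
        rights.viewer_ids = []
    return commit_cmds

rights = ActivityRights()
print(change_status(rights, ACTIVITY_STATUS_PUBLIC))
# [{'cmd': 'change_activity_status', 'old_status': 'private', 'new_status': 'public'}]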
"""For neatly implementing static typing in packaging. `mypy` - the static type analysis tool we use - uses the `typing` module, which provides core functionality fundamental to mypy's functioning. Generally, `typing` would be imported at runtime and used in that fashion - it acts as a no-op at runtime and does not have any run-time overhead by design. As it turns out, `typing` is not vendorable - it uses separate sources for Python 2/Python 3. Thus, this codebase can not expect it to be present. To work around this, mypy allows the typing import to be behind a False-y optional to prevent it from running at runtime and type-comments can be used to remove the need for the types to be accessible directly during runtime. This module provides the False-y guard in a nicely named fashion so that a curious maintainer can reach here to read this. In packaging, all static-typing related imports should be guarded as follows: from packaging._typing import TYPE_CHECKING if TYPE_CHECKING: from typing import ... Ref: https://github.com/python/mypy/issues/3216 """ __all__ = ["TYPE_CHECKING", "cast"] # The TYPE_CHECKING constant defined by the typing module is False at runtime # but True while type checking. TYPE_CHECKING = False # pragma: no cover # typing's cast syntax requires calling typing.cast at runtime, but we don't # want to import typing at runtime. Here, we inform the type checkers that # we're importing `typing.cast` as `cast` and re-implement typing.cast's # runtime behavior in a block that is ignored by type checkers. if TYPE_CHECKING: # pragma: no cover # not executed at runtime from typing import cast else: # executed at runtime
[ 37811, 1890, 29776, 15427, 9037, 19720, 287, 16846, 13, 198, 198, 63, 1820, 9078, 63, 532, 262, 9037, 2099, 3781, 2891, 356, 779, 532, 3544, 262, 4600, 774, 13886, 63, 8265, 11, 543, 198, 15234, 1460, 4755, 11244, 7531, 284, 616, 9078...
3.723684
456
from cereal import car
from common.numpy_fast import clip, interp
from selfdrive.car import apply_toyota_steer_torque_limits, create_gas_interceptor_command, make_can_msg
from selfdrive.car.toyota.toyotacan import create_steer_command, create_ui_command, \
                                           create_accel_command, create_acc_cancel_command, \
                                           create_fcw_command, create_lta_steer_command
from selfdrive.car.toyota.values import CAR, STATIC_DSU_MSGS, NO_STOP_TIMER_CAR, TSS2_CAR, \
                                        MIN_ACC_SPEED, PEDAL_TRANSITION, CarControllerParams
from opendbc.can.packer import CANPacker
from common.op_params import opParams

VisualAlert = car.CarControl.HUDControl.VisualAlert
[ 6738, 33158, 1330, 1097, 198, 6738, 2219, 13, 77, 32152, 62, 7217, 1330, 10651, 11, 987, 79, 198, 6738, 2116, 19472, 13, 7718, 1330, 4174, 62, 83, 726, 4265, 62, 4169, 263, 62, 13165, 4188, 62, 49196, 11, 2251, 62, 22649, 62, 3849, ...
2.209913
343
from flask import Blueprint

auth = Blueprint('auth', __name__)

from . import views, forms
[ 6738, 42903, 1330, 39932, 198, 18439, 28, 14573, 4798, 10786, 18439, 3256, 834, 3672, 834, 8, 198, 198, 6738, 764, 11748, 5009, 11, 23914 ]
3.541667
24
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, SelectField
from wtforms.validators import DataRequired
[ 6738, 42903, 62, 86, 27110, 1330, 46947, 8479, 198, 6738, 266, 83, 23914, 1330, 10903, 15878, 11, 39900, 15878, 11, 9683, 15878, 198, 6738, 266, 83, 23914, 13, 12102, 2024, 1330, 6060, 37374, 628 ]
3.970588
34
import pandas as pd
import numpy as np


def estimate_volatility(prices, l):
    """Create an exponential moving average model of the volatility of a stock
    price, and return the most recent (last) volatility estimate.

    Parameters
    ----------
    prices : pandas.Series
        A series of adjusted closing prices for a stock.

    l : float
        The 'lambda' parameter of the exponential moving average model. Making
        this value smaller will cause the model to weight older terms less
        relative to more recent terms.

    Returns
    -------
    last_vol : float
        The last element of your exponential moving average volatility model series.
    """
    # Volatility is modeled as the square root of an exponentially weighted
    # moving average of squared log returns (EWMA variance model); use
    # positional .iloc[-1] to fetch the most recent estimate.
    log_rets = np.log(prices) - np.log(prices.shift(1))
    return np.sqrt((log_rets ** 2).ewm(alpha=1 - l).mean().iloc[-1])


def test_run(filename='data.csv'):
    """Test run estimate_volatility() with stock prices from a file."""
    prices = pd.read_csv(filename, parse_dates=['date'], index_col='date', squeeze=True)
    print("Most recent volatility estimate: {:.6f}".format(estimate_volatility(prices, 0.7)))


if __name__ == '__main__':
    test_run()
[ 11748, 19798, 292, 355, 279, 67, 198, 11748, 299, 32152, 355, 45941, 628, 198, 4299, 8636, 62, 10396, 18486, 7, 1050, 1063, 11, 300, 2599, 198, 220, 220, 220, 37227, 16447, 281, 39682, 3867, 2811, 2746, 286, 262, 30772, 286, 257, 4283...
2.835586
444
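A quick sanity check of the volatility estimator above, using a synthetic price series with known volatility instead of data.csv (the series and seed are illustrative only):

import numpy as np
import pandas as pd

# Geometric random walk with a known daily volatility of about 2%.
rng = np.random.default_rng(0)
prices = pd.Series(100 * np.exp(np.cumsum(rng.normal(0, 0.02, 1000))))

log_rets = np.log(prices / prices.shift(1))
# EWMA of squared returns; alpha = 1 - lambda, as in estimate_volatility.
est = np.sqrt((log_rets ** 2).ewm(alpha=1 - 0.7).mean().iloc[-1])
print(round(est, 4))  # in the ballpark of 0.02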
import sys

n, q = map(int, sys.stdin.readline().split())
s = '$' + sys.stdin.readline().rstrip()
lr = zip(*[map(int, sys.stdin.read().split())] * 2)

if __name__ == '__main__':
    # main() holds the actual solution logic; its definition is elided here.
    ans = main()
    print(*ans, sep='\n')
[ 11748, 25064, 201, 198, 201, 198, 77, 11, 10662, 796, 3975, 7, 600, 11, 25064, 13, 19282, 259, 13, 961, 1370, 22446, 35312, 28955, 201, 198, 82, 796, 705, 3, 6, 1343, 25064, 13, 19282, 259, 13, 961, 1370, 22446, 81, 36311, 3419, 2...
2.180952
105
import torch
import torch.nn as nn
from DecoderLayer import DecoderLayer
import math
[ 11748, 28034, 201, 198, 11748, 28034, 13, 20471, 355, 299, 77, 201, 198, 6738, 34580, 49925, 1330, 34580, 49925, 201, 198, 11748, 10688, 201 ]
3.666667
24
""" Grains for Cisco NX-OS minions .. versionadded:: 2016.11.0 For documentation on setting up the nxos proxy minion look in the documentation for :mod:`salt.proxy.nxos<salt.proxy.nxos>`. """ import logging import salt.utils.nxos import salt.utils.platform from salt.exceptions import NxosClientError log = logging.getLogger(__name__) __proxyenabled__ = ["nxos"] __virtualname__ = "nxos"
[ 37811, 198, 8642, 1299, 329, 28289, 42482, 12, 2640, 22811, 198, 198, 492, 2196, 29373, 3712, 1584, 13, 1157, 13, 15, 198, 198, 1890, 10314, 319, 4634, 510, 262, 299, 87, 418, 15741, 28365, 804, 287, 262, 10314, 198, 1640, 1058, 4666,...
2.890511
137
from typing import List, Optional, Tuple

from tmsproviderapisdk.tms_extended_model import TmsExtendedModel
[ 6738, 19720, 1330, 7343, 11, 32233, 11, 309, 29291, 198, 6738, 256, 907, 15234, 1304, 499, 9409, 74, 13, 83, 907, 62, 2302, 1631, 62, 19849, 1330, 309, 907, 11627, 1631, 17633, 628 ]
3.272727
33
""" A class hierarchy relating to fields of all kinds. """ from __future__ import print_function, division import numpy as np from ciabatta.meta import make_repr_str from fealty import lattice, field_numerics, walled_field_numerics class Field(Space): def density_field(self, r): return density(r, self.L, self.dx) def r_to_i(self, r): return lattice.r_to_i(r, self.L, self.dx) def i_to_r(self, i): return lattice.i_to_r(i, self.L, self.dx) def __repr__(self): fs = [('L', self.L), ('dim', self.dim), ('dx', self.dx)] return make_repr_str(self, fs) class Scalar(Field): class Diffusing(Scalar): class WalledScalar(Scalar): # Note, inheritance order matters to get walled grad & laplacian call # (see diamond problem on wikipedia and how python handles it)
[ 37811, 198, 32, 1398, 18911, 11270, 284, 7032, 286, 477, 6982, 13, 198, 37811, 198, 6738, 11593, 37443, 834, 1330, 3601, 62, 8818, 11, 7297, 198, 11748, 299, 32152, 355, 45941, 198, 6738, 269, 72, 397, 25014, 13, 28961, 1330, 787, 62,...
2.530303
330
""" ASGI config for example_django project. It exposes the ASGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ """ import os from django.core.asgi import get_asgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_django.settings") application = get_asgi_application() os.system("/usr/bin/python3 /opt/code/manage.py migrate") os.system("/usr/bin/python3 /opt/code/manage.py " "loaddata /opt/code/blog/fixtures/default_articles.json")
[ 37811, 198, 1921, 18878, 4566, 329, 1672, 62, 28241, 14208, 1628, 13, 198, 198, 1026, 32142, 262, 7054, 18878, 869, 540, 355, 257, 8265, 12, 5715, 7885, 3706, 7559, 31438, 15506, 13, 198, 198, 1890, 517, 1321, 319, 428, 2393, 11, 766,...
2.763033
211
"""Generate values of Method of Simulated Moments criterion function. Given observed moments and weighting matrix in `OUT_ANALYSIS`, "msm_estimation", generate values of Method of Simulated Moments criterion function for combinations of discount factor and present bias values. The goal is to study the bivariate distribution of the time preference parameters around the combination of true parameter values. """ import itertools import numpy as np import pandas as pd import respy as rp import yaml from bld.project_paths import project_paths_join as ppj from src.library.compute_moments import _replace_nans from src.library.compute_moments import calc_restricted_choice_probabilities from src.library.compute_moments import calc_restricted_wage_distribution from src.library.compute_moments import calc_unrestricted_choice_probabilities from src.library.compute_moments import calc_unrestricted_wage_distribution from src.library.compute_moments import calc_very_restricted_choice_probabilities from src.library.compute_moments import calc_very_restricted_wage_distribution from src.library.housekeeping import _load_pickle from src.library.housekeeping import _temporary_working_directory from tqdm import tqdm def get_bivariate_distribution(params, crit_func, grid_delta, grid_beta): """Compute value of criterion function. Args: params (pd.DataFrame): DataFrame containing model parameters. crit_func (dict): Dictionary containing model options. grid_delta (np.array): Values of discount factor. grid_beta (np.array): Values of present-bias parameter. Returns: pd.DataFrame """ results = [] for beta, delta in tqdm(itertools.product(grid_beta, grid_delta)): params_ = params.copy() params_.loc[("beta", "beta"), "value"] = beta params_.loc[("delta", "delta"), "value"] = delta val = crit_func(params_) result = {"beta": beta, "delta": delta, "val": val} results.append(result) return pd.DataFrame.from_dict(results) if __name__ == "__main__": # load params params = pd.read_csv( ppj("IN_MODEL_SPECS", "params_hyp.csv"), sep=";", index_col=["category", "name"], ) params["value"] = params["value"].astype(float) # load options with open(ppj("IN_MODEL_SPECS", "options_hyp.yaml")) as options: options = yaml.safe_load(options) # get empirical moments empirical_moments = _load_pickle(ppj("OUT_ANALYSIS", "msm_estimation", "moments_hyp.pickle")) # get weighting matrix weighting_matrix = _load_pickle( ppj("OUT_ANALYSIS", "msm_estimation", "weighting_matrix_hyp.pickle") ) calc_moments = { "Choice Probabilities Very Restricted": calc_very_restricted_choice_probabilities, "Choice Probabilities Restricted": calc_restricted_choice_probabilities, "Choice Probabilities Unrestricted": calc_unrestricted_choice_probabilities, "Wage Distribution Very Restricted": calc_very_restricted_wage_distribution, "Wage Distribution Restricted": calc_restricted_wage_distribution, "Wage Distribution Unrestricted": calc_unrestricted_wage_distribution, } with _temporary_working_directory(snippet="heatmap"): # get criterion function weighted_sum_squared_errors = rp.get_moment_errors_func( params=params, options=options, calc_moments=calc_moments, replace_nans=_replace_nans, empirical_moments=empirical_moments, weighting_matrix=weighting_matrix, ) # get bivariate distribution results results = get_bivariate_distribution( crit_func=weighted_sum_squared_errors, params=params, grid_delta=np.arange(0.945, 0.9625, 0.0025), grid_beta=np.arange(0.75, 1.05, 0.01), ) results.to_csv(ppj("OUT_ANALYSIS", "heatmap.csv"))
[ 37811, 8645, 378, 3815, 286, 11789, 286, 3184, 4817, 45209, 34054, 2163, 13, 198, 198, 15056, 6515, 7188, 290, 3463, 278, 17593, 287, 4600, 12425, 62, 1565, 1847, 16309, 1797, 47671, 366, 907, 76, 62, 395, 18991, 1600, 198, 8612, 378, ...
2.66936
1,485
from dataclasses import dataclass

from space_game.domain_names import KeyId
from space_game.events.Event import Event
[ 6738, 4818, 330, 28958, 1330, 4818, 330, 31172, 198, 198, 6738, 2272, 62, 6057, 13, 27830, 62, 14933, 1330, 7383, 7390, 198, 6738, 2272, 62, 6057, 13, 31534, 13, 9237, 1330, 8558, 628 ]
3.636364
33
import pymsteams
import logging
from oncall.constants import TEAMS_SUPPORT
[ 11748, 279, 4948, 4169, 4105, 198, 11748, 18931, 198, 6738, 319, 13345, 13, 9979, 1187, 1330, 33536, 50, 62, 40331, 15490, 628 ]
3.454545
22
# +
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import pandas as pd
import warnings

from bigdl.chronos.model.prophet import ProphetBuilder, ProphetModel
from bigdl.chronos.autots.utils import recalculate_n_sampling
# -
[ 2, 1343, 198, 2, 198, 2, 15069, 1584, 383, 4403, 19260, 46665, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, ...
3.539171
217
import math
import numpy as np
import scipy as sp
import scipy.linalg
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F

from nf.utils import unconstrained_RQS

# supported non-linearities: note that the function must be invertible
functional_derivatives = {
    torch.tanh: lambda x: 1 - torch.pow(torch.tanh(x), 2),
    F.leaky_relu: lambda x: (x > 0).type(torch.FloatTensor) + \
                            (x < 0).type(torch.FloatTensor) * -0.01,
    F.elu: lambda x: (x > 0).type(torch.FloatTensor) + \
                     (x < 0).type(torch.FloatTensor) * torch.exp(x)
}
[ 11748, 10688, 198, 11748, 299, 32152, 355, 45941, 198, 11748, 629, 541, 88, 355, 599, 198, 11748, 629, 541, 88, 13, 75, 1292, 70, 198, 11748, 28034, 198, 11748, 28034, 13, 20471, 355, 299, 77, 198, 11748, 28034, 13, 20471, 13, 15003, ...
2.273381
278
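The functional_derivatives table above hard-codes an analytic derivative per activation; a small finite-difference check of the tanh entry (standalone, torch only) confirms it:

import torch

tanh_deriv = lambda x: 1 - torch.pow(torch.tanh(x), 2)

x = torch.linspace(-3, 3, 7, dtype=torch.float64)
eps = 1e-4
# Central difference approximates the derivative to O(eps^2).
numeric = (torch.tanh(x + eps) - torch.tanh(x - eps)) / (2 * eps)
print(torch.allclose(tanh_deriv(x), numeric, atol=1e-6))  # True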
# module pyparsing.py # # Copyright (c) 2003-2019 Paul T. McGuire # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # from pyparsing import ( Literal, Word, Group, Forward, alphas, alphanums, Regex, CaselessKeyword, Suppress, delimitedList, ) import math import operator # map operator symbols to corresponding arithmetic operations epsilon = 1e-12 opn = { "+": operator.add, "-": operator.sub, "*": operator.mul, "/": operator.truediv, "^": operator.pow, } fn = { "sin": math.sin, "cos": math.cos, "tan": math.tan, "exp": math.exp, "abs": abs, "trunc": lambda a: int(a), "round": round, "sgn": lambda a: -1 if a < -epsilon else 1 if a > epsilon else 0, } exprStack = [] def BNF(): """ expop :: '^' multop :: '*' | '/' addop :: '+' | '-' integer :: ['+' | '-'] '0'..'9'+ atom :: PI | E | real | fn '(' expr ')' | '(' expr ')' factor :: atom [ expop factor ]* term :: factor [ multop factor ]* expr :: term [ addop term ]* """ # use CaselessKeyword for e and pi, to avoid accidentally matching # functions that start with 'e' or 'pi' (such as 'exp'); Keyword # and CaselessKeyword only match whole words e = CaselessKeyword("E") pi = CaselessKeyword("PI") # fnumber = Combine(Word("+-"+nums, nums) + # Optional("." + Optional(Word(nums))) + # Optional(e + Word("+-"+nums, nums))) # or use provided pyparsing_common.number, but convert back to str: # fnumber = ppc.number().addParseAction(lambda t: str(t[0])) fnumber = Regex(r"[+-]?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?") ident = Word(alphas, alphanums + "_$") plus, minus, mult, div = map(Literal, "+-*/") lpar, rpar = map(Suppress, "()") addop = plus | minus multop = mult | div expop = Literal("^") expr = Forward() expr_list = delimitedList(Group(expr)) # add parse action that replaces the function identifier with a (name, number of args) tuple fn_call = (ident + lpar - Group(expr_list) + rpar).setParseAction( lambda t: t.insert(0, (t.pop(0), len(t[0]))) ) atom = ( addop[...] + ( (fn_call | pi | e | fnumber | ident).setParseAction(push_first) | Group(lpar + expr + rpar) ) ).setParseAction(push_unary_minus) # by defining exponentiation as "atom [ ^ factor ]..." instead of "atom [ ^ atom ]...", we get right-to-left # exponents, instead of left-to-right that is, 2^3^2 = 2^(3^2), not (2^3)^2. factor = Forward() factor <<= atom + (expop + factor).setParseAction(push_first)[...] term = factor + (multop + factor).setParseAction(push_first)[...] expr <<= term + (addop + term).setParseAction(push_first)[...] 
bnf = expr return bnf def eval_fx(fx, stats): """Given fx and stats ('min', 'max', 'mean', 'std') return the result""" _ = BNF().parseString(fx, parseAll=True) val = evaluate_stack(exprStack[:], stats) return val
[ 2, 8265, 279, 4464, 945, 278, 13, 9078, 198, 2, 198, 2, 15069, 357, 66, 8, 5816, 12, 23344, 220, 3362, 309, 13, 33902, 557, 198, 2, 198, 2, 2448, 3411, 318, 29376, 7520, 11, 1479, 286, 3877, 11, 284, 597, 1048, 16727, 198, 2, ...
2.578283
1,584
# -- coding: utf-8
#!/usr/bin/env python
"""
pyscanlogger: Port scan detector/logger tool, inspired
by scanlogd {http://www.openwall.com/scanlogd} but with
added ability to log slow port-scans.

Features

1. Detects all stealth (half-open) and full-connect scans.
2. Detects Idle scan and logs it correctly using correlation!
3. Detects SCTP scan.
4. Detects slow port-scans also.

Modification History

Mar 17 2010 - Cleaned up code to publish to google.
Apr 8 2010  - Better detection of TCP full-connect scan without
              spurious and incorrect logging. Better logging functions.

Licensed under GNU GPL v3.0.
"""

import sys, os
import dpkt, pcap
import struct
import socket
import time
import threading
import optparse

import entry
import timerlist

__author__ = "pythonhacker"
__maintainer__ = "pythonhacker"
__version__ = '0.5.1'
__modified__ = 'Thu Apr 8 19:21:11 IST 2010'

# UDP - in progress...

SCAN_TIMEOUT = 5
WEIGHT_THRESHOLD = 25
PIDFILE = "/var/run/pyscanlogger.pid"

# TCP flag constants
TH_URG = dpkt.tcp.TH_URG
TH_ACK = dpkt.tcp.TH_ACK
TH_PSH = dpkt.tcp.TH_PUSH
TH_RST = dpkt.tcp.TH_RST
TH_SYN = dpkt.tcp.TH_SYN
TH_FIN = dpkt.tcp.TH_FIN

# Protocols
TCP = dpkt.tcp.TCP
UDP = dpkt.udp.UDP
SCTP = dpkt.sctp.SCTP

get_timestamp = lambda: time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
ip2quad = lambda x: socket.inet_ntoa(struct.pack('I', x))
scan_ip2quad = lambda scan: map(ip2quad, [scan.src, scan.dst])

if __name__ == '__main__':
    main()
[ 2, 1377, 19617, 25, 3384, 69, 12, 23, 201, 198, 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 201, 198, 37811, 201, 198, 79, 893, 5171, 6404, 1362, 25, 4347, 9367, 31029, 14, 6404, 1362, 2891, 11, 7867, 201, 198, 1525, 9367, 6404, ...
2.314925
670
from itertools import combinations
import copy


def check_following_migration(edges, p=0):
    """

    :param edges:
    :return:
    """
    e = copy.copy(edges)
    visited = [False for _ in e]
    miggroup = []
    cnt = -1
    for i in range(0, len(e)):
        if visited[i]:
            continue
        e[i] = list(e[i])
        e[i][p] = list(e[i][p])
        t1 = sorted(e[i][p], key=lambda tup: tup[0])
        if not visited[i]:
            visited[i] = True
            miggroup.append([i])
            cnt += 1
        for j in range(0, len(e)):
            if j != i and not visited[j]:
                e[j] = list(e[j])
                e[j][p] = list(e[j][p])
                t2 = sorted(e[j][p], key=lambda tup: tup[0])
                if (t2[0][0] != t1[0][0]) or (t2[1][0] != t1[1][0]):
                    continue
                for num in range(0, len(miggroup[cnt])):
                    t1 = sorted(e[miggroup[cnt][num]][p], key=lambda tup: tup[0])
                    if (t1[0][1] + 1 == t2[0][1] and t1[1][1] - 1 == t2[1][1]) \
                            or (t1[0][1] - 1 == t2[0][1] and t1[1][1] + 1 == t2[1][1]):
                        visited[j] = True
                        miggroup[cnt].append(j)
                        break
    return miggroup


def get_absdist(domain1, domain2):
    """

    :param domain1:
    :param domain2:
    :return:
    """
    return abs(domain1[1] - domain2[1])


def get_closet_domain_to_target(target, domains):
    """

    :param target:
    :param domains:
    :return:
    """
    closet = 10000
    closetd = ()
    for i in domains:
        dist = get_absdist(i, target)
        if dist < closet:
            closet = dist
            closetd = i
    return closetd


def get_domains_on_2sides(target1, target2, domains1, domains2):
    """

    :param target1:
    :param target2:
    :param domains1:
    :param domains2:
    :return:
    """
    if target1[0] == domains1[0][0]:
        closetd1 = get_closet_domain_to_target(target1, domains1)
    elif target2[0] == domains1[0][0]:
        closetd1 = get_closet_domain_to_target(target2, domains1)
    if target1[0] == domains2[0][0]:
        closetd2 = get_closet_domain_to_target(target1, domains2)
    elif target2[0] == domains2[0][0]:
        closetd2 = get_closet_domain_to_target(target2, domains2)
    return closetd1, closetd2


def get_closest_target(domains, targets):
    """

    :return:
    """
    domains = sorted(domains, key=lambda tup: tup[1])
    mindist = 10000
    mint = None
    for t in targets:
        dist = min(get_absdist(t, domains[0]),
                   get_absdist(t, domains[len(domains) - 1]))
        if dist < mindist:
            # Bug fix: track the running minimum, otherwise the last target
            # closer than the initial bound would win instead of the closest.
            mindist = dist
            mint = t
    return mint
1.874054
1,453
# coding=utf-8
# Copyright 2020 The HuggingFace Team All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Sparse Fine-tuning the library models for question answering.
"""
# You can also adapt this script on your own question answering task. Pointers for this are left as comments.

from nn_pruning.sparse_trainer import SparseTrainer

from .qa_train import QATrainer


# SparseTrainer should appear first in the base classes, as its functions must override QATrainer and its base classes (Trainer)
[ 2, 19617, 28, 40477, 12, 23, 198, 2, 15069, 12131, 383, 12905, 2667, 32388, 4816, 1439, 2489, 10395, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 7...
3.815789
266
# Copyright 2018 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
iLO5 RAID specific methods
"""

from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils

from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import raid
from ironic.common import states
from ironic.conductor import utils as manager_utils
from ironic import conf
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.ilo import common as ilo_common

LOG = logging.getLogger(__name__)
CONF = conf.CONF
METRICS = metrics_utils.get_metrics_logger(__name__)

ilo_error = importutils.try_import('proliantutils.exception')
[ 2, 15069, 2864, 30446, 15503, 6400, 446, 14973, 7712, 18470, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 345, 743, 198, 2, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 137...
3.694767
344
""" Common Petronia imports for bootstrap parts of an extension. This should be imported along with the `simp` module. """ from ...base.bus import ( EventBus, ListenerRegistrar, ListenerSetup, QueuePriority, ExtensionMetadataStruct, register_event, EVENT_WILDCARD, TARGET_WILDCARD, QUEUE_EVENT_NORMAL, QUEUE_EVENT_HIGH, QUEUE_EVENT_IO, QUEUE_EVENT_TYPES ) from ...base.participant import ( create_singleton_identity, NOT_PARTICIPANT, ) from ...base.events import ( # These are generally just bootstrap events. DisposeCompleteEvent, as_dispose_complete_listener, RequestDisposeEvent, as_request_dispose_listener, SystemStartedEvent, as_system_started_listener, ) from ...base.events.bus import ( EventProtectionModel, GLOBAL_EVENT_PROTECTION, INTERNAL_EVENT_PROTECTION, PRODUCE_EVENT_PROTECTION, CONSUME_EVENT_PROTECTION, REQUEST_EVENT_PROTECTION, RESPONSE_EVENT_PROTECTION, ) from ...core.extensions.api import ANY_VERSION from ...core.shutdown.api import ( SystemShutdownEvent, as_system_shutdown_listener, SystemShutdownFinalizeEvent, as_system_shutdown_finalize_listener, TARGET_ID_SYSTEM_SHUTDOWN, )
[ 198, 37811, 198, 17227, 4767, 1313, 544, 17944, 329, 6297, 26418, 3354, 286, 281, 7552, 13, 198, 198, 1212, 815, 307, 17392, 1863, 351, 262, 4600, 82, 11011, 63, 8265, 13, 198, 37811, 628, 198, 6738, 2644, 8692, 13, 10885, 1330, 357, ...
2.474308
506
import pyopencl as cl
from optparse import OptionParser

parser = OptionParser()
parser.add_option("-s", "--short", action="store_true",
                  help="don't print all device properties")

(options, args) = parser.parse_args()

# print_info() and str_chd_type() are helper functions defined elsewhere in
# the full script (elided in this excerpt).
for platform in cl.get_platforms():
    print(75*"=")
    print(platform)
    print(75*"=")
    if not options.short:
        print_info(platform, cl.platform_info)

    for device in platform.get_devices():
        if not options.short:
            print(75*"-")
        print(device)
        if not options.short:
            print(75*"-")
            print_info(device, cl.device_info)
            ctx = cl.Context([device])
            for mf in [
                    cl.mem_flags.READ_ONLY,
                    #cl.mem_flags.READ_WRITE,
                    #cl.mem_flags.WRITE_ONLY
                    ]:
                for itype in [
                        cl.mem_object_type.IMAGE2D,
                        cl.mem_object_type.IMAGE3D
                        ]:
                    try:
                        formats = cl.get_supported_image_formats(ctx, mf, itype)
                    except:
                        formats = "<error>"
                    else:
                        formats = ", ".join(
                                "{}-{}".format(
                                    cl.channel_order.to_string(iform.channel_order,
                                        "<unknown channel order 0x%x>"),
                                    str_chd_type(iform.channel_data_type))
                                for iform in formats)

                    print("{} {} FORMATS: {}\n".format(
                            cl.mem_object_type.to_string(itype),
                            cl.mem_flags.to_string(mf),
                            formats))
            del ctx
[ 11748, 12972, 9654, 565, 355, 537, 198, 6738, 2172, 29572, 1330, 16018, 46677, 198, 198, 48610, 796, 16018, 46677, 3419, 198, 48610, 13, 2860, 62, 18076, 7203, 12, 82, 1600, 366, 438, 19509, 1600, 2223, 2625, 8095, 62, 7942, 1600, 198, ...
1.653707
1,106
# -*- coding: future_fstrings -*-
#
# Copyright 2019 Gianluca Frison, Dimitris Kouzoupis, Robin Verschueren,
# Andrea Zanelli, Niels van Duijkeren, Jonathan Frey, Tommaso Sartor,
# Branimir Novoselnik, Rien Quirynen, Rezart Qelibari, Dang Doan,
# Jonas Koenemann, Yutao Chen, Tobias Schls, Jonas Schlagenhauf, Moritz Diehl
#
# This file is part of acados.
#
# The 2-Clause BSD License
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#

import sys, os, json

import numpy as np

from ctypes import *
from casadi import CasadiMeta, Function, SX
from copy import deepcopy

from .generate_c_code_explicit_ode import generate_c_code_explicit_ode
from .generate_c_code_implicit_ode import generate_c_code_implicit_ode
from .generate_c_code_gnsf import generate_c_code_gnsf
from .generate_c_code_constraint import generate_c_code_constraint
from .generate_c_code_nls_cost import generate_c_code_nls_cost
from .generate_c_code_external_cost import generate_c_code_external_cost
from .acados_ocp import AcadosOcp
from .acados_model import acados_model_strip_casadi_symbolics
from .utils import is_column, is_empty, casadi_length, render_template, acados_class2dict,\
     format_class_dict, ocp_check_against_layout, np_array_to_list, make_model_consistent,\
     set_up_imported_gnsf_model
[ 2, 532, 9, 12, 19617, 25, 2003, 62, 69, 37336, 532, 9, 12, 198, 2, 198, 2, 15069, 13130, 30851, 75, 43120, 1305, 1653, 11, 14048, 270, 2442, 30559, 89, 10486, 271, 11, 12325, 18535, 354, 518, 918, 11, 198, 2, 23174, 47022, 23225, ...
3.107143
812
import numpy as np import cv2 import sys import os import glob def im2patch(im, pch_size, stride=1): ''' Transform image to patches. Input: im: 3 x H x W or 1 X H x W image, numpy format pch_size: (int, int) tuple or integer stride: (int, int) tuple or integer ''' if isinstance(pch_size, tuple): pch_H, pch_W = pch_size elif isinstance(pch_size, int): pch_H = pch_W = pch_size else: sys.exit('The input of pch_size must be a integer or a int tuple!') if isinstance(stride, tuple): stride_H, stride_W = stride elif isinstance(stride, int): stride_H = stride_W = stride else: sys.exit('The input of stride must be a integer or a int tuple!') C, H, W = im.shape num_H = len(range(0, H-pch_H+1, stride_H)) num_W = len(range(0, W-pch_W+1, stride_W)) num_pch = num_H * num_W pch = np.zeros((C, pch_H*pch_W, num_pch), dtype=im.dtype) kk = 0 for ii in range(pch_H): for jj in range(pch_W): temp = im[:, ii:H-pch_H+ii+1:stride_H, jj:W-pch_W+jj+1:stride_W] pch[:, kk, :] = temp.reshape((C, num_pch)) kk += 1 return pch.reshape((C, pch_H, pch_W, num_pch)) def noise_estimate(im, pch_size=8): ''' Implement of noise level estimation of the following paper: Chen G , Zhu F , Heng P A . An Efficient Statistical Method for Image Noise Level Estimation[C]// 2015 IEEE International Conference on Computer Vision (ICCV). IEEE Computer Society, 2015. Input: im: the noise image, H x W x 3 or H x W numpy tensor, range [0,1] pch_size: patch_size Output: noise_level: the estimated noise level ''' if im.ndim == 3: im = im.transpose((2, 0, 1)) else: im = np.expand_dims(im, axis=0) # image to patch pch = im2patch(im, pch_size, 3) # C x pch_size x pch_size x num_pch tensor num_pch = pch.shape[3] pch = pch.reshape((-1, num_pch)) # d x num_pch matrix d = pch.shape[0] mu = pch.mean(axis=1, keepdims=True) # d x 1 X = pch - mu sigma_X = np.matmul(X, X.transpose()) / num_pch sig_value, _ = np.linalg.eigh(sigma_X) sig_value.sort() for ii in range(-1, -d-1, -1): tau = np.mean(sig_value[:ii]) if np.sum(sig_value[:ii]>tau) == np.sum(sig_value[:ii] < tau): return np.sqrt(tau) def run(imgPath, patchSize, internalNumPatches, dirOut, saveResults=True): """ Estimates the standard deviation of (additive white gaussian) noise of image patches. The noise is estimated patch by patch. Based on: "An Efficient Statistical Method for Image Noise Level Estimation" (2015) :param imgPath: Path to the input image. :param patchSize: Image patch size. :param internalNumPatches: Internal number of sub-image-patches. :param dirOut: Directory where to save the noise estimation results. :param saveResults: Whether to save the estimation results or not. 
:return: None """ # Load image img = np.array(cv2.imread(imgPath)) try: img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY) img = img / 255.0 h, w = img.shape psize = min(min(patchSize, h), w) psize -= psize % 2 patch_step = psize shift_factor = 2 # Result array estimatedNoiseMap = np.zeros([h, w], dtype=np.int8) rangex = range(0, w, patch_step) rangey = range(0, h, patch_step) for start_x in rangex: for start_y in rangey: end_x = start_x + psize end_y = start_y + psize if end_x > w: end_x = w end_x = shift_factor * ((end_x) // shift_factor) start_x = end_x - psize if end_y > h: end_y = h end_y = shift_factor * ((end_y) // shift_factor) start_y = end_y - psize tileM = img[start_y:end_y, start_x:end_x] h_, w_ = tileM.shape sigma = noise_estimate(tileM, internalNumPatches) * 255.0 estimatedNoiseMap[start_y :start_y + h_, start_x : start_x + w_] = sigma if saveResults: if dirOut is not None: imgName = imgPath.split(os.sep)[-1].split(".")[0] dirOut = os.path.join(dirOut) if not os.path.exists(dirOut): os.makedirs(dirOut) noiseMapPath = os.path.join(dirOut, imgName + ".npz") if not os.path.exists(noiseMapPath): np.savez_compressed(noiseMapPath, estimatedNoiseMap) return estimatedNoiseMap except: return None # Example # if __name__ == '__main__': # dirIn = r"../../../data/udacity/img/GT" # dirOut = r"../../../data/udacity/labels_noise_patchwise/PCA" # imgFileEnding = ".jpg" # for imgPath in glob.glob(os.path.join(dirIn, "*" + imgFileEnding)): # run(imgPath, 128, 8, dirOut)
[ 11748, 299, 32152, 355, 45941, 198, 11748, 269, 85, 17, 198, 11748, 25064, 198, 11748, 28686, 198, 11748, 15095, 198, 198, 4299, 545, 17, 17147, 7, 320, 11, 279, 354, 62, 7857, 11, 33769, 28, 16, 2599, 198, 220, 220, 220, 705, 7061,...
2.003125
2,560
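Assuming noise_estimate from the module above is importable, a synthetic image with a known noise level is the simplest way to validate it (the seed and noise level are illustrative):

import numpy as np
# from <the module above> import noise_estimate  # assumed importable

rng = np.random.default_rng(42)
sigma_true = 15 / 255.0
# Flat gray image plus additive white Gaussian noise of known sigma.
img = np.clip(0.5 + rng.normal(0, sigma_true, (256, 256)), 0, 1)

sigma_est = noise_estimate(img, pch_size=8)
print(round(sigma_est * 255, 2))  # should be close to 15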
# This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.

__name__ = "biotite.application"
__author__ = "Patrick Kunzmann"
__all__ = ["Application", "AppStateError", "TimeoutError", "VersionError",
           "AppState", "requires_state"]

import abc
import time
from functools import wraps
from enum import Flag, auto


def requires_state(app_state):
    """
    A decorator for methods of :class:`Application` subclasses that
    raises an :class:`AppStateError` in case the method is called, when
    the :class:`Application` is not in the specified :class:`AppState`
    `app_state`.

    Parameters
    ----------
    app_state : AppState
        The required app state.

    Examples
    --------
    Raises :class:`AppStateError` when `function` is called,
    if :class:`Application` is not in one of the specified states:

    >>> @requires_state(AppState.RUNNING | AppState.FINISHED)
    ... def function(self):
    ...     pass
    """
    # Minimal reconstruction of the inner decorator, which was missing from
    # this excerpt; it assumes the application tracks its state in `_state`.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            instance = args[0]
            if not instance._state & app_state:
                raise AppStateError(
                    f"The application is in {instance._state} state, "
                    f"but {app_state} state is required"
                )
            return func(*args, **kwargs)
        return wrapper
    return decorator


# Method of the abstract Application class (class body elided in this excerpt).
def clean_up(self):
    """
    Do clean up work after the application terminates.

    PROTECTED: Optionally override when inheriting.
    """
    pass


class AppStateError(Exception):
    """
    Indicate that the application lifecycle was violated.
    """
    pass


class TimeoutError(Exception):
    """
    Indicate that the application's timeout expired.
    """
    pass


class VersionError(Exception):
    """
    Indicate that the application's version is invalid.
    """
    pass
[ 2, 770, 2723, 2438, 318, 636, 286, 262, 347, 5151, 578, 5301, 290, 318, 9387, 198, 2, 739, 262, 513, 12, 2601, 682, 347, 10305, 13789, 13, 4222, 766, 705, 43, 2149, 24290, 13, 81, 301, 6, 329, 2252, 198, 2, 1321, 13, 198, 198, ...
2.792028
577
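A minimal usage sketch for requires_state above; the AppState flags and the `_state` attribute follow the reconstructed decorator, and the Job class is hypothetical:

class AppState(Flag):
    CREATED = auto()
    RUNNING = auto()
    FINISHED = auto()

class Job:
    def __init__(self):
        self._state = AppState.CREATED

    @requires_state(AppState.FINISHED)
    def get_results(self):
        return "results"

job = Job()
try:
    job.get_results()           # raises AppStateError: job is still CREATED
except AppStateError as err:
    print(err)
job._state = AppState.FINISHED
print(job.get_results())        # 'results'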
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function from ..framework import core from ..fluid.layer_helper import LayerHelper from ..fluid.data_feeder import check_variable_and_dtype # TODO: define functions to get tensor attributes from ..fluid.layers import rank # noqa: F401 from ..fluid.layers import shape # noqa: F401 import paddle from paddle import _C_ops from paddle.static import Variable from ..fluid.framework import _in_legacy_dygraph, in_dygraph_mode __all__ = [] def is_complex(x): """Return whether x is a tensor of complex data type(complex64 or complex128). Args: x (Tensor): The input tensor. Returns: bool: True if the data type of the input is complex data type, otherwise false. Examples: .. code-block:: python import paddle x = paddle.to_tensor([1 + 2j, 3 + 4j]) print(paddle.is_complex(x)) # True x = paddle.to_tensor([1.1, 1.2]) print(paddle.is_complex(x)) # False x = paddle.to_tensor([1, 2, 3]) print(paddle.is_complex(x)) # False """ if not isinstance(x, (paddle.Tensor, paddle.static.Variable)): raise TypeError("Expected Tensor, but received type of x: {}".format( type(x))) dtype = x.dtype is_complex_dtype = (dtype == core.VarDesc.VarType.COMPLEX64 or dtype == core.VarDesc.VarType.COMPLEX128) return is_complex_dtype def is_floating_point(x): """ Returns whether the dtype of `x` is one of paddle.float64, paddle.float32, paddle.float16, and paddle.bfloat16. Args: x (Tensor): The input tensor. Returns: bool: True if the dtype of `x` is floating type, otherwise false. Examples: .. code-block:: python import paddle x = paddle.arange(1., 5., dtype='float32') y = paddle.arange(1, 5, dtype='int32') print(paddle.is_floating_point(x)) # True print(paddle.is_floating_point(y)) # False """ if not isinstance(x, (paddle.Tensor, paddle.static.Variable)): raise TypeError("Expected Tensor, but received type of x: {}".format( type(x))) dtype = x.dtype is_fp_dtype = (dtype == core.VarDesc.VarType.FP32 or dtype == core.VarDesc.VarType.FP64 or dtype == core.VarDesc.VarType.FP16 or dtype == core.VarDesc.VarType.BF16) return is_fp_dtype def is_integer(x): """Return whether x is a tensor of integeral data type. Args: x (Tensor): The input tensor. Returns: bool: True if the data type of the input is integer data type, otherwise false. Examples: .. 
code-block:: python import paddle x = paddle.to_tensor([1 + 2j, 3 + 4j]) print(paddle.is_integer(x)) # False x = paddle.to_tensor([1.1, 1.2]) print(paddle.is_integer(x)) # False x = paddle.to_tensor([1, 2, 3]) print(paddle.is_integer(x)) # True """ if not isinstance(x, (paddle.Tensor, paddle.static.Variable)): raise TypeError("Expected Tensor, but received type of x: {}".format( type(x))) dtype = x.dtype is_int_dtype = (dtype == core.VarDesc.VarType.UINT8 or dtype == core.VarDesc.VarType.INT8 or dtype == core.VarDesc.VarType.INT16 or dtype == core.VarDesc.VarType.INT32 or dtype == core.VarDesc.VarType.INT64) return is_int_dtype def real(x, name=None): """ Returns a new tensor containing real values of the input tensor. Args: x (Tensor): the input tensor, its data type could be complex64 or complex128. name (str, optional): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name` . Returns: Tensor: a tensor containing real values of the input tensor. Examples: .. code-block:: python import paddle x = paddle.to_tensor( [[1 + 6j, 2 + 5j, 3 + 4j], [4 + 3j, 5 + 2j, 6 + 1j]]) # Tensor(shape=[2, 3], dtype=complex64, place=CUDAPlace(0), stop_gradient=True, # [[(1+6j), (2+5j), (3+4j)], # [(4+3j), (5+2j), (6+1j)]]) real_res = paddle.real(x) # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True, # [[1., 2., 3.], # [4., 5., 6.]]) real_t = x.real() # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True, # [[1., 2., 3.], # [4., 5., 6.]]) """ if in_dygraph_mode(): return _C_ops.final_state_real(x) if _in_legacy_dygraph(): return _C_ops.real(x) check_variable_and_dtype(x, 'x', ['complex64', 'complex128'], 'real') helper = LayerHelper('real', **locals()) out = helper.create_variable_for_type_inference( dtype=_complex_to_real_dtype(helper.input_dtype())) helper.append_op(type='real', inputs={'X': x}, outputs={'Out': out}) return out def imag(x, name=None): """ Returns a new tensor containing imaginary values of input tensor. Args: x (Tensor): the input tensor, its data type could be complex64 or complex128. name (str, optional): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name` . Returns: Tensor: a tensor containing imaginary values of the input tensor. Examples: .. code-block:: python import paddle x = paddle.to_tensor( [[1 + 6j, 2 + 5j, 3 + 4j], [4 + 3j, 5 + 2j, 6 + 1j]]) # Tensor(shape=[2, 3], dtype=complex64, place=CUDAPlace(0), stop_gradient=True, # [[(1+6j), (2+5j), (3+4j)], # [(4+3j), (5+2j), (6+1j)]]) imag_res = paddle.imag(x) # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True, # [[6., 5., 4.], # [3., 2., 1.]]) imag_t = x.imag() # Tensor(shape=[2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True, # [[6., 5., 4.], # [3., 2., 1.]]) """ if in_dygraph_mode(): return _C_ops.final_state_imag(x) if _in_legacy_dygraph(): return _C_ops.imag(x) check_variable_and_dtype(x, 'x', ['complex64', 'complex128'], 'imag') helper = LayerHelper('imag', **locals()) out = helper.create_variable_for_type_inference( dtype=_complex_to_real_dtype(helper.input_dtype())) helper.append_op(type='imag', inputs={'X': x}, outputs={'Out': out}) return out
[ 2, 220, 220, 15069, 357, 66, 8, 12131, 350, 37382, 47, 37382, 46665, 13, 1439, 6923, 33876, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, ...
2.170442
3,532
import evdev
import time
import struct
[ 11748, 819, 7959, 198, 11748, 640, 198, 11748, 2878, 198 ]
3.9
10
# Barcode Example
#
# This example shows off how easy it is to detect bar codes using the
# OpenMV Cam M7. Barcode detection does not work on the M4 Camera.

import sensor, image, time, math

sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.VGA) # High Res!
sensor.set_windowing((640, 80)) # V Res of 80 == less work (40 for 2X the speed).
sensor.skip_frames(time = 2000)
sensor.set_auto_gain(False)  # must turn this off to prevent image washout...
sensor.set_auto_whitebal(False)  # must turn this off to prevent image washout...
clock = time.clock()

# Barcode detection can run at the full 640x480 resolution of your OpenMV Cam's
# OV7725 camera module. Barcode detection will also work in RGB565 mode but at
# a lower resolution. That said, barcode detection requires a higher resolution
# to work well so it should always be run at 640x480 in grayscale...

# barcode_name() maps code.type() to a human-readable name; it is defined
# earlier in the full OpenMV example (elided here).
while(True):
    clock.tick()
    img = sensor.snapshot()
    codes = img.find_barcodes()
    for code in codes:
        img.draw_rectangle(code.rect())
        print_args = (barcode_name(code), code.payload(), (180 * code.rotation()) / math.pi, code.quality(), clock.fps())
        print("Barcode %s, Payload \"%s\", rotation %f (degrees), quality %d, FPS %f" % print_args)
    if not codes:
        print("FPS %f" % clock.fps())
[ 2, 2409, 8189, 17934, 198, 2, 198, 2, 770, 1672, 2523, 572, 703, 2562, 340, 318, 284, 4886, 2318, 12416, 1262, 262, 198, 2, 4946, 44, 53, 7298, 337, 22, 13, 2409, 8189, 13326, 857, 407, 670, 319, 262, 337, 19, 20432, 13, 198, 19...
2.89738
458
n = int(input("Enter number "))
fact = 1
for i in range(1, n + 1):
    fact = fact * i
print("Factorial is ", fact)
[ 77, 28, 600, 7, 15414, 7203, 17469, 1271, 366, 4008, 198, 22584, 28, 16, 198, 1640, 1312, 287, 2837, 7, 16, 11, 77, 10, 16, 2599, 198, 220, 1109, 28, 22584, 9, 72, 198, 4798, 7203, 29054, 5132, 318, 33172, 22584, 8, 198 ]
2.372093
43
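The loop above computes n! iteratively; the standard library's math.factorial makes a handy cross-check:

import math

n = 5
fact = 1
for i in range(1, n + 1):
    fact *= i
print(fact == math.factorial(n))  # True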
import time

# The * unpacks the parameter, allowing any number of arguments to be passed in.
# `maior` was missing its definition in this excerpt; a minimal version returns
# the largest of the received values (or None when called with no arguments).
def maior(*numeros):
    return max(numeros) if numeros else None

maior(2, 1, 7)
maior(5, 4, 7, 9, 2)
maior(1, 4, 7, 20, 2)
maior(0)
[ 11748, 640, 198, 2, 440, 1635, 220, 31215, 748, 368, 33587, 313, 283, 267, 5772, 23528, 13, 2448, 32937, 379, 822, 84, 343, 287, 6975, 418, 5772, 316, 4951, 13, 628, 198, 2611, 1504, 7, 17, 11, 352, 11, 767, 8, 198, 2611, 1504, ...
2.051948
77
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.


import unittest

import torch
from common_testing import TestCaseMixin, get_random_cuda_device
from pytorch3d.ops import packed_to_padded, padded_to_packed
from pytorch3d.structures.meshes import Meshes


# Helper from pytorch3d's test suite; `self` and `TestPackedToPadded` refer to
# the full test class, whose remaining body is elided in this excerpt.
def _test_padded_to_packed_helper(self, D, device):
    """
    Check the results from packed_to_padded and PyTorch implementations
    are the same.
    """
    meshes = self.init_meshes(16, 100, 300, device=device)
    mesh_to_faces_packed_first_idx = meshes.mesh_to_faces_packed_first_idx()
    num_faces_per_mesh = meshes.num_faces_per_mesh()
    max_faces = num_faces_per_mesh.max().item()
    if D == 0:
        values = torch.rand((len(meshes), max_faces), device=device)
    else:
        values = torch.rand((len(meshes), max_faces, D), device=device)
    for i, num in enumerate(num_faces_per_mesh):
        values[i, num:] = 0
    values.requires_grad = True
    values_torch = values.detach().clone()
    values_torch.requires_grad = True
    values_packed = padded_to_packed(
        values, mesh_to_faces_packed_first_idx, num_faces_per_mesh.sum().item()
    )
    values_packed_torch = TestPackedToPadded.padded_to_packed_python(
        values_torch,
        mesh_to_faces_packed_first_idx,
        num_faces_per_mesh.sum().item(),
        device,
    )
    # check forward
    self.assertClose(values_packed, values_packed_torch)

    # check backward
    if D == 0:
        grad_inputs = torch.rand((num_faces_per_mesh.sum().item()), device=device)
    else:
        grad_inputs = torch.rand(
            (num_faces_per_mesh.sum().item(), D), device=device
        )
    values_packed.backward(grad_inputs)
    grad_outputs = values.grad
    values_packed_torch.backward(grad_inputs)
    grad_outputs_torch1 = values_torch.grad
    grad_outputs_torch2 = TestPackedToPadded.packed_to_padded_python(
        grad_inputs, mesh_to_faces_packed_first_idx, values.size(1), device=device
    )
    self.assertClose(grad_outputs, grad_outputs_torch1)
    self.assertClose(grad_outputs, grad_outputs_torch2)
[ 2, 15069, 357, 66, 8, 3203, 11, 3457, 13, 290, 663, 29116, 13, 201, 198, 2, 1439, 2489, 10395, 13, 201, 198, 2, 201, 198, 2, 770, 2723, 2438, 318, 11971, 739, 262, 347, 10305, 12, 7635, 5964, 1043, 287, 262, 201, 198, 2, 38559, ...
2.158217
1,144
import pyproj
import pytest
import numpy as np

from easyric.io import geotiff, shp
from skimage.io import imread
from skimage.color import rgb2gray

import matplotlib.pyplot as plt
[ 11748, 12972, 1676, 73, 198, 11748, 12972, 9288, 198, 11748, 299, 32152, 355, 45941, 198, 6738, 2562, 1173, 13, 952, 1330, 4903, 313, 733, 11, 427, 79, 198, 6738, 1341, 9060, 13, 952, 1330, 545, 961, 198, 6738, 1341, 9060, 13, 8043, ...
2.984615
65
# coding: utf-8

"""
    Marketplace Insights API

    <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#limited\" target=\"_blank\"> <img src=\"/cms/img/docs/partners-api.svg\" class=\"legend-icon partners-icon\" title=\"Limited Release\" alt=\"Limited Release\" />(Limited Release)</a> The Marketplace Insights API provides the ability to search for sold items on eBay by keyword, GTIN, category, and product and returns the sales history of those items.  # noqa: E501

    OpenAPI spec version: v1_beta.2.2

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six


# Methods of the swagger-generated ItemLocation model (class body elided in
# this excerpt).
def to_str(self):
    """Returns the string representation of the model"""
    return pprint.pformat(self.to_dict())

def __repr__(self):
    """For `print` and `pprint`"""
    return self.to_str()

def __eq__(self, other):
    """Returns true if both objects are equal"""
    if not isinstance(other, ItemLocation):
        return False

    return self.__dict__ == other.__dict__

def __ne__(self, other):
    """Returns true if both objects are not equal"""
    return not self == other
[ 2, 19617, 25, 3384, 69, 12, 23, 198, 198, 37811, 198, 220, 220, 220, 36703, 7088, 2337, 7824, 628, 220, 220, 220, 1279, 64, 13291, 17553, 5450, 1378, 16244, 263, 13, 1765, 323, 13, 785, 14, 15042, 12, 31628, 14, 12708, 14, 9641, 2...
2.785714
434
# Fractional Knapsack

wt = [40, 50, 30, 10, 10, 40, 30]
pro = [30, 20, 20, 25, 5, 35, 15]
n = len(wt)
data = [(i, pro[i], wt[i]) for i in range(n)]
bag = 100

# Sort items by profit/weight ratio, highest first (the greedy choice).
data.sort(key=lambda x: x[1] / x[2], reverse=True)

profit = 0
ans = []
i = 0
while i < n:
    if data[i][2] <= bag:
        bag -= data[i][2]
        ans.append(data[i][0])
        profit += data[i][1]
        i += 1
    else:
        break

# Take a fraction of the first item that no longer fits whole.
if i < n:
    ans.append(data[i][0])
    profit += (bag * data[i][1]) / data[i][2]

print(profit, ans)
[ 2, 376, 7861, 282, 6102, 1686, 441, 201, 198, 201, 198, 46569, 796, 685, 1821, 11, 1120, 11, 1270, 11, 940, 11, 940, 11, 1821, 11, 1270, 60, 201, 198, 1676, 796, 685, 1270, 11, 1238, 11, 1238, 11, 1495, 11, 20, 11, 2327, 11, 1...
1.684564
298
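Working the data above by hand confirms the greedy result: sorted by profit/weight ratio the order is item 3 (2.5), item 5 (0.875), item 0 (0.75), item 2 (about 0.67), then the 0.5- and 0.4-ratio items. Items 3, 5 and 0 fit whole (weight 10 + 40 + 40 = 90, profit 25 + 35 + 30 = 90), and the remaining 10 units of capacity take a third of item 2:

# Hand-derived expectation for the data above.
expected_profit = 25 + 35 + 30 + 10 * 20 / 30   # 96.666...
expected_items = [3, 5, 0, 2]                   # item 2 taken fractionally
print(expected_profit, expected_items)          # matches the printed output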