Dataset schema (per row), reconstructed from the viewer header:

    content            string   lengths 1 to 1.05M characters
    input_ids          list     lengths 1 to 883k tokens
    ratio_char_token   float64  1 to 22.9
    token_count        int64    1 to 883k
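The derived columns relate directly to the two raw ones: `token_count` is the length of `input_ids`, and `ratio_char_token` is the character length of `content` divided by `token_count`. A minimal sketch, assuming only the schema above (`derived_columns` is a hypothetical helper name, not part of any library):

    # Sketch of how the derived columns are computed from the raw ones;
    # field names follow the schema above, nothing here is library API.
    def derived_columns(content: str, input_ids: list) -> dict:
        token_count = len(input_ids)
        return {
            "token_count": token_count,
            "ratio_char_token": len(content) / token_count if token_count else 0.0,
        }

For example, the 43-character `polymatch` row below (including trailing newlines) tokenizes into 11 ids, giving ratio_char_token = 43 / 11 = 3.909091, matching the stored value.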
""" Handles most general questions (including math!) Requires: - WolframAlpha API key Usage Examples: - "How tall is Mount Everest?" - "What is the derivative of y = 2x?" """ import wolframalpha from orion.classes.module import Module from orion.classes.task import ActiveTask from orion import settings wolfram_client = wolframalpha.Client(settings.WOLFRAM_KEY)
[ 37811, 201, 198, 12885, 829, 749, 2276, 2683, 357, 8201, 10688, 8133, 201, 198, 201, 198, 39618, 25, 201, 198, 220, 220, 220, 532, 8662, 859, 38077, 7824, 1994, 201, 198, 201, 198, 28350, 21066, 25, 201, 198, 220, 220, 220, 532, 366...
2.892086
139
from polymatch import PolymorphicMatcher
[ 6738, 7514, 15699, 1330, 12280, 24503, 291, 19044, 2044, 628, 198 ]
3.909091
11
""" Django settings for project. """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os # Debug #DEBUG = False DEBUG = True TEMPLATE_DEBUG = DEBUG INFORMIX_DEBUG = "debug" ADMINS = ( ('', ''), ) MANAGERS = ADMINS SECRET_KEY = '' ALLOWED_HOSTS = [] LANGUAGE_CODE = 'en-us' TIME_ZONE = 'America/Chicago' SITE_ID = 1 USE_I18N = False USE_L10N = False USE_TZ = False DEFAULT_CHARSET = 'utf-8' FILE_CHARSET = 'utf-8' SERVER_URL = "" API_URL = "%s/%s" % (SERVER_URL, "api") LIVEWHALE_API_URL = "https://%s" % (SERVER_URL) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) ROOT_DIR = os.path.dirname(__file__) ROOT_URL = "/djskeletor/" ROOT_URLCONF = 'djskeletor.core.urls' WSGI_APPLICATION = 'djskeletor.wsgi.application' MEDIA_ROOT = '' ADMIN_MEDIA_PREFIX = '/static/admin/' STATIC_ROOT = '' STATIC_URL = "/static/" STATICFILES_DIRS = () STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) DATABASES = { 'default': { 'HOST': '127.0.0.1', 'PORT': '3306', 'NAME': 'django_djskeletor', 'ENGINE': 'django.db.backends.mysql', #'ENGINE': 'django.db.backends.dummy', 'USER': '', 'PASSWORD': '' }, } INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.formtools', 'django.contrib.humanize', 'django.contrib.messages', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.staticfiles', 'djskeletor', 'djskeletor.core', 'djskeletor.myapp', 'djtools', ) MIDDLEWARE_CLASSES = ( 'django.middleware.cache.UpdateCacheMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.cache.FetchFromCacheMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # the following should be uncommented unless you are # embedding your apps in iframes #'django.middleware.clickjacking.XFrameOptionsMiddleware', ) # template stuff TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ) TEMPLATE_DIRS = ( "/data2/django_projects/djskeletor/templates/", "/data2/django_templates/djkorra/", "/data2/django_templates/djcher/", "/data2/django_templates/", ) TEMPLATE_CONTEXT_PROCESSORS = ( "djtools.context_processors.sitevars", "django.contrib.auth.context_processors.auth", "django.core.context_processors.request", "django.core.context_processors.debug", "django.core.context_processors.media", ) # caching CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', #'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', #'LOCATION': '127.0.0.1:11211', #'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', #'LOCATION': '/var/tmp/django_djskeletor_cache', #'TIMEOUT': 60*20, #'KEY_PREFIX': "DJSKELETOR_", #'OPTIONS': { # 'MAX_ENTRIES': 80000, #} } } CACHE_MIDDLEWARE_ANONYMOUS_ONLY = True # LDAP Constants LDAP_SERVER = '' LDAP_SERVER_PWM = '' LDAP_PORT = '' LDAP_PORT_PWM = '' LDAP_PROTOCOL = "" LDAP_PROTOCOL_PWM = "" LDAP_BASE = "" LDAP_USER = "" LDAP_PASS = "" LDAP_EMAIL_DOMAIN = "" LDAP_OBJECT_CLASS = "" LDAP_OBJECT_CLASS_LIST = [] LDAP_GROUPS = {} LDAP_RETURN = [] LDAP_RETURN_PWM = [] LDAP_ID_ATTR = "" LDAP_CHALLENGE_ATTR = "" # auth backends AUTHENTICATION_BACKENDS = ( 
'djauth.ldapBackend.LDAPBackend', 'django.contrib.auth.backends.ModelBackend', ) LOGIN_URL = '/djskeletor/accounts/login/' LOGIN_REDIRECT_URL = '/djskeletor/' USE_X_FORWARDED_HOST = True #SESSION_ENGINE = "django.contrib.sessions.backends.cache" SESSION_EXPIRE_AT_BROWSER_CLOSE = False SESSION_COOKIE_DOMAIN=".carthage.edu" SESSION_COOKIE_NAME ='django_djskeletor_cookie' SESSION_COOKIE_AGE = 86400 # SMTP settings EMAIL_HOST = '' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_USE_TLS = True EMAIL_PORT = 587 EMAIL_FAIL_SILENTLY = False DEFAULT_FROM_EMAIL = '' SERVER_EMAIL = '' SERVER_MAIL='' # logging LOG_FILEPATH = os.path.join(os.path.dirname(__file__), "logs/") LOG_FILENAME = LOG_FILEPATH + "debug.log" LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'standard': { 'format' : "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s", 'datefmt' : "%Y/%b/%d %H:%M:%S" }, 'verbose': { 'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s', 'datefmt' : "%Y/%b/%d %H:%M:%S" }, 'simple': { 'format': '%(levelname)s %(message)s' }, }, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'null': { 'level':'DEBUG', 'class':'django.utils.log.NullHandler', }, 'logfile': { 'level':'DEBUG', 'class':'logging.handlers.RotatingFileHandler', 'filename': LOG_FILENAME, 'maxBytes': 50000, 'backupCount': 2, 'formatter': 'standard', }, 'console':{ 'level':'INFO', 'class':'logging.StreamHandler', 'formatter': 'standard' }, 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'include_html': True, 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'djskeletor': { 'handlers':['logfile'], 'propagate': True, 'level':'DEBUG', }, 'django': { 'handlers':['console'], 'propagate': True, 'level':'WARN', }, 'django.db.backends': { 'handlers': ['console'], 'level': 'DEBUG', 'propagate': False, }, 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } }
[ 37811, 198, 35, 73, 14208, 6460, 329, 1628, 13, 198, 37811, 198, 198, 2, 10934, 13532, 2641, 262, 1628, 588, 428, 25, 28686, 13, 6978, 13, 22179, 7, 33, 11159, 62, 34720, 11, 2644, 8, 198, 11748, 28686, 198, 198, 2, 31687, 198, 2,...
2.038077
3,204
__author__ = 'Aaron Yang'
__email__ = 'byang971@usc.edu'
__date__ = '12/9/2020 4:18 PM'

from abc import abstractmethod


if __name__ == '__main__':
    ss = Factory().produce()
    pc = PCFactory().produce()
    laptop = LAPTOPFactory().produce()

    pc.info()
    laptop.info()
    ss.info()
[ 834, 9800, 834, 796, 705, 34451, 10998, 6, 198, 834, 12888, 834, 796, 705, 1525, 648, 24, 4869, 31, 16241, 13, 15532, 6, 198, 834, 4475, 834, 796, 705, 1065, 14, 24, 14, 42334, 604, 25, 1507, 3122, 6, 198, 198, 6738, 450, 66, 13...
2.416
125
import uuid
from typing import AsyncGenerator

import pytest
from sqlalchemy import exc
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel import Session, SQLModel, create_engine

from fastapi_users_db_sqlmodel import (
    NotSetOAuthAccountTableError,
    SQLModelUserDatabase,
    SQLModelUserDatabaseAsync,
)
from tests.conftest import OAuthAccount, UserDB, UserDBOAuth

safe_uuid = uuid.UUID("a9089e5d-2642-406d-a7c0-cbc641aca0ec")
[ 11748, 334, 27112, 198, 6738, 19720, 1330, 1081, 13361, 8645, 1352, 198, 198, 11748, 12972, 9288, 198, 6738, 44161, 282, 26599, 1330, 2859, 198, 6738, 44161, 282, 26599, 13, 2302, 13, 292, 13361, 952, 1330, 1081, 13361, 36044, 11, 2251, ...
2.926554
177
#! /usr/bin/env python3

import os
from os import path

root_dir = path.dirname(path.realpath(__file__))
local_reg_dir = path.join(root_dir, 'registry')
os.makedirs(local_reg_dir, exist_ok=True)

vk_files = [
    'registry/vk.xml',
    'registry/reg.py',
    'registry/generator.py'
]

copy_reg(path.join(root_dir, 'Vulkan-Headers'), vk_files)
[ 2, 0, 1220, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 198, 11748, 28686, 198, 6738, 28686, 1330, 3108, 198, 198, 15763, 62, 15908, 796, 3108, 13, 15908, 3672, 7, 6978, 13, 5305, 6978, 7, 834, 7753, 834, 4008, 198, 12001, 62, 2301,...
2.398551
138
import numpy as np
import pandas as pd
import scipy.stats as st

#from medical_ML import Experiment
import matplotlib.pyplot as plt
import xgboost as xgb
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.dummy import DummyClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression, Lasso
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn import linear_model
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.dummy import DummyRegressor


def split_cohort(datafile, to_exclude = None, test_ind_col = None, drop = 'some'):
    """ Load and clean the dataset """
    if isinstance(datafile, str):
        data = pd.read_csv(datafile)
    else:
        data = datafile
    test_data = None
    if to_exclude is not None:
        for k in to_exclude.keys():
            if k == 'race':
                data = data[data[k].isin(to_exclude[k])]
            elif k == 'agebl':
                data = data[data[k] >= to_exclude[k]]
            elif to_exclude[k]:
                data = data[data[k] == 0]
                if drop == 'some':
                    data = data.drop(k, axis = 1)
            if drop == 'all':
                if (k != 'race') & (k != 'agebl'):
                    data = data.drop(k, axis = 1)
    # self.data = self.data[self.data['year'] <= 2010]
    # self.data = self.data.drop(['year'], axis = 1)
    if test_ind_col is not None:
        test_data = data[data[test_ind_col] == 1]
        test_data = test_data.drop(test_ind_col, axis = 1)
        data = data[data[test_ind_col] == 0]
        data = data.drop(test_ind_col, axis = 1)
    return(data, test_data)

# ax.patch.set_facecolor("0.85")
[ 11748, 299, 32152, 355, 45941, 198, 11748, 19798, 292, 355, 279, 67, 198, 11748, 629, 541, 88, 13, 34242, 355, 336, 198, 198, 2, 6738, 3315, 62, 5805, 1330, 29544, 198, 11748, 2603, 29487, 8019, 13, 9078, 29487, 355, 458, 83, 198, 1...
2.18203
857
import json
import logging
import re
import susepubliccloudinfoclient.infoserverrequests as ifsrequest
import yaml
import sys

RELEASE_DATE = re.compile('^.*-v(\d{8})-*.*')


def get_caasp_release_version():
    """Return the version from os-release"""
    os_release = open('/etc/os-release', 'r').readlines()
    for entry in os_release:
        if entry.startswith('VERSION_ID'):
            version_id = entry.split('=')[-1].strip()
            # We assume that os-release will always have '"' as
            # version delimiters
            version = version_id.strip('"\'')
            logging.info('Release version: "%s"' % version)
            return version


def get_cloud_config_path():
    """Return the path for the cloud configuration file"""
    return '/etc/salt/pillar/cloud.sls'


def get_from_config(config_option):
    """Get the value for the given config option"""
    # Expected low usage of this method, re-read the file on an as needed
    # basis. If this turns out to be an issue cache the content
    config_path = get_cloud_config_path()
    with open(config_path) as config_file:
        config = yaml.load(config_file.read())
    settings = config.get('cloud')
    if not settings:
        return
    return settings.get(config_option)


def get_cluster_image_identifier(framework, region):
    """Return the identifier for the latest cluster node image"""
    cluster_image = get_from_config('cluster_image')
    if cluster_image:
        # The data returned in this code path has built in knowledge
        # about the information consumed by the client from the
        # full pint data
        image_data = {}
        image_data['id'] = cluster_image
        image_data['name'] = cluster_image
        if framework == 'microsoft' and cluster_image.count(':') == 3:
            image_data['urn'] = cluster_image
        msg = 'Using cluster image from configuration. '
        msg += 'Image data for cluster node image: "%s"'
        logging.info(msg % image_data)
        return image_data

    name_filter = 'name~caasp,name~cluster'
    flavor = get_from_config('procurement_flavor')
    if flavor == 'byos':
        name_filter += ',name~byos'
    else:
        name_filter += ',name!byos'
    version = get_caasp_release_version()
    name_filter += ',name~' + version.replace('.', '-')
    # The cluster image we choose depends on the admin node version,
    # thus we cannot just query for active images. We need to get all
    # images and then process accordingly.
    try:
        image_info = ifsrequest.get_image_data(
            framework,
            None,
            'json',
            region,
            name_filter
        )
    except Exception as e:
        logging.error('Pint server access failed: "%s"' % e.message)
        # This message will bubble up through salt
        return 'See /var/log/caasp_cloud_setup.log'
    try:
        image_data = json.loads(image_info)
        available_images = image_data.get('images', [])
        target_image = None
        target_image_date = 0
        for image in available_images:
            image_name = image.get('name')
            try:
                date = int(RELEASE_DATE.match(image_name).group(1))
                if date > target_image_date:
                    # If we have multiple images with the same date that
                    # match our filter criteria we have a serious data problem
                    # we cannot really recover, the first one wins
                    target_image = image
            except Exception:
                # Image name with no date stamp skip it
                continue
    except Exception as e:
        logging.error('Could not load json data from pint: "%s"' % e.message)
        # This message will bubble up through salt
        return 'See /var/log/caasp_cloud_setup.log'
    if not target_image:
        logging.error('Could not determine image identifier for cluster node.')
        logging.error('This implies that the pint server is unreachable or the '
                      'data is incomplete, please report the issue, exiting.')
        sys.exit('pint lookup failed')
    logging.info('Image data for cluster node image: "%s"' % target_image)
    return target_image
[ 11748, 33918, 198, 11748, 18931, 198, 11748, 302, 198, 11748, 424, 325, 11377, 17721, 10745, 38679, 1153, 13, 10745, 13416, 332, 8897, 3558, 355, 611, 82, 25927, 198, 11748, 331, 43695, 198, 11748, 25064, 198, 198, 2200, 22781, 62, 35, ...
2.502062
1,697
# Copyright (C) 2015-2016 The bitcoin-blockchain-parser developers
#
# This file is part of bitcoin-blockchain-parser.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of bitcoin-blockchain-parser, including this file, may be copied,
# modified, propagated, or distributed except according to the terms contained
# in the LICENSE file.

import unittest
from datetime import datetime
from .utils import read_test_data
from blockchain_parser.block import Block
[ 2, 15069, 357, 34, 8, 1853, 12, 5304, 383, 8550, 12, 9967, 7983, 12, 48610, 6505, 198, 2, 198, 2, 770, 2393, 318, 636, 286, 8550, 12, 9967, 7983, 12, 48610, 13, 198, 2, 198, 2, 632, 318, 2426, 284, 262, 5964, 2846, 287, 262, 3...
3.857143
140
'''
DNA++ (c) DNA++ 2017

All rights reserved.

@author: neilswainston
'''
[ 7061, 6, 198, 28886, 4880, 357, 66, 8, 7446, 4880, 2177, 198, 198, 3237, 2489, 10395, 13, 198, 198, 31, 9800, 25, 497, 346, 2032, 391, 3743, 198, 7061, 6, 198 ]
2.419355
31
import os

import pandas as pd
import pytest

from user_similarity_model.config.core import DATASET_DIR, config
[ 11748, 28686, 198, 198, 11748, 19798, 292, 355, 279, 67, 198, 11748, 12972, 9288, 198, 198, 6738, 2836, 62, 38610, 414, 62, 19849, 13, 11250, 13, 7295, 1330, 360, 1404, 1921, 2767, 62, 34720, 11, 4566, 628 ]
3.054054
37
from django.apps import AppConfig
import logging

logger = logging.getLogger(__name__)
[ 6738, 42625, 14208, 13, 18211, 1330, 2034, 16934, 198, 11748, 18931, 198, 198, 6404, 1362, 796, 18931, 13, 1136, 11187, 1362, 7, 834, 3672, 834, 8, 628 ]
3.259259
27
import filer
import tests
[ 11748, 1226, 263, 198, 11748, 5254, 198 ]
3.714286
7
""" ============ Pass Network ============ This example shows how to plot passes between players in a set formation. """ import pandas as pd from mplsoccer.pitch import Pitch from matplotlib.colors import to_rgba import numpy as np from mplsoccer.statsbomb import read_event, EVENT_SLUG ############################################################################## # Set team and match info, and get event and tactics dataframes for the defined match_id match_id = 15946 team = 'Barcelona' opponent = 'Alavs (A), 2018/19 La Liga' event_dict = read_event(f'{EVENT_SLUG}/{match_id}.json', warn=False) players = event_dict['tactics_lineup'] events = event_dict['event'] ############################################################################## # Adding on the last tactics id and formation for the team for each event events.loc[events.tactics_formation.notnull(), 'tactics_id'] = events.loc[ events.tactics_formation.notnull(), 'id'] events[['tactics_id', 'tactics_formation']] = events.groupby('team_name')[[ 'tactics_id', 'tactics_formation']].ffill() ############################################################################## # Add the abbreviated player position to the players dataframe formation_dict = {1: 'GK', 2: 'RB', 3: 'RCB', 4: 'CB', 5: 'LCB', 6: 'LB', 7: 'RWB', 8: 'LWB', 9: 'RDM', 10: 'CDM', 11: 'LDM', 12: 'RM', 13: 'RCM', 14: 'CM', 15: 'LCM', 16: 'LM', 17: 'RW', 18: 'RAM', 19: 'CAM', 20: 'LAM', 21: 'LW', 22: 'RCF', 23: 'ST', 24: 'LCF', 25: 'SS'} players['position_abbreviation'] = players.player_position_id.map(formation_dict) ############################################################################## # Add on the subsitutions to the players dataframe, i.e. where players are subbed on # but the formation doesn't change sub = events.loc[events.type_name == 'Substitution', ['tactics_id', 'player_id', 'substitution_replacement_id', 'substitution_replacement_name']] players_sub = players.merge(sub.rename({'tactics_id': 'id'}, axis='columns'), on=['id', 'player_id'], how='inner', validate='1:1') players_sub = (players_sub[['id', 'substitution_replacement_id', 'position_abbreviation']] .rename({'substitution_replacement_id': 'player_id'}, axis='columns')) players = pd.concat([players, players_sub]) players.rename({'id': 'tactics_id'}, axis='columns', inplace=True) players = players[['tactics_id', 'player_id', 'position_abbreviation']] ############################################################################## # Add player position information to the events dataframe # add on the position the player was playing in the formation to the events dataframe events = events.merge(players, on=['tactics_id', 'player_id'], how='left', validate='m:1') # add on the position the receipient was playing in the formation to the events dataframe events = events.merge(players.rename({'player_id': 'pass_recipient_id'}, axis='columns'), on=['tactics_id', 'pass_recipient_id'], how='left', validate='m:1', suffixes=['', '_receipt']) ############################################################################## # Create dataframes for passes and player locations # get a dataframe with all passes mask_pass = (events.team_name == team) & (events.type_name == 'Pass') to_keep = ['id', 'match_id', 'player_id', 'player_name', 'outcome_name', 'pass_recipient_id', 'pass_recipient_name', 'x', 'y', 'end_x', 'end_y', 'tactics_id', 'tactics_formation', 'position_abbreviation', 'position_abbreviation_receipt'] passes = events.loc[mask_pass, to_keep].copy() print('Formations used by {} in match: '.format(team), 
passes['tactics_formation'].unique()) ############################################################################## # Filter passes by chosen formation, then group all passes and receipts to # calculate avg x, avg y, count of events for each slot in the formation formation = 433 passes_formation = passes[(passes.tactics_formation == formation) & (passes.position_abbreviation_receipt.notnull())].copy() passer_passes = passes_formation[['position_abbreviation', 'x', 'y']].copy() recipient_passes = passes_formation[['position_abbreviation_receipt', 'end_x', 'end_y']].copy() # rename columns to match those in passer_passes recipient_passes.rename({'position_abbreviation_receipt': 'position_abbreviation', 'end_x': 'x', 'end_y': 'y'}, axis='columns', inplace=True) # create a new dataframe containing all individual passes and receipts from passes_formation appended_passes = pd.concat(objs=[passer_passes, recipient_passes], ignore_index=True) average_locs_and_count = appended_passes.groupby('position_abbreviation').agg({ 'x': ['mean'], 'y': ['mean', 'count']}) average_locs_and_count.columns = ['x', 'y', 'count'] ############################################################################## # Group the passes by unique pairings of players and add the avg player positions to this dataframe # calculate the number of passes between each position (using min/ max so we get passes both ways) passes_formation['pos_max'] = passes_formation[['position_abbreviation', 'position_abbreviation_receipt']].max(axis='columns') passes_formation['pos_min'] = passes_formation[['position_abbreviation', 'position_abbreviation_receipt']].min(axis='columns') passes_between = passes_formation.groupby(['pos_min', 'pos_max']).id.count().reset_index() passes_between.rename({'id': 'pass_count'}, axis='columns', inplace=True) # add on the location of each player so we have the start and end positions of the lines passes_between = passes_between.merge(average_locs_and_count, left_on='pos_min', right_index=True) passes_between = passes_between.merge(average_locs_and_count, left_on='pos_max', right_index=True, suffixes=['', '_end']) ############################################################################## # Calculate the line width and marker sizes relative to the largest counts max_line_width = 18 max_marker_size = 3000 passes_between['width'] = passes_between.pass_count / passes_between.pass_count.max() * max_line_width average_locs_and_count['marker_size'] = (average_locs_and_count['count'] / average_locs_and_count['count'].max() * max_marker_size) ############################################################################## # Set color to make the lines more transparent when fewer passes are made min_transparency = 0.3 color = np.array(to_rgba('white')) color = np.tile(color, (len(passes_between), 1)) c_transparency = passes_between.pass_count / passes_between.pass_count.max() c_transparency = (c_transparency * (1 - min_transparency)) + min_transparency color[:, 3] = c_transparency ############################################################################## # Plotting pitch = Pitch(pitch_type='statsbomb', orientation='horizontal', pitch_color='#22312b', line_color='#c7d5cc', figsize=(16, 11), constrained_layout=True, tight_layout=False) fig, ax = pitch.draw() pass_lines = pitch.lines(passes_between.x, passes_between.y, passes_between.x_end, passes_between.y_end, lw=passes_between.width, color=color, zorder=1, ax=ax) pass_nodes = pitch.scatter(average_locs_and_count.x, average_locs_and_count.y, 
s=average_locs_and_count.marker_size, color='red', edgecolors='black', linewidth=1, alpha=1, ax=ax) for index, row in average_locs_and_count.iterrows(): pitch.annotate(row.name, xy=(row.x, row.y), c='white', va='center', ha='center', size=16, weight='bold', ax=ax) title = ax.set_title("{} {} Formation vs {}".format(team, formation, opponent), size=28, y=0.97, color='#c7d5cc') fig.set_facecolor("#22312b")
[ 37811, 198, 25609, 198, 14478, 7311, 198, 25609, 198, 198, 1212, 1672, 2523, 703, 284, 7110, 8318, 1022, 1938, 287, 257, 900, 9978, 13, 198, 37811, 198, 198, 11748, 19798, 292, 355, 279, 67, 198, 6738, 285, 489, 35634, 2189, 13, 79, ...
2.791551
2,888
__all__ = ['Factory']


import jsfiddle_build
import jsfiddle_github
import jsfiddle_generator
import jsfiddle_readme_generator

import getdirs
import getfiles
import os
import popd
import yaml


class Factory:
    """attrs: `path`. methods: `detox()`, `init()`, `build()`, `readme()`, `update_resources()`"""
    path = None
[ 834, 439, 834, 796, 37250, 22810, 20520, 628, 198, 11748, 474, 28202, 2509, 62, 11249, 198, 11748, 474, 28202, 2509, 62, 12567, 198, 11748, 474, 28202, 2509, 62, 8612, 1352, 198, 11748, 474, 28202, 2509, 62, 961, 1326, 62, 8612, 1352, ...
2.877193
114
import logging
import os
from datetime import datetime
from inspect import signature, Parameter
from pathlib import Path
from pprint import pprint
from textwrap import dedent
from typing import Optional, Union

import fire
import tensorflow as tf
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard, TerminateOnNaN
from tensorflow.keras import Model

from spellnn import models
from spellnn.data import alphabet
from spellnn.data.alphabet import get_chars
from spellnn.data.processing import DataProcessor
from spellnn.data.util import nb_lines
from spellnn.layers.mapping import CharMapping

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # FATAL
logging.getLogger('tensorflow').setLevel(logging.FATAL)


if __name__ == '__main__':
    cli = Gym()
    fire.Fire(cli)
[ 11748, 18931, 198, 11748, 28686, 198, 6738, 4818, 8079, 1330, 4818, 8079, 198, 6738, 10104, 1330, 9877, 11, 25139, 2357, 198, 6738, 3108, 8019, 1330, 10644, 198, 6738, 279, 4798, 1330, 279, 4798, 198, 6738, 2420, 37150, 1330, 4648, 298, ...
3.218623
247
# Copyright 2020 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Frozen Dictionary."""

from typing import TypeVar, Mapping, Dict, Tuple

from flax import serialization
import jax

K = TypeVar('K')
V = TypeVar('V')


def freeze(xs: Dict[K, V]) -> FrozenDict[K, V]:
    """Freeze a nested dict.

    Makes a nested `dict` immutable by transforming it into `FrozenDict`.
    """
    # Turn the nested FrozenDict into a dict. This way the internal data structure
    # of FrozenDict does not contain any FrozenDicts.
    # instead we create those lazily in `__getitem__`.
    # As a result tree_flatten/unflatten will be fast
    # because it operates on native dicts.
    xs = unfreeze(xs)
    return FrozenDict(xs)


def unfreeze(x: FrozenDict[K, V]) -> Dict[K, V]:
    """Unfreeze a FrozenDict.

    Makes a mutable copy of a `FrozenDict` mutable by transforming
    it into (nested) dict.
    """
    if not isinstance(x, (FrozenDict, dict)):
        return x
    ys = {}
    for key, value in x.items():
        ys[key] = unfreeze(value)
    return ys


serialization.register_serialization_state(
    FrozenDict, _frozen_dict_state_dict, _restore_frozen_dict)
[ 2, 15069, 12131, 383, 1610, 897, 46665, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 198, 2,...
3.085981
535
# -*- coding: utf-8 -*-

from django.utils import translation
from django.db.models import ObjectDoesNotExist

from pybb import util
from pybb.signals import user_saved
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 6738, 42625, 14208, 13, 26791, 1330, 11059, 198, 6738, 42625, 14208, 13, 9945, 13, 27530, 1330, 9515, 13921, 3673, 3109, 396, 198, 6738, 12972, 11848, 1330, 7736, 198...
3.035714
56
from enum import Enum
[ 6738, 33829, 1330, 2039, 388 ]
4.2
5
import fileinput

counts = {}
for line in fileinput.input():
    line = line.strip()
    p1, p2 = line.split('>')
    p1 = p1[:-2]
    x1, y1 = p1.split(',')
    x1 = int(x1)
    y1 = int(y1)
    p2 = p2[1:]
    x2, y2 = p2.split(',')
    x2 = int(x2)
    y2 = int(y2)
    if x1 == x2:
        dx = 0
    elif x1 > x2:
        dx = -1
    else:
        dx = 1
    if y1 == y2:
        dy = 0
    elif y1 > y2:
        dy = -1
    else:
        dy = 1
    x = x1
    y = y1
    while True:
        pt = (x, y)
        counts[pt] = counts.get(pt, 0) + 1
        if x == x2 and y == y2:
            break
        x += dx
        y += dy

n = 0
for _, ct in counts.items():
    if ct > 1:
        n += 1
print(n)
[ 11748, 2393, 15414, 198, 198, 9127, 82, 796, 23884, 198, 1640, 1627, 287, 2393, 15414, 13, 15414, 33529, 198, 220, 220, 220, 1627, 796, 1627, 13, 36311, 3419, 198, 220, 220, 220, 279, 16, 11, 279, 17, 796, 1627, 13, 35312, 10786, 29...
1.630979
439
import logging
from web3 import Web3
import sys
import time
import meditation.meditation as meditation


if __name__ == "__main__":
    log_format = '%(asctime)s|%(name)s|%(levelname)s: %(message)s'
    logger = logging.getLogger("DFK-meditation")
    logger.setLevel(logging.DEBUG)
    logging.basicConfig(level=logging.INFO, format=log_format, stream=sys.stdout)

    rpc_server = 'https://api.harmony.one'
    logger.info("Using RPC server " + rpc_server)

    private_key = None  # set private key
    account_address = '0x2E7669F61eA77F02445A015FBdcFe2DE47083E02'
    gas_price_gwei = 10
    tx_timeout_seconds = 30
    w3 = Web3(Web3.HTTPProvider(rpc_server))

    active_meditations = meditation.get_active_meditations(account_address, rpc_server)
    logger.info("Pending meditation on address " + str(account_address) + ": " + str(active_meditations))

    level = 1
    hero_id = 1
    required_runes = meditation.get_required_runes(level, rpc_server)
    meditation.start_meditation(1, meditation.stat2id('strength'), meditation.stat2id('endurance'),
                                meditation.stat2id('luck'), meditation.ZERO_ADDRESS, private_key,
                                w3.eth.getTransactionCount(account_address), gas_price_gwei,
                                tx_timeout_seconds, rpc_server, logger)
    hero_meditation = meditation.get_hero_meditation(hero_id, rpc_server)
    logger.info("Pending meditation " + str(hero_meditation))
    time.sleep(5)
    meditation.complete_meditation(hero_id, private_key, w3.eth.getTransactionCount(account_address),
                                   gas_price_gwei, tx_timeout_seconds, rpc_server, logger)
[ 11748, 18931, 198, 6738, 3992, 18, 1330, 5313, 18, 198, 11748, 25064, 198, 11748, 640, 198, 11748, 16901, 13, 1150, 3780, 355, 16901, 628, 198, 361, 11593, 3672, 834, 6624, 366, 834, 12417, 834, 1298, 198, 220, 220, 220, 2604, 62, 189...
2.511521
651
import time
[ 11748, 640, 628 ]
4.333333
3
'''
Copyright (c) The Dojo Foundation 2011. All Rights Reserved.
Copyright (c) IBM Corporation 2008, 2011. All Rights Reserved.
'''
# tornado
import tornado.ioloop
# std lib
import logging
import time
import weakref
import functools
# coweb
from .base import BotWrapperBase

log = logging.getLogger('coweb.bot')
[ 7061, 6, 198, 15269, 357, 66, 8, 383, 2141, 7639, 5693, 2813, 13, 1439, 6923, 33876, 13, 198, 15269, 357, 66, 8, 19764, 10501, 3648, 11, 2813, 13, 1439, 6923, 33876, 13, 198, 7061, 6, 198, 2, 33718, 198, 11748, 33718, 13, 1669, 11...
3.391304
92
import pygame
import random

pygame.init()

clock = pygame.time.Clock()
fps = 60

#game window
bottom_panel = 150
screen_width = 800
screen_height = 400 + bottom_panel

screen = pygame.display.set_mode((screen_width, screen_height))
pygame.display.set_caption('Battle')

#define game variables
current_fighter = 1
total_fighters = 3
action_cooldown = 0
action_wait_time = 90
attack = False
potion = False
clicked = False

#define fonts
font = pygame.font.SysFont('Times New Roman', 26)

#define colours
red = (255, 0, 0)
green = (0, 255, 0)

#load images
#background image
background_img = pygame.image.load('img/Background/background.png').convert_alpha()
#panel image
panel_img = pygame.image.load('img/Icons/panel.png').convert_alpha()
#sword image
sword_img = pygame.image.load('img/Icons/sword.png').convert_alpha()

#create function for drawing text

#function for drawing background

#function for drawing panel

#fighter class

knight = Fighter(200, 260, 'Knight', 30, 10, 3)
bandit1 = Fighter(550, 270, 'Bandit', 20, 6, 1)
bandit2 = Fighter(700, 270, 'Bandit', 20, 6, 1)

bandit_list = []
bandit_list.append(bandit1)
bandit_list.append(bandit2)

knight_health_bar = HealthBar(100, screen_height - bottom_panel + 40, knight.hp, knight.max_hp)
bandit1_health_bar = HealthBar(550, screen_height - bottom_panel + 40, bandit1.hp, bandit1.max_hp)
bandit2_health_bar = HealthBar(550, screen_height - bottom_panel + 100, bandit2.hp, bandit2.max_hp)

run = True
while run:

    clock.tick(fps)

    #draw background
    draw_bg()

    #draw panel
    draw_panel()
    knight_health_bar.draw(knight.hp)
    bandit1_health_bar.draw(bandit1.hp)
    bandit2_health_bar.draw(bandit2.hp)

    #draw fighters
    knight.update()
    knight.draw()
    for bandit in bandit_list:
        bandit.update()
        bandit.draw()

    #control player actions
    #reset action variables
    attack = False
    potion = False
    target = None
    #make sure mouse is visible
    pygame.mouse.set_visible(True)
    pos = pygame.mouse.get_pos()
    for count, bandit in enumerate(bandit_list):
        if bandit.rect.collidepoint(pos):
            #hide mouse
            pygame.mouse.set_visible(False)
            #show sword in place of mouse cursor
            screen.blit(sword_img, pos)
            if clicked == True:
                attack = True
                target = bandit_list[count]

    #player action
    if knight.alive == True:
        if current_fighter == 1:
            action_cooldown += 1
            if action_cooldown >= action_wait_time:
                #look for player action
                #attack
                if attack == True and target != None:
                    knight.attack(target)
                    current_fighter += 1
                    action_cooldown = 0

    #enemy action
    for count, bandit in enumerate(bandit_list):
        if current_fighter == 2 + count:
            if bandit.alive == True:
                action_cooldown += 1
                if action_cooldown >= action_wait_time:
                    #attack
                    bandit.attack(knight)
                    current_fighter += 1
                    action_cooldown = 0
            else:
                current_fighter += 1

    #if all fighters have had a turn then reset
    if current_fighter > total_fighters:
        current_fighter = 1

    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False
        if event.type == pygame.MOUSEBUTTONDOWN:
            clicked = True
        else:
            clicked = False

    pygame.display.update()

pygame.quit()
[ 11748, 12972, 6057, 198, 11748, 4738, 198, 198, 9078, 6057, 13, 15003, 3419, 198, 198, 15750, 796, 12972, 6057, 13, 2435, 13, 44758, 3419, 198, 29647, 796, 3126, 198, 198, 2, 6057, 4324, 198, 22487, 62, 35330, 796, 6640, 198, 9612, 62...
2.66388
1,196
matriz = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]
soma = col3 = maior = 0
for l in range(0, 3):
    for c in range(0, 3):
        matriz[l][c] = int(input(f'[{l}][{c}]: '))
for l in range(0, 3):
    for c in range(0, 3):
        print(f'[{matriz[l][c]:^5}]', end='')
        if matriz[l][c] % 2 == 0:
            soma += matriz[l][c]
    print()
for l in range(0, 3):
    col3 += matriz[l][2]
for c in range(0, 3):
    if c == 0:
        maior = matriz[1][c]
    elif matriz[1][c] > maior:
        maior = matriz[1][c]
print(f'A soma dos numeros pares {soma}')
print(f'A soma dos valores da 3 coluna {col3}')
print(f'O maior numero da 2 linha {maior}')
[ 6759, 47847, 796, 16410, 15, 11, 657, 11, 657, 4357, 685, 15, 11, 657, 11, 657, 4357, 685, 15, 11, 657, 11, 657, 11907, 198, 82, 6086, 796, 951, 18, 796, 17266, 1504, 796, 657, 198, 1640, 300, 287, 2837, 7, 15, 11, 513, 2599, ...
1.856734
349
import onvif
import os
import asyncio
import urllib.parse
from onvif import ONVIFCamera
from pytapo import Tapo

from .const import ENABLE_MOTION_SENSOR, DOMAIN, LOGGER, CLOUD_PASSWORD
from homeassistant.const import CONF_IP_ADDRESS, CONF_USERNAME, CONF_PASSWORD
from homeassistant.components.onvif.event import EventManager
from homeassistant.components.ffmpeg import DATA_FFMPEG
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
[ 11748, 319, 85, 361, 198, 11748, 28686, 198, 11748, 30351, 952, 198, 11748, 2956, 297, 571, 13, 29572, 198, 6738, 319, 85, 361, 1330, 6177, 12861, 4851, 18144, 198, 6738, 12972, 83, 41817, 1330, 16880, 78, 198, 6738, 764, 9979, 1330, ...
2.932886
149
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Name:        test_uidattr
# Purpose:     Test driver for module 'uidattr'
#
# Author:      Michael Amrhein (michael@adrhinum.de)
#
# Copyright:   (c) 2018 Michael Amrhein
# ----------------------------------------------------------------------------
# $Source$
# $Revision$

"""Test driver for module 'uidattr'"""

import unittest
from uuid import uuid1

from camd3.infrastructure.component import (
    Component, register_utility, UniqueIdAttribute)
from camd3.infrastructure.component.idfactories import (
    UUIDGenerator, uuid_generator)  # factory for UUIDs


if __name__ == '__main__':  # pragma: no cover
    unittest.main()
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 16529, 10541, 198, 2, 6530, 25, 220, 220, 220, 220, 220, 220, 220, 1332, 62, 27112, 35226, 198, 2, 32039, 25...
2.948148
270
import requests
import time
from bs4 import BeautifulSoup
[ 11748, 7007, 198, 11748, 640, 198, 6738, 275, 82, 19, 1330, 23762, 50, 10486, 628, 628, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 198 ]
2.551724
29
import uuid
from abc import ABC, abstractmethod
from collections import defaultdict
from typing import Union

from boto3.dynamodb.conditions import Attr as BotoAttr
from boto3.dynamodb.conditions import Key as BotoKey

from awstin.dynamodb.utils import from_decimal, to_decimal

NOT_SET = NotSet()


def size_query(self, *args, **kwargs):
    return BotoAttr(self._awstin_name).size()


# ---- Update Operators

class CombineOperator(UpdateOperator):
    """
    Combine two update expressions
    """


class SetOperator(UpdateOperator):
    """
    Support for SET
    """


class AddOperator(UpdateOperator):


class RemoveOperator(UpdateOperator):


class DeleteOperator(UpdateOperator):


# ---- Update Operands

def list_append(left, right):
    """
    Set a value to the combination of two lists in an update expression
    """
    return ListAppendOperand(UpdateOperand(left), UpdateOperand(right))
[ 11748, 334, 27112, 198, 6738, 450, 66, 1330, 9738, 11, 12531, 24396, 198, 6738, 17268, 1330, 4277, 11600, 198, 6738, 19720, 1330, 4479, 198, 198, 6738, 275, 2069, 18, 13, 67, 4989, 375, 65, 13, 17561, 1756, 1330, 3460, 81, 355, 347, ...
3.013029
307
from .supervise import *
[ 6738, 764, 16668, 85, 786, 1330, 1635, 198 ]
3.125
8
import torch
import torch.nn as nn
from torch.nn.parameter import Parameter
from torch.nn.utils.rnn import pad_packed_sequence as unpack
from torch.nn.utils.rnn import pack_padded_sequence as pack
from .my_optim import weight_norm as WN

# TODO: use system func to bind ~
RNN_MAP = {'lstm': nn.LSTM, 'gru': nn.GRU, 'rnn': nn.RNN}

#------------------------------
# Contextual embedding
# TODO: remove packing to speed up
# Credit from: https://github.com/salesforce/cove
#------------------------------
[ 11748, 28034, 201, 198, 11748, 28034, 13, 20471, 355, 299, 77, 201, 198, 6738, 28034, 13, 20471, 13, 17143, 2357, 1330, 25139, 2357, 201, 198, 6738, 28034, 13, 20471, 13, 26791, 13, 81, 20471, 1330, 14841, 62, 34860, 62, 43167, 355, 5...
2.817204
186
#!//anaconda/envs/py36/bin/python
#
# File name:   kmc_pld.py
# Date:        2018/08/03 09:07
# Author:      Lukas Vlcek
#
# Description:
#

import numpy as np
from collections import Counter
[ 2, 0, 1003, 272, 330, 13533, 14, 268, 14259, 14, 9078, 2623, 14, 8800, 14, 29412, 198, 2, 198, 2, 9220, 1438, 25, 220, 220, 10571, 66, 62, 79, 335, 13, 9078, 198, 2, 7536, 25, 220, 220, 220, 220, 220, 220, 220, 2864, 14, 2919,...
2.204545
88
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#

from odf.namespaces import METANS
from odf.element import Element

# Autogenerated
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 15069, 357, 34, 8, 4793, 12, 12726, 311, 918, 13876, 70, 11, 3427, 9344, 7732, 198, 2, 198, 2, 770, 5888, 318, 1479, 3788, 26, 345, 460, 17678, 4163, 340, 290, ...
3.645669
254
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '2'
import torch
torch.rand(10)
import torch.nn as nn
import torch.nn.functional as F
import glob
from tqdm import tqdm, trange

print(torch.cuda.is_available())
print(torch.cuda.get_device_name())
print(torch.cuda.current_device())
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print('Using device:', device)
print()

#Additional Info when using cuda
if device.type == 'cuda':
    print(torch.cuda.get_device_name(0))
    print('Memory Usage:')
    print('Allocated:', round(torch.cuda.memory_allocated(0)/1024**3,1), 'GB')
    print('Cached:   ', round(torch.cuda.memory_reserved(0)/1024**3,1), 'GB')

import torch.backends.cudnn as cudnn
import numpy as np
import os, cv2
from tqdm import tqdm, trange
import seaborn as sns

from models.experimental import attempt_load
from utils.datasets import LoadStreams, LoadImages
from utils.general import (
    check_img_size, non_max_suppression, apply_classifier, scale_coords,
    xyxy2xywh, plot_one_box, strip_optimizer)
from utils.torch_utils import select_device, load_classifier, time_synchronized
from my_utils import xyxy_2_xyxyo, draw_boxes

# Initialize
device = select_device('')
half = device.type != 'cpu'  # half precision only supported on CUDA

#%%
# Directories
out = '/home/user01/data_ssd/Talha/yolo/op/'
weights = '/home/user01/data_ssd/Talha/yolo/ScaledYOLOv4/runs/exp2_yolov4-csp-results/weights/best_yolov4-csp-results.pt'
source = '/home/user01/data_ssd/Talha/yolo/paprika_y5/valid/images/'
imgsz = 416
conf_thres = 0.4
iou_thres = 0.5
classes = [0,1,2,3,4,5]
class_names = ["blossom_end_rot", "graymold","powdery_mildew","spider_mite",
               "spotting_disease", "snails_and_slugs"]

# deleting files in op_dir
filelist = [ f for f in os.listdir(out)]# if f.endswith(".png") ]
for f in tqdm(filelist, desc = 'Deleting old files fro directory'):
    os.remove(os.path.join(out, f))

# Load model
model = attempt_load(weights, map_location=device)  # load FP32 model
imgsz = check_img_size(imgsz, s=model.stride.max())  # check img_size
if half:
    model.half()  # to FP16

# Load model
model = attempt_load(weights, map_location=device)  # load FP32 model
imgsz = check_img_size(imgsz, s=model.stride.max())  # check img_size

img_paths = glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.png') + \
            glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.jpg')

# Run inference
if device.type != 'cpu':
    model(torch.zeros(1, 3, imgsz, imgsz).to(device).type_as(next(model.parameters())))  # run once
#%%
for i in trange(len(img_paths)):
    path = img_paths[i]
    img1 = cv2.imread(path)
    img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2RGB)
    img_h, img_w, _ = img1.shape
    img2 = prepare_input(img1, 416, half)
    # get file name
    name = os.path.basename(path)[:-4]
    # Inference
    t1 = time_synchronized()
    pred = model(img2, augment=False)[0]
    # Apply NMS
    pred = non_max_suppression(pred, conf_thres, iou_thres, classes=classes, agnostic=True)
    if pred[0] is not None:
        boxes = pred[0].cpu().detach().numpy()  # <xmin><ymin><xmax><ymax><confd><class_id>
    else:
        boxes = np.array([10.0, 20.0, 30.0, 50.0, 0.75, 0]).reshape(1,6)  # dummy values
    coords_minmax = np.zeros((boxes.shape[0], 4))  # droping 5th value
    confd = np.zeros((boxes.shape[0], 1))
    class_ids = np.zeros((boxes.shape[0], 1))
    # assign
    coords_minmax = boxes[:,0:4]  # coords
    confd = boxes[:,4]  # confidence
    class_ids = boxes[:,5]  # class id
    coords_xyminmax = []
    det_classes = []
    for i in range(boxes.shape[0]):
        coords_xyminmax.append(xyxy_2_xyxyo(img_w, img_h, coords_minmax[i]))
        det_classes.append(class_names[int(class_ids[i])])

    all_bounding_boxnind = []
    for i in range(boxes.shape[0]):
        bounding_box = [0.0] * 6
        bounding_box[0] = det_classes[i]
        bounding_box[1] = confd[i]
        bounding_box[2] = coords_xyminmax[i][0]
        bounding_box[3] = coords_xyminmax[i][1]
        bounding_box[4] = coords_xyminmax[i][2]
        bounding_box[5] = coords_xyminmax[i][3]
        bounding_box = str(bounding_box)[1:-1]  # remove square brackets
        bounding_box = bounding_box.replace("'",'')  # removing inverted commas around class name
        bounding_box = "".join(bounding_box.split())  # remove spaces in between **here dont give space inbetween the inverted commas "".
        all_bounding_boxnind.append(bounding_box)

    all_bounding_boxnind = ' '.join(map(str, all_bounding_boxnind))  # convert list to string
    all_bounding_boxnind = list(all_bounding_boxnind.split(' '))  # convert string to list
    # replacing commas with spaces
    for i in range(len(all_bounding_boxnind)):
        all_bounding_boxnind[i] = all_bounding_boxnind[i].replace(',',' ')

    for i in range(len(all_bounding_boxnind)):
        # check if file exists else make new
        with open(out + '{}.txt'.format(name), "a+") as file_object:
            # Move read cursor to the start of file.
            file_object.seek(0)
            # If file is not empty then append '\n'
            data = file_object.read(100)
            if len(data) > 0:
                file_object.write("\n")
            # Append text at the end of file
            file_object.write(all_bounding_boxnind[i])

#%%
import glob, random
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['figure.dpi'] = 300

img_paths = glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.png') + \
            glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.jpg')

img_path = random.choice(img_paths)
img1 = cv2.imread(img_path)
img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2RGB)
img_h, img_w, _ = img1.shape
img2 = prepare_input(img1, 416, half)

pred = model(img2, augment=False)[0]
# Apply NMS
pred = non_max_suppression(pred, conf_thres, iou_thres, classes=classes, agnostic=True)
boxes = pred[0].cpu().detach().numpy()  # <xmin><ymin><xmax><ymax><confd><class_id>

coords_minmax = np.zeros((boxes.shape[0], 4))  # droping 5th value
confd = np.zeros((boxes.shape[0], 1))
class_ids = np.zeros((boxes.shape[0], 1))
# assign
coords_minmax = boxes[:,0:4]  # coords
confd = boxes[:,4]  # confidence
class_ids = boxes[:,5]  # class id

coords_xyminmax = []
det_classes = []
for i in range(boxes.shape[0]):
    coords_xyminmax.append(xyxy_2_xyxyo(img_w, img_h, coords_minmax[i]))
    det_classes.append(class_names[int(class_ids[i])])

t = np.asarray(coords_xyminmax)
op = draw_boxes(img1, confd, t, det_classes, class_names, order='xy_minmax', analysis=False)
plt.imshow(op)
print('='*50)
print('Image Name: ', os.path.basename(img_path), img1.shape)
print('\nClass_name ', '| B_box Coords ', '| Confidence')
print('_'*50)
for k in range(len(det_classes)):
    print(det_classes[k], t[k], confd[k])
print('='*50)
[ 11748, 28686, 198, 418, 13, 268, 2268, 17816, 43633, 5631, 62, 29817, 34563, 62, 39345, 34444, 20520, 796, 705, 17, 6, 198, 11748, 28034, 198, 13165, 354, 13, 25192, 7, 940, 8, 198, 11748, 28034, 13, 20471, 355, 299, 77, 198, 11748, ...
2.269105
3,062
"""Mock responses for recommendations.""" SEARCH_REQ = { "criteria": { "policy_type": ['reputation_override'], "status": ['NEW', 'REJECTED', 'ACCEPTED'], "hashes": ['111', '222'] }, "rows": 50, "sort": [ { "field": "impact_score", "order": "DESC" } ] } SEARCH_RESP = { "results": [ { "recommendation_id": "91e9158f-23cc-47fd-af7f-8f56e2206523", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "32d2be78c00056b577295aa0943d97a5c5a0be357183fcd714c7f5036e4bdede", "filename": "XprotectService", "application": { "type": "EXE", "value": "FOO" } }, "workflow": { "status": "NEW", "changed_by": "rbaratheon@example.com", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T20:53:39.000Z", "comment": "Ours is the fury" }, "impact": { "org_adoption": "LOW", "impacted_devices": 45, "event_count": 76, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } }, { "recommendation_id": "bd50c2b2-5403-4e9e-8863-9991f70df026", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "0bbc082cd8b3ff62898ad80a57cb5e1f379e3fcfa48fa2f9858901eb0c220dc0", "filename": "sophos ui.msi" }, "workflow": { "status": "NEW", "changed_by": "tlannister@example.com", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T20:53:09.000Z", "comment": "Always pay your debts" }, "impact": { "org_adoption": "HIGH", "impacted_devices": 8, "event_count": 25, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } }, { "recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124", "filename": "mimecast for outlook 7.8.0.125 (x86).msi" }, "workflow": { "status": "NEW", "changed_by": "estark@example.com", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T15:13:40.000Z", "comment": "Winter is coming" }, "impact": { "org_adoption": "MEDIUM", "impacted_devices": 45, "event_count": 79, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } } ], "num_found": 3 } ACTION_INIT = { "recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": "WHITE_LIST", "sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124", "filename": "mimecast for outlook 7.8.0.125 (x86).msi" }, "workflow": { "status": "NEW", "changed_by": "estark@example.com", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T15:13:40.000Z", "comment": "Winter is coming" }, "impact": { "org_adoption": "MEDIUM", "impacted_devices": 45, "event_count": 79, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } } ACTION_REQS = [ { "action": "ACCEPT", "comment": "Alpha" }, { "action": "RESET" }, { "action": "REJECT", "comment": "Charlie" }, ] ACTION_REFRESH_SEARCH = { "criteria": { "status": ['NEW', 'REJECTED', 'ACCEPTED'], "policy_type": ['reputation_override'] }, "rows": 50 } ACTION_SEARCH_RESP = { "results": [ACTION_INIT], "num_found": 1 } ACTION_REFRESH_STATUS = ['ACCEPTED', 'NEW', 'REJECTED'] ACTION_INIT_ACCEPTED = { "recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68", "rule_type": "reputation_override", "policy_id": 0, "new_rule": { "override_type": "SHA256", "override_list": 
"WHITE_LIST", "sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124", "filename": "mimecast for outlook 7.8.0.125 (x86).msi" }, "workflow": { "status": "ACCEPTED", "ref_id": "e9410b754ea011ebbfd0db2585a41b07", "changed_by": "estark@example.com", "create_time": "2021-05-18T16:37:07.000Z", "update_time": "2021-08-31T15:13:40.000Z", "comment": "Winter is coming" }, "impact": { "org_adoption": "MEDIUM", "impacted_devices": 45, "event_count": 79, "impact_score": 0, "update_time": "2021-05-18T16:37:07.000Z" } }
[ 37811, 44, 735, 9109, 329, 10763, 526, 15931, 198, 198, 5188, 31315, 62, 2200, 48, 796, 1391, 198, 220, 220, 220, 366, 22213, 5142, 1298, 1391, 198, 220, 220, 220, 220, 220, 220, 220, 366, 30586, 62, 4906, 1298, 37250, 260, 1996, 34...
1.692444
3,375
# Copyright 2015 Carnegie Mellon University
#
# Author: Han Chen <hanc@andrew.cmu.edu>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ast
import json

from oslo.config import cfg
from stevedore import driver

from monasca.common import es_conn
from monasca.common import email_sender
from monasca.common import kafka_conn
from monasca.openstack.common import log
from monasca.openstack.common import service as os_service

es_opts = [
    cfg.StrOpt('topic',
               default='alarm',
               help=('The topic that messages will be retrieved from.'
                     'This also will be used as a doc type when saved '
                     'to ElasticSearch.')),
    cfg.StrOpt('topic2',
               default='notification_methods',
               help=('The topic that messages will be retrieved from.'
                     'This also will be used as a doc type when saved '
                     'to ElasticSearch.')),
    cfg.StrOpt('doc_type',
               default='',
               help=('The document type which defines what document '
                     'type the messages will be save into. If not '
                     'specified, then the topic will be used.')),
    cfg.StrOpt('processor',
               default='',
               help=('The message processer to load to process the message.'
                     'If the message does not need to be process anyway,'
                     'leave the default')),
]

es_group = cfg.OptGroup(name='notification', title='notification')
cfg.CONF.register_group(es_group)
cfg.CONF.register_opts(es_opts, es_group)

LOG = log.getLogger(__name__)
[ 2, 15069, 1853, 33976, 49808, 2059, 198, 2, 198, 2, 6434, 25, 9530, 12555, 1279, 71, 1192, 31, 392, 1809, 13, 11215, 84, 13, 15532, 29, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, ...
2.603175
819
from typing import Optional, Callable

import requests
from requests.auth import AuthBase
from requests.exceptions import RequestException
[ 6738, 19720, 1330, 32233, 11, 4889, 540, 198, 198, 11748, 7007, 198, 6738, 7007, 13, 18439, 1330, 26828, 14881, 198, 6738, 7007, 13, 1069, 11755, 1330, 19390, 16922, 628, 198 ]
4.7
30
import os
import shutil

from .ZipFileManager import ZipFileManager
from .DiskFileManager import DiskFileManager
from .Directory import Directory

import string

printable = set(string.printable) - set("\x0b\x0c")


def file_tree(target, replace=False):
    """Open a connection to a file tree which can be either a disk folder, a
    zip archive, or an in-memory zip archive.

    Parameters
    ----------

    target
      Either the path to a target folder, or a zip file, or '@memory' to write
      a zip file in memory (at which case a string of the zip file is returned)
      If the target is already a flametree directory, it is returned as-is.

    replace
      If True, will remove the target if it already exists. If False, new files
      will be written inside the target and some files may be overwritten.
    """
    if isinstance(target, Directory):
        return target
    if (not isinstance(target, str)) or is_hex(target):
        return Directory(file_manager=ZipFileManager(source=target))
    elif target == "@memory":
        return Directory("@memory", file_manager=ZipFileManager("@memory"))
    elif target.lower().endswith(".zip"):
        return Directory(target, file_manager=ZipFileManager(target, replace=replace))
    else:
        return Directory(target, file_manager=DiskFileManager(target))
[ 11748, 28686, 198, 11748, 4423, 346, 198, 198, 6738, 764, 41729, 8979, 13511, 1330, 38636, 8979, 13511, 198, 6738, 764, 40961, 8979, 13511, 1330, 31664, 8979, 13511, 198, 6738, 764, 43055, 1330, 27387, 198, 198, 11748, 4731, 198, 198, 479...
3.085648
432
import sys
import socket
import time

ip = '127.0.0.1'
port = 9001

if (len(sys.argv) > 1):
    ip = sys.argv[1]

if (len(sys.argv) > 2):
    port = int(sys.argv[2])

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))

sock.send('bip\n\r')
data = sock.recv(80)
print data

sock.send('TTS[it-IT] ciao, come stai?\n\r')
data = sock.recv(80)
print data

sock.send('TTS[en-US] very well, thank you!\n\r')
data = sock.recv(80)
print data

sock.send('TTS default language is english!\n\r')
data = sock.recv(80)
print data

sock.send('bop\n\r')
data = sock.recv(80)
print data

time.sleep(1)
sock.close()
[ 11748, 25064, 198, 11748, 17802, 198, 11748, 640, 198, 198, 541, 796, 705, 16799, 13, 15, 13, 15, 13, 16, 6, 198, 634, 796, 860, 8298, 198, 198, 361, 357, 11925, 7, 17597, 13, 853, 85, 8, 29, 16, 2599, 198, 220, 220, 220, 20966,...
2.128814
295
from abc import ABC, abstractmethod
from typing import List

from .common import (
    AtCell,
    BasicMessage,
    GroupMessage,
    FriendMessage,
    MsgCellType,
    MessageType,
    PlainCell,
)

from ..utils import is_str_blank, str_contains


if __name__ == "__main__":
    msg_matcher = JustAtMeMsg(123)
    msg = {
        "type": "GroupMessage",
        "sender": {"id": 123, "nickname": "", "remark": ""},
        "messageChain": [
            {"type": "Source", "id": 123456, "time": 123456},
            {"type": "At", "target": 1234, "display": "@Mirai"},
            {"type": "Plain", "text": " "},
        ],
    }
    print(msg_matcher.match(Ctx(msg)))
[ 6738, 450, 66, 1330, 9738, 11, 12531, 24396, 198, 6738, 19720, 1330, 7343, 198, 6738, 764, 11321, 1330, 357, 198, 220, 220, 220, 1629, 28780, 11, 198, 220, 220, 220, 14392, 12837, 11, 198, 220, 220, 220, 4912, 12837, 11, 198, 220, 2...
2.247525
303
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin
from django.contrib.auth.models import Group
from django.utils.translation import ugettext_lazy as _

from main.models import UserInfo, User, Child, Volunteer, Donor, Letter, Need, PurchaseForInstitute, PurchaseForNeed, \
    Activity, OngoingUserInfo

admin.site.unregister(Group)
admin.site.register(Child)
admin.site.register(Volunteer)
admin.site.register(Donor)
admin.site.register(Letter)
admin.site.register(Need)
admin.site.register(PurchaseForInstitute)
admin.site.register(PurchaseForNeed)
admin.site.register(Activity)
admin.site.register(OngoingUserInfo)
import tdl
import time

import hunting.constants as c
from django.db import models
""" Module for Pytorch dataset representations """ import torch from torch.utils.data import Dataset
from zenslackchat.zendesk_base_webhook import BaseWebHook
from zenslackchat.zendesk_email_to_slack import email_from_zendesk
from zenslackchat.zendesk_comments_to_slack import comments_from_zendesk
# Import modules
import groupdocs_merger_cloud

from Common import Common

# This example demonstrates how to move a document page to a new position
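The example body announced by the comment is missing here. A sketch following the SDK's documented MoveOptions/MoveRequest pattern might look like the following; the file paths, the `Common.app_sid`/`Common.app_key` attributes, and the result field are assumptions, not taken from this file:

# Hypothetical sketch -- credentials helper and paths are assumptions.
pages_api = groupdocs_merger_cloud.PagesApi.from_keys(Common.app_sid, Common.app_key)

options = groupdocs_merger_cloud.MoveOptions()
options.file_info = groupdocs_merger_cloud.FileInfo("WordProcessing/sample.docx")
options.output_path = "output/move-pages.docx"
options.page_number = 1
options.new_page_number = 2

result = pages_api.move(groupdocs_merger_cloud.MoveRequest(options))
print("Output file path: " + result.path)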
import pandas as pd
from sklearn.metrics import r2_score

# `ml`, `x_test` and `y_test` are assumed to come from earlier notebook
# cells (a fitted model and its train/test split).
y_pred = ml.predict(x_test)
print(y_pred)

r2_score(y_test, y_pred)

pred_y_df = pd.DataFrame({'Actual Value': y_test,
                          'Predicted Value': y_pred,
                          'Difference': y_test - y_pred})
pred_y_df[0:20]  # notebook-style preview of the first 20 rows
# -*- coding: utf-8 -*- """ This module offers util functions to be called and used in other modules """ from datetime import datetime import os import json import pickle import string import random import numpy as np import pandas as pd from matplotlib import pyplot as plt import seaborn as sns from sklearn import tree def id_generator(size=6, chars=string.ascii_lowercase + string.digits): """GENERATE A RANDOM STRING TO BE USED AS AN ID Args: size (int, optional): size of the string. Defaults to 6. chars (str, optional): charachters to be used to generate the string. Defaults to string.ascii_lowercase+string.digits. Returns: [str]: a random chain of charachters """ return "".join(random.choice(chars) for _ in range(size)) def save_model(path, model): """SAVE MODEL INTO PICKLE FILE Args: path (str): path where to save the model model (binary): the model to be saved """ with open(path, "wb") as file: pickle.dump(model, file) def update_history(models_hist_path, model_id, model_name, model, params): """SAVE METADATA RELATED TO THE TRAINED MODEL INTO THE HISTORY FILE Args: models_hist_path (str): path to the history file model_id (str): unique id of the model model_name (str): model name = "model_"+model_id+".pkl" model (binary): binary file of the model params (dict): dictionnary containing the hyper-parameters used to fit the model """ model_metadata = dict() model_metadata["trained"] = str(datetime.now()) model_metadata["model_type"] = type(model).__name__ model_metadata["model_id"] = model_id model_metadata["params"] = params print(model_metadata) with open(models_hist_path, "r+") as outfile: try: hist = json.load(outfile) hist[model_name] = model_metadata outfile.seek(0) json.dump(hist, outfile, indent=4) except json.decoder.JSONDecodeError: json.dump({model_name: model_metadata}, outfile, indent=4) def update_history_add_eval( models_hist_path, model_id=None, model_name=None, metrics=None ): """ADD EVALUATION METRICS THE HISTORY FILE FOR THE SPECIFIED MODEL Args: models_hist_path (str): path to the history file model_id (str, optional): the id of the model. Defaults to None. model_name (str, optional): the name of the model. Defaults to None. metrics (dict, optional): a dictionnary containing metadata related to the model evaluation. Defaults to None. """ assert ( model_id is not None or model_name is not None ), "At least the model id or name must be given" assert models_hist_path is not None, "You must specify the path to the history file" if not model_name: model_name = "model_" + model_id + ".pkl" eval_metadata = dict() eval_metadata["datetime"] = str(datetime.now()) eval_metadata["metrics"] = metrics with open(models_hist_path, "r+") as outfile: try: hist = json.load(outfile) hist[model_name]["evaluation"] = eval_metadata outfile.seek(0) json.dump(hist, outfile, indent=4) except json.decoder.JSONDecodeError: print("cannot save evaluation metadata") def generate_features_importance_plot(model, features, model_id): """GENERATES A PLOT DESCRIBING FEATURES IMPORTANCE FOR THE MODEL TO MAKE THE PREDICTION. Args: model (tree-based model): a tree based model (decision tree, random forest ...) 
features (pandas dataframe): a table of the features on which we trained the model model_id (str): the unique id of the model """ mean_importances = model.feature_importances_ importances_indices = np.argsort(mean_importances)[::-1] ordered_columns = [features.columns[i] for i in importances_indices] importances = pd.DataFrame( [tree.feature_importances_ for tree in model.estimators_], columns=features.columns, ) importances = importances[ordered_columns] _, ax = plt.subplots(figsize=(12, 8)) sns.boxplot(x="variable", y="value", ax=ax, data=pd.melt(importances)) figure = ax.get_figure() figure.savefig( "models/models-training/run_" + model_id + "/features_importance.png" ) def plot_trees(rf, feature_names, target_names, model_id): """GENERATES A PLOT THAT SHOWS THE DECISION MAKING OF THE TREES Args: rf (model): a tree based model (random forest ...) feature_names (list): names of the columns of the training set target_names (str): name of the target columns model_id (str): unique id of the model """ fn = feature_names cn = target_names fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(10, 2), dpi=900) for index in range(0, 5): tree.plot_tree( rf.estimators_[index], feature_names=fn, class_names=cn, filled=True, ax=axes[index], ) axes[index].set_title("Estimator: " + str(index), fontsize=11) fig.savefig("models/models-training/run_" + model_id + "/Trees.png")
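A short usage sketch tying the helpers above together; the model, paths, and data are hypothetical, and `update_history` assumes the history file already exists (it is opened in "r+" mode):

# Hypothetical end-to-end usage of the helpers above.
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier

X, y = make_classification(n_samples=100, n_features=4, random_state=0)
model = RandomForestClassifier(n_estimators=10, random_state=0).fit(X, y)

model_id = id_generator()
model_name = "model_" + model_id + ".pkl"
save_model("models/" + model_name, model)  # "models/" directory assumed to exist
update_history("models/history.json", model_id, model_name, model,
               params=model.get_params())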
import asyncio
import discord
from discord.ext import commands
import re
import sqlite3
from urllib.parse import quote as uriquote
import html

CURR = ["AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK", "DKK", "EUR", "GBP",
        "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN", "MYR", "NOK",
        "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY", "TWD",
        "ZAR"]
from SG_GetFeatureMatrix import *
from SG_VectorY import *

featureMatrix = featureMatrixFromReviews()
Y = getYVector()
# Generated by Django 3.1.4 on 2021-01-17 19:12

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
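The `Migration` class itself is absent from this generated file; a skeleton consistent with the imports (a swappable user-model dependency, with `django.db.models.deletion` available for foreign-key handlers) would be:

# Skeleton only -- the original operations are not shown in this file.
class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # e.g. migrations.AddField(..., on_delete=django.db.models.deletion.CASCADE)
    ]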
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2013 dotCloud, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import contextlib
import socket

import mock

from nova import context
from nova import exception
from nova.openstack.common import jsonutils
from nova.openstack.common import units
from nova import test
from nova.tests import utils
import nova.tests.virt.docker.mock_client
from nova.tests.virt.test_virt_drivers import _VirtDriverTestCase
from nova.virt.docker import hostinfo
from nova.virt.docker import network
import unittest
import unittest.mock as mock

import asyncio

import pyx.http as http
# -*- coding: utf-8 -*-

# Copyright 2013 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

import os
import unittest

from splinter import Browser
from .fake_webapp import EXAMPLE_APP
from .base import WebDriverTests

from selenium.common.exceptions import WebDriverException


class ChromeBrowserFullscreenTest(WebDriverTests, unittest.TestCase):

    # Browser creation reconstructed: the original fixture was not shown,
    # but setUp() below requires a shared self.browser instance.
    @classmethod
    def setUpClass(cls):
        cls.browser = Browser("chrome", fullscreen=True)

    @classmethod
    def tearDownClass(cls):
        cls.browser.quit()

    def setUp(self):
        self.browser.visit(EXAMPLE_APP)

    def test_attach_file(self):
        "should provide a way to change file field value"
        file_path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), 'mockfile.txt'
        )
        self.browser.attach_file('file', file_path)
        self.browser.find_by_name('upload').click()

        html = self.browser.html
        self.assertIn('text/plain', html)
        self.assertIn(open(file_path).read(), html)

    def test_should_support_with_statement(self):
        with Browser('chrome', fullscreen=True) as internet:
            pass
import os

import json

from File.file import File

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'


def main():
    # Reconstructed stub: the original main() body is not shown in this file.
    pass


if __name__ == "__main__":
    main()
import struct

import numpy as np
import pytest

import miniglm
# -*- coding: utf-8 -*-
from flask import Blueprint, redirect

from flaskbb.utils.helpers import render_template
from .forms import AddForm, DeleteForm
from .models import MyPost
from flaskbb.extensions import db

news = Blueprint("news", __name__, template_folder="templates")
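For illustration, a minimal view on this blueprint; it assumes `MyPost` is a standard flask-sqlalchemy model and that a `news.html` template exists, neither of which is shown here:

@news.route("/")
def index():
    # MyPost.query is assumed (flask-sqlalchemy); the template name is made up.
    posts = MyPost.query.all()
    return render_template("news.html", posts=posts)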
from stix_shifter_utils.stix_translation.src.json_to_stix import json_to_stix_translator from stix_shifter_utils.stix_translation.src.utils.transformer_utils import get_module_transformers from stix_shifter_modules.aws_athena.entry_point import EntryPoint import unittest MODULE = "aws_athena" entry_point = EntryPoint() map_data = entry_point.get_results_translator().map_data data_source = { "type": "identity", "id": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", "name": "aws_athena", "identity_class": "events" } options = {} def test_vpc_flow_network_json_to_stix(self): """to test network stix object properties""" data = { "vpcflow": { "account": 979326520502, "interfaceid": "eni-04b762de832716892", "sourceaddress": "89.248.172.85", "destinationaddress": "172.31.62.249", "sourceport": 58387, "destinationport": 51289, "protocol": "tcp", "starttime": 1592547796, "endtime": 1592547798, "action": "REJECT", "date": "2020-06-19", "logstatus": "OK", "numbytes": 40, "region": "us-east-1", "version": 2 } } result_bundle = json_to_stix_translator.convert_to_stix( data_source, map_data, [data], get_module_transformers(MODULE), options) result_bundle_objects = result_bundle['objects'] result_bundle_identity = result_bundle_objects[0] assert result_bundle_identity['type'] == data_source['type'] observed_data = result_bundle_objects[1] assert 'objects' in observed_data objects = observed_data['objects'] network_obj = TestAwsResultsToStix.get_first_of_type(objects.values(), 'network-traffic') assert network_obj is not None, 'network-traffic object type not found' assert network_obj.keys() == {'type', 'src_ref', 'dst_ref', 'src_port', 'dst_port', 'protocols', 'start', 'end'} assert network_obj['type'] == 'network-traffic' assert network_obj['src_ref'] == '1' assert network_obj['dst_ref'] == '4' assert network_obj['src_port'] == 58387 assert network_obj['dst_port'] == 51289 assert network_obj['protocols'] == ['tcp'] assert network_obj['start'] == '2020-06-19T06:23:16.000Z' assert network_obj['end'] == '2020-06-19T06:23:18.000Z' def test_vpc_flow_custom_attr_json_to_stix(self): """to test network stix object properties""" data = { "vpcflow": { "account": 979326520502, "interfaceid": "eni-04b762de832716892", "sourceaddress": "89.248.172.85", "destinationaddress": "172.31.62.249", "sourceport": 58387, "destinationport": 51289, "protocol": "tcp", "starttime": 1592547796, "endtime": 1592547798, "action": "REJECT", "date": "2020-06-19", "logstatus": "OK", "numbytes": 40, "region": "us-east-1", "version": 2 } } options = {"unmapped_fallback": True} result_bundle = json_to_stix_translator.convert_to_stix( data_source, map_data, [data], get_module_transformers(MODULE), options) result_bundle_objects = result_bundle['objects'] result_bundle_identity = result_bundle_objects[0] assert result_bundle_identity['type'] == data_source['type'] observed_data = result_bundle_objects[1] assert 'objects' in observed_data objects = observed_data['objects'] custom_object = TestAwsResultsToStix.get_first_of_type(objects.values(), 'x-aws-athena') assert custom_object.keys() == {'type', 'interfaceid', 'date', 'logstatus', 'numbytes', 'region', 'version'} assert custom_object['date'] == '2020-06-19' assert custom_object['logstatus'] == 'OK' assert custom_object['numbytes'] == 40 assert custom_object['region'] == 'us-east-1' assert custom_object['version'] == 2 def test_guardduty_network_json_to_stix(self): """to test network stix object properties""" data = { "guardduty": { "accountid": 979326520502, "region": "us-east-1", "type": 
"UnauthorizedAccess:EC2/SSHBruteForce", "resource_instancedetails_networkinterfaces_0_privatednsname": "ip-172-31-60-104.ec2.internal", "resource_instancedetails_networkinterfaces_0_privateipaddress": "172.31.60.104", "resource_instancedetails_networkinterfaces_0_subnetid": "subnet-ea9d6be4", "resource_instancedetails_networkinterfaces_0_publicdnsname": "ec2-18-210-22-128.compute-1." "amazonaws.com", "resource_instancedetails_networkinterfaces_0_vpcid": "vpc-10db926a", "resource_instancedetails_networkinterfaces_0_publicip": "18.210.22.128", "resource_instancedetails_networkinterfaces_0_networkinterfaceid": "eni-0203098cca62c3f21", "resource_instancedetails_networkinterfaces_0_securitygroups_0_groupid": "sg-018edb43fcc81525f", "resource_instancedetails_networkinterfaces_0_securitygroups_0_groupname": "launch-wizard-13", "resource_instancedetails_imageid": "ami-0015fcaa5516c75ed", "resource_instancedetails_instanceid": "i-031cb81e1f32a36e1", "resource_instancedetails_availabilityzone": "us-east-1f", "service_eventfirstseen": "2020-07-31T06:19:09Z", "service_action_networkconnectionaction_protocol": "TCP", "service_action_networkconnectionaction_remoteportdetails_port": "38420", "service_action_networkconnectionaction_remoteipdetails_country_countryname": "Sweden", "service_action_networkconnectionaction_remoteipdetails_ipaddressv4": "85.224.242.94", "service_action_networkconnectionaction_remoteipdetails_city_cityname": "rebro", "service_action_networkconnectionaction_localportdetails_port": "22", "service_eventlastseen": "2020-09-12T09:19:40Z", "severity": 2, "title": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1.", "arn": "arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed494f3b7ca56acdc74df/finding" "/7ab9d1cb6248e05a0e419a79528761cb", "createdat": "2020-07-31T06:37:13.745Z", "description": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1. 
" "Brute force attacks are used to gain unauthorized access to your instance by " "guessing the SSH password.", "finding_id": "7ab9d1cb6248e05a0e419a79528761cb", "partition": "aws", "resource": { "instancedetails": { "imagedescription": "Provided by Red Hat, Inc.", "instancestate": "running", "instancetype": "t2.large", "launchtime": "2020-09-11T23:16:03Z", "tags": { "0": { "key": "Name", "value": "ArcSight Logger" } } }, "resourcetype": "Instance" }, "schemaversion": 2.0, "service": { "action": { "actiontype": "NETWORK_CONNECTION", "networkconnectionaction": { "connectiondirection": "INBOUND", "localportdetails": { "portname": "SSH" }, "remoteipdetails": { "geolocation": { "lat": "59.2741", "lon": "15.2066" }, "organization": { "asn": "2119", "asnorg": "Telenor Norge AS", "isp": "Telenor Sverige AB", "org": "Telenor Sverige AB" } }, "remoteportdetails": { "portname": "Unknown" } } }, "count": "20", "detectorid": "6ab6e6ee780ed494f3b7ca56acdc74df", "resourcerole": "TARGET", "servicename": "guardduty" }, "updatedat": "2020-09-12T09:25:34.086Z" } } result_bundle = json_to_stix_translator.convert_to_stix( data_source, map_data, [data], get_module_transformers(MODULE), options) result_bundle_objects = result_bundle['objects'] result_bundle_identity = result_bundle_objects[0] assert result_bundle_identity['type'] == data_source['type'] observed_data = result_bundle_objects[1] assert 'objects' in observed_data objects = observed_data['objects'] network_obj = TestAwsResultsToStix.get_first_of_type(objects.values(), 'network-traffic') assert network_obj is not None, 'network-traffic object type not found' assert network_obj.keys() == {'type', 'dst_port', 'src_ref', 'dst_ref', 'src_port', 'protocols'} assert network_obj['type'] == 'network-traffic' assert network_obj['dst_port'] == 38420 assert network_obj['src_ref'] == '3' assert network_obj['dst_ref'] == '9' assert network_obj['src_port'] == 22 assert network_obj['protocols'] == ['tcp'] def test_guardduty_custom_attr_json_to_stix(self): """to test network stix object properties""" data = { "guardduty": { "accountid": 979326520502, "region": "us-east-1", "type": "UnauthorizedAccess:EC2/SSHBruteForce", "resource_instancedetails_networkinterfaces_0_privatednsname": "ip-172-31-60-104.ec2.internal", "resource_instancedetails_networkinterfaces_0_privateipaddress": "172.31.60.104", "resource_instancedetails_networkinterfaces_0_subnetid": "subnet-ea9d6be4", "resource_instancedetails_networkinterfaces_0_publicdnsname": "ec2-18-210-22-128.compute-1." 
"amazonaws.com", "resource_instancedetails_networkinterfaces_0_vpcid": "vpc-10db926a", "resource_instancedetails_networkinterfaces_0_publicip": "18.210.22.128", "resource_instancedetails_networkinterfaces_0_networkinterfaceid": "eni-0203098cca62c3f21", "resource_instancedetails_networkinterfaces_0_securitygroups_0_groupid": "sg-018edb43fcc81525f", "resource_instancedetails_networkinterfaces_0_securitygroups_0_groupname": "launch-wizard-13", "resource_instancedetails_imageid": "ami-0015fcaa5516c75ed", "resource_instancedetails_instanceid": "i-031cb81e1f32a36e1", "resource_instancedetails_availabilityzone": "us-east-1f", "service_eventfirstseen": "2020-07-31T06:19:09Z", "service_action_networkconnectionaction_protocol": "TCP", "service_action_networkconnectionaction_remoteportdetails_port": "38420", "service_action_networkconnectionaction_remoteipdetails_country_countryname": "Sweden", "service_action_networkconnectionaction_remoteipdetails_ipaddressv4": "85.224.242.94", "service_action_networkconnectionaction_remoteipdetails_city_cityname": "rebro", "service_action_networkconnectionaction_localportdetails_port": "22", "service_eventlastseen": "2020-09-12T09:19:40Z", "severity": 2, "title": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1.", "arn": "arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed494f3b7ca56acdc74df/finding/" "7ab9d1cb6248e05a0e419a79528761cb", "createdat": "2020-07-31T06:37:13.745Z", "description": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1." " Brute force attacks are used to gain unauthorized access to your instance by guessing " "the SSH password.", "finding_id": "7ab9d1cb6248e05a0e419a79528761cb", "partition": "aws", "resource": { "instancedetails": { "imagedescription": "Provided by Red Hat, Inc.", "instancestate": "running", "instancetype": "t2.large", "launchtime": "2020-09-11T23:16:03Z", "tags": { "0": { "key": "Name", "value": "ArcSight Logger" } } }, "resourcetype": "Instance" }, "schemaversion": 2.0, "service": { "action": { "actiontype": "NETWORK_CONNECTION", "networkconnectionaction": { "connectiondirection": "INBOUND", "localportdetails": { "portname": "SSH" }, "remoteipdetails": { "geolocation": { "lat": "59.2741", "lon": "15.2066" }, "organization": { "asn": "2119", "asnorg": "Telenor Norge AS", "isp": "Telenor Sverige AB", "org": "Telenor Sverige AB" } }, "remoteportdetails": { "portname": "Unknown" } } }, "count": "20", "detectorid": "6ab6e6ee780ed494f3b7ca56acdc74df", "resourcerole": "TARGET", "servicename": "guardduty" }, "updatedat": "2020-09-12T09:25:34.086Z" } } options = {"unmapped_fallback": True} result_bundle = json_to_stix_translator.convert_to_stix( data_source, map_data, [data], get_module_transformers(MODULE), options) result_bundle_objects = result_bundle['objects'] result_bundle_identity = result_bundle_objects[0] assert result_bundle_identity['type'] == data_source['type'] observed_data = result_bundle_objects[1] assert 'objects' in observed_data objects = observed_data['objects'] custom_object = TestAwsResultsToStix.get_first_of_type(objects.values(), 'x-aws-athena') assert custom_object.keys() == {'type', 'service_action_networkconnectionaction_remoteipdetails_country_countryname', 'finding_id', 'arn', 'createdat', 'partition', 'resource', 'schemaversion', 'service', 'updatedat'} assert custom_object['arn'] == 'arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed' \ '494f3b7ca56acdc74df/finding/7ab9d1cb6248e05a0e419a79528761cb' assert 
custom_object['finding_id'] == '7ab9d1cb6248e05a0e419a79528761cb' assert custom_object['createdat'] == '2020-07-31T06:37:13.745Z' assert custom_object['partition'] == 'aws' assert custom_object['schemaversion'] == 2.0 assert custom_object['updatedat'] == '2020-09-12T09:25:34.086Z'
#!/usr/bin/python
# -*- coding: utf-8 -*-
# file: 03 login.py
# @author: Gorit
# @contact: gorit@qq.com
# @time: 2020/1/20 12:44

import requests
from lxml import etree

# obj = lMonKey()
#!/usr/bin/python
# -*- coding: utf-8 -*-

# This file was created using the DirectGUI Designer

from direct.gui import DirectGuiGlobals as DGG
from direct.gui.DirectFrame import DirectFrame
from direct.gui.DirectLabel import DirectLabel
from direct.gui.DirectButton import DirectButton
from direct.gui.DirectOptionMenu import DirectOptionMenu
from panda3d.core import (
    LPoint3f,
    LVecBase3f,
    LVecBase4f,
    TextNode
)
from . import FROM_FEED_PUBLISHED_TODAY, STRINGIFY
# Advent of Code 2020 # Day 21 # Author: irobin591 import os import doctest import re re_entry = re.compile(r'^([a-z ]+) \(contains ([a-z, ]*)\)$') with open(os.path.join(os.path.dirname(__file__), "input.txt"), 'r') as input_file: input_data = input_file.read().strip().split('\n') def part1(input_data): """ >>> part1(open(os.path.join(os.path.dirname(__file__), "test_part1.txt"), 'r').read().strip().split('\\n')) 5 """ # dict['allergen'] = ['asdfa', 'agbsfb'] allergens = {} ingredients = [] # map strings to allergens for entry in input_data: r = re_entry.match(entry) if not r: raise RuntimeError("") contents = set(r.group(1).split(' ')) ingredients.extend(contents) for allergen in r.group(2).split(', '): if allergen not in allergens: allergens[allergen] = contents else: # only keep already added ingredients allergens[allergen] = [ingredient for ingredient in contents if ingredient in allergens[allergen]] # print(allergens) # print(ingredients) ingredients_with_allergens = set([y for x in allergens.values() for y in x]) # print(list(filter(lambda i: i not in ingredients_with_allergens, ingredients))) return len(list(filter(lambda i: i not in ingredients_with_allergens, ingredients))) def part2(input_data): """ >>> part2(open(os.path.join(os.path.dirname(__file__), "test_part1.txt"), 'r').read().strip().split('\\n')) 'mxmxvkd,sqjhc,fvjkl' """ # dict['allergen'] = ['asdfa', 'agbsfb'] allergens = {} ingredients = [] # map strings to allergens for entry in input_data: r = re_entry.match(entry) if not r: raise RuntimeError("") contents = set(r.group(1).split(' ')) ingredients.extend(contents) for allergen in r.group(2).split(', '): if allergen not in allergens: allergens[allergen] = list(contents) else: # only keep already added ingredients allergens[allergen] = [ingredient for ingredient in contents if ingredient in allergens[allergen]] # print(allergens) # (allergen, ingredient) assigned_allergens = [] while sum([len(ingreds) for ingreds in allergens.values()]) > 0: for allergen in allergens: if len(allergens[allergen]) == 1: ingredient = allergens[allergen][0] assigned_allergens.append((allergen, ingredient)) for allergen2 in allergens: if ingredient in allergens[allergen2]: allergens[allergen2].remove(ingredient) assigned_allergens.sort(key=lambda x: x[0]) return ",".join([x[1] for x in assigned_allergens]) if __name__ == "__main__": doctest.testmod() print("Part One: {}".format(part1(input_data))) print("Part Two: {}".format(part2(input_data))) pass
""" Test that escaping characters for HTML is disabled. """ import os, subprocess
from flask import Flask, jsonify, request
from w3lib.html import get_base_url
import extruct
import requests

app = Flask(__name__)
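A sketch of an extraction endpoint wired from the imports above; the route name is arbitrary and error handling is omitted:

@app.route("/extract")
def extract():
    # Fetch the page, resolve its base URL, and extract embedded metadata
    # (microdata, JSON-LD, RDFa, ...) with extruct.
    url = request.args.get("url", "")
    response = requests.get(url)
    base_url = get_base_url(response.text, response.url)
    data = extruct.extract(response.text, base_url=base_url)
    return jsonify(data)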
""" A simple Python DAG using the Taskflow API. """ import logging import time from datetime import datetime from airflow import DAG from airflow.decorators import task log = logging.getLogger(__name__) with DAG( dag_id='simple_python_taskflow_api', schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False, tags=['airflow101'], ) as dag: hello_task = say_hello() sleeping_task = sleep_for_1() hello_task >> sleeping_task
from . import model

import numpy as np
from scipy import special, stats
# Contains the functions needed to process both chords and regularized beards.
# proc_chords is used for chords.
# proc_beard_regularize for generating beards.
# proc_pdf saves pdfs of a variable below cloud base.
# All have a large overlap, but I split them up to keep the one script from
# getting too confusing.

import numpy as np
import math
from netCDF4 import Dataset
import os
import time as ttiimmee
from scipy.interpolate import interp1d
from scipy.interpolate import interp2d
# from scipy.interpolate import griddata
# from mpl_toolkits.axes_grid1 import make_axes_locatable
import pickle
import sys
# sys.path.insert(0, "/home/pgriewank/code/2019-chords-plumes/")
# from unionfind import UnionFind
from cusize_functions import *
# import matplotlib.pyplot as plt
import pandas as pd
import gc
import glob
import xarray as xr

# proc_chords:
# Turned into a function. Removed the possibility to loop over multiple
# dates; if you want to do that, call the function repeatedly.
# The full list of variables to analyze is unclear; I will try to include
# everything available, but this might break the memory bank.
# Want to keep the automatic x and y calculation.
# Scaling shouldn't be needed, as all chord properties should be independent
# of wind direction (right?). Similarly, no base definition is needed; all
# values are relative to cloud base.
# Should be able to work for any variable in the column output, or for any
# 3D variable as long as it is named the same as the file.
# Changing 3D output: default is now to always go over x and y directions.
# TODO: plot_flag disabled for the mean time.

# proc_beard_regularize:
# Turned into a function. Removed the possibility to loop over multiple
# dates; if you want to do that, call the function repeatedly.
# Should be able to work for any variable in the column output, or for any
# 3D variable as long as it is named the same as the file.
# If the input data is a 3D field it will always go over x and y directions.
# Two different scale_flags added to rotate the curtain to point upwind.
# TODO: plot_flag disabled for the mean time.

# proc_pdf:
# A simple script which calculates a histogram below the cloud base and
# saves it. I will try to keep it at least somewhat general with a flexible
# variable.
"""D. mel housekeeping genes based on tau. Uses the intersection of w1118 and orgR to create a list of D. mel housekeeping genes. """ import os from functools import partial import pandas as pd from larval_gonad.io import pickle_load, pickle_dump if __name__ == "__main__": if os.getenv("SNAKE_DEBUG", False): from larval_gonad.debug import snakemake_debug snakemake = snakemake_debug( workdir="expression-atlas-wf", input=dict( male=[ "../output/expression-atlas-wf/tau_housekeeping/w1118_male.pkl", "../output/expression-atlas-wf/tau_housekeeping/orgR_male.pkl", ], female=[ "../output/expression-atlas-wf/tau_housekeeping/w1118_female.pkl", "../output/expression-atlas-wf/tau_housekeeping/orgR_female.pkl", ], annot="../output/expression-atlas-wf/YOgn_to_dmel_ortholog/dmel.pkl", ), ) main()
""" Api Key validation """ from typing import Optional from fastapi.security.api_key import APIKeyHeader from fastapi import HTTPException, Security, Depends from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN from server.core.security import verify_key from server.db.mongodb import AsyncIOMotorClient, get_database from server.models.user import User from server.db.crud.user import get_user_by_email from pydantic import EmailStr api_key_scheme = APIKeyHeader(name="X-API-KEY", auto_error=False) email_scheme = APIKeyHeader(name="X-EMAIL-ID", auto_error=False)
#!/usr/bin/env python

# This is a slightly modified version of ChromiumOS' splitconfig
# https://chromium.googlesource.com/chromiumos/third_party/kernel/+/stabilize-5899.B-chromeos-3.14/chromeos/scripts/splitconfig

"""See this page for more details:
http://dev.chromium.org/chromium-os/how-tos-and-troubleshooting/kernel-configuration
"""

import os
import re
import sys

allconfigs = {}

# Parse config files
for config in sys.argv[1:]:
    allconfigs[config] = set()
    for line in open(config):
        m = re.match(r"#*\s*CONFIG_(\w+)[\s=](.*)$", line)
        if not m:
            continue
        option, value = m.groups()
        allconfigs[config].add((option, value))

# Split out common config options
common = next(iter(allconfigs.values())).copy()
for config in allconfigs.keys():
    common &= allconfigs[config]
for config in allconfigs.keys():
    allconfigs[config] -= common

allconfigs["common"] = common

# Generate new splitconfigs
for config in allconfigs.keys():
    f = open("split-" + config, "w")
    for option, value in sorted(list(allconfigs[config])):
        if value == "is not set":
            print("# CONFIG_%s %s" % (option, value), file=f)
        else:
            print("CONFIG_%s=%s" % (option, value), file=f)
    f.close()
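Usage note: the script takes kernel config files as positional arguments, e.g. `python splitconfig config.flavour.generic config.flavour.lowlatency`. It writes a `split-common` file holding the options shared by every input, plus one `split-<config>` file per input containing only the options that differ.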
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model
#!/usr/bin/env python

'''
Notes:
    - Weak implies weakly supervised learning (4 classes)
    - Strong implies strongly (fully) supervised learning (10 classes)
    - Frame number is set to 22ms (default); that is the "sweet spot" based
      on dsp literature
    - Sampling rate is 16kHz (for the MFCC of each track)
    - Accuracy increases as the test set gets smaller, which implies that a
      lot of these machine learning models are heavily data-driven
      (i.e. feed more data for more performance boosts)
    - Currently, optimal benchmark results are achieved with a test set size
      of 10 percent of the total data
'''

import os
import glob
import sys
import time
import warnings
warnings.filterwarnings("ignore")

import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

from processing import mfcc_processing, datasets
from deep_models import models

from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.cluster import KMeans
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import normalize

input_path = './data/genres/'
mfcc_path = './data/processed/mfcc/'

have_mfccs = True


if __name__ == '__main__':
    mfccs = None
    data = None

    # svm_classifier, knn_classifier and mfcc_nn_model are assumed to be
    # defined earlier in the full module; they are not shown in this excerpt.
    if not have_mfccs:
        have_mfccs = True
        print('calculating mfccs...')
        mfccs = mfcc_processing.write_mfccs(input_path, mfcc_path, True)
    else:
        print('retrieving mfccs...')
        mfccs = mfcc_processing.read_mfccs(mfcc_path, True)

    data = mfcc_processing.featurize_data(mfccs, weak=True, verbose=True)
    print()

    weak = False

    if weak:
        data = mfcc_processing.featurize_data(mfccs, weak=True, verbose=True)
        print()
        print(svm_classifier(data, test_size=0.10, weak=True, verbose=True))
        print(knn_classifier(data, test_size=0.10, weak=True, verbose=True))
        print(mfcc_nn_model(num_epochs=10, test_size=0.10, weak=True, verbose=True))
    else:
        data = mfcc_processing.featurize_data(mfccs, weak=False, verbose=True)
        print()
        print(svm_classifier(data, test_size=0.10, weak=False, verbose=True))
        print(knn_classifier(data, test_size=0.10, weak=False, verbose=True))
        print(mfcc_nn_model(num_epochs=10, test_size=0.10, weak=False, verbose=True))
# # Blowfish encrypt - Encrypt selected region with Blowfish # # Copyright (c) 2019, Nobutaka Mantani # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import binascii import re import sys import time import tkinter import tkinter.ttk import tkinter.messagebox try: import Cryptodome.Cipher.Blowfish import Cryptodome.Util.Padding except ImportError: exit(-1) # PyCryptodome is not installed # Print selected items # Receive data data = sys.stdin.buffer.read() # Create input dialog root = tkinter.Tk() root.title("Blowfish encrypt") root.protocol("WM_DELETE_WINDOW", (lambda r=root: r.quit())) label_mode = tkinter.Label(root, text="Mode:") label_mode.grid(row=0, column=0, padx=5, pady=5, sticky="w") combo_mode = tkinter.ttk.Combobox(root, width=5, state="readonly") combo_mode["values"] = ("ECB", "CBC", "CFB", "OFB", "CTR") combo_mode.current(0) combo_mode.grid(row=0, column=1, padx=5, pady=5, sticky="w") label_key_type = tkinter.Label(root, text="Key type:") label_key_type.grid(row=1, column=0, padx=5, pady=5, sticky="w") combo_key_type = tkinter.ttk.Combobox(root, width=5, state="readonly") combo_key_type["values"] = ("Text", "Hex") combo_key_type.current(0) combo_key_type.grid(row=1, column=1, padx=5, pady=5) label_key = tkinter.Label(root, text="Key:") label_key.grid(row=1, column=2, padx=5, pady=5, sticky="w") entry_key = tkinter.Entry(width=32) entry_key.grid(row=1, column=3, padx=5, pady=5, sticky="w") entry_key.focus() # Focus to this widget label_iv_type = tkinter.Label(root, text="IV type:") label_iv_type.grid(row=2, column=0, padx=5, pady=5, sticky="w") combo_iv_type = tkinter.ttk.Combobox(root, width=5, state="readonly") combo_iv_type["values"] = ("Text", "Hex") combo_iv_type.current(0) combo_iv_type.grid(row=2, column=1, padx=5, pady=5) label_iv = tkinter.Label(root, text="IV:") label_iv.grid(row=2, column=2, padx=5, pady=5, sticky="w") entry_iv = tkinter.Entry(width=32) entry_iv.grid(row=2, column=3, padx=5, pady=5, sticky="w") button = tkinter.Button(root, text="OK", command=(lambda data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei))) button.grid(row=3, column=0, padx=5, pady=5, columnspan=4) label_ctr = tkinter.Label(root, text="Note:\nThe first seven bytes of IV 
are used as the nonce and the last one\nbyte is used as the initial value of the counter (compatible with\nCyberChef).", justify="left") label_ctr.grid(row=4, column=0, padx=5, pady=5, columnspan=4, sticky="w") label_ctr.grid_remove() # Set callback functions combo_mode.bind('<<ComboboxSelected>>', lambda event, root=root, cm=combo_mode, cit=combo_iv_type, ei=entry_iv, lc=label_ctr: combo_mode_selected(root, cm, cit, ei, lc)) combo_mode.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei)) combo_key_type.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei)) entry_key.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei)) combo_iv_type.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei)) entry_iv.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei)) button.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei)) # These are disabled in the initial state (ECB mode) combo_iv_type.configure(state = "disabled") entry_iv.configure(state = "disabled") # Adjust window position sw = root.winfo_screenwidth() sh = root.winfo_screenheight() root.update_idletasks() # Necessary to get width and height of the window ww = root.winfo_width() wh = root.winfo_height() root.geometry('+%d+%d' % ((sw/2) - (ww/2), (sh/2) - (wh/2))) root.mainloop() exit(1) # Not decrypted
# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import cStringIO import struct import dns.exception import dns.inet import dns.name
__version__ = '0.1.0'
__all__ = ['MultiStreamSelect', 'hexify']
__author__ = 'Natnael Getahun (connect@ngetahun.me)'
__name__ = 'multistream'

from .multistream import MultiStreamSelect
from .utils import hexify
"""---------------------------------------------------------------------------------* * Copyright (c) 2010-2018 Pauli Parkkinen, Eelis Solala, Wen-Hua Xu, * * Sergio Losilla, Elias Toivanen, Jonas Juselius * * * * Permission is hereby granted, free of charge, to any person obtaining a copy * * of this software and associated documentation files (the "Software"), to deal * * in the Software without restriction, including without limitation the rights * * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * * copies of the Software, and to permit persons to whom the Software is * * furnished to do so, subject to the following conditions: * * * * The above copyright notice and this permission notice shall be included in all* * copies or substantial portions of the Software. * * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * * SOFTWARE. * *----------------------------------------------------------------------------------""" # Input file reader import os import sys import xml.etree.ElementTree as ET import numpy, ast from .generate_objects import SettingsGenerator from collections import OrderedDict if __name__ == "__main__": if len(sys.argv) <= 1: print("Give the input file name as an input.") else: inp = InputXML(filename = sys.argv[1], definition_filename = os.path.dirname(os.path.realpath(__file__))+"/input_parameters.xml") import dage_fortran dage_fortran.python_interface.run(**inp.prepare())
# -*- coding: utf-8 -*-

import pytest
from pathlib_mate.pathlib2 import Path


if __name__ == "__main__":
    import os

    basename = os.path.basename(__file__)
    pytest.main([basename, "-s", "--tb=native"])
#!/usr/bin/env python3 """ Usage:: usage: auth.py [-h] [{google,apple,github,jwt}] [jwt] Login to your comma account positional arguments: {google,apple,github,jwt} jwt optional arguments: -h, --help show this help message and exit Examples:: ./auth.py # Log in with google account ./auth.py github # Log in with GitHub Account ./auth.py jwt ey......hw # Log in with a JWT from https://jwt.comma.ai, for use in CI """ import argparse import sys import pprint import webbrowser from http.server import BaseHTTPRequestHandler, HTTPServer from typing import Any, Dict from urllib.parse import parse_qs, urlencode from tools.lib.api import APIError, CommaApi, UnauthorizedError from tools.lib.auth_config import set_token, get_token PORT = 3000 def auth_redirect_link(method): provider_id = { 'google': 'g', 'apple': 'a', 'github': 'h', }[method] params = { 'redirect_uri': f"https://api.comma.ai/v2/auth/{provider_id}/redirect/", 'state': f'service,localhost:{PORT}', } if method == 'google': params.update({ 'type': 'web_server', 'client_id': '45471411055-ornt4svd2miog6dnopve7qtmh5mnu6id.apps.googleusercontent.com', 'response_type': 'code', 'scope': 'https://www.googleapis.com/auth/userinfo.email', 'prompt': 'select_account', }) return 'https://accounts.google.com/o/oauth2/auth?' + urlencode(params) elif method == 'github': params.update({ 'client_id': '28c4ecb54bb7272cb5a4', 'scope': 'read:user', }) return 'https://github.com/login/oauth/authorize?' + urlencode(params) elif method == 'apple': params.update({ 'client_id': 'ai.comma.login', 'response_type': 'code', 'response_mode': 'form_post', 'scope': 'name email', }) return 'https://appleid.apple.com/auth/authorize?' + urlencode(params) else: raise NotImplementedError(f"no redirect implemented for method {method}") def login(method): oauth_uri = auth_redirect_link(method) web_server = ClientRedirectServer(('localhost', PORT), ClientRedirectHandler) print(f'To sign in, use your browser and navigate to {oauth_uri}') webbrowser.open(oauth_uri, new=2) while True: web_server.handle_request() if 'code' in web_server.query_params: break elif 'error' in web_server.query_params: print('Authentication Error: "%s". Description: "%s" ' % ( web_server.query_params['error'], web_server.query_params.get('error_description')), file=sys.stderr) break try: auth_resp = CommaApi().post('v2/auth/', data={'code': web_server.query_params['code'], 'provider': web_server.query_params['provider']}) set_token(auth_resp['access_token']) except APIError as e: print(f'Authentication Error: {e}', file=sys.stderr) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Login to your comma account') parser.add_argument('method', default='google', const='google', nargs='?', choices=['google', 'apple', 'github', 'jwt']) parser.add_argument('jwt', nargs='?') args = parser.parse_args() if args.method == 'jwt': if args.jwt is None: print("method JWT selected, but no JWT was provided") exit(1) set_token(args.jwt) else: login(args.method) try: me = CommaApi(token=get_token()).get('/v1/me') print("Authenticated!") pprint.pprint(me) except UnauthorizedError: print("Got invalid JWT") exit(1)
#! /usr/bin/env python
# -*- coding: utf-8 -*-

'''
Create a Baselight folder with current date and time stamp.
You must refresh the Job Manager after running the script.

Copyright (c) 2020 Igor Riđanović, Igor [at] hdhead.com, www.metafide.com
'''

import flapi
from getflapi import getflapi
from datetime import datetime


def make_dated_folder(ip, scene, foldername):
    # Stub reconstruction: the original body is not shown here. It would use
    # the flapi connection to create "<foldername>_<timestamp>" in the Job
    # Manager; only the timestamped name is produced in this sketch.
    stamp = datetime.now().strftime('%Y-%m-%d_%H%M%S')
    return foldername + '_' + stamp


if __name__ == '__main__':
    conn, msg = getflapi()
    print(msg + '\n')

    ip = 'localhost'
    currentScene = 'Test01'
    folderName = 'MyFolder'

    make_dated_folder(ip, currentScene, folderName)
__doc__ = """ CallBacks ----------- Provides the callBack interface to collect data over time (see `callback_functions.py`). """ from elastica.callback_functions import CallBackBaseClass
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from collections import namedtuple

ActionSpecs = namedtuple(
    'ActionSpecs', ['id', 'type', 'targets', 'properties'])

EdgeDescription = namedtuple('EdgeDescription', ['edge', 'source', 'target'])

ENTITY = 'entity'
RELATIONSHIP = 'relationship'


# noinspection PyAttributeOutsideInit
#! /usr/bin/env python3
"""Parse through the simulated sequencing group specific kmer counts."""
import argparse as ap
from collections import OrderedDict
import glob
import gzip
import os
import sys
import time
import numpy as np
import multiprocessing as mp

SAMPLES = OrderedDict()
KMERS = {}
HAMMING = OrderedDict()

SAMPLE_COLS = [
    'sample', 'is_bcg', 'is_ba', 'has_lethal', 'simulated_coverage', 'group',
    'total_kmers', 'tp', 'tn', 'fp', 'fn',
    'kmer_cov_min', 'kmer_cov_mean', 'kmer_cov_median', 'kmer_cov_max',
    'non_zero_kmer_cov_min', 'non_zero_kmer_cov_mean',
    'non_zero_kmer_cov_median', 'non_zero_kmer_cov_max'
]

KMER_COLS = [
    'kmer', 'simulated_coverage', 'group', 'hamming_distance',
    'tp', 'tn', 'fp', 'fn',
    'group_kmer_cov_min', 'group_kmer_cov_mean',
    'group_kmer_cov_median', 'group_kmer_cov_max',
    'non_zero_group_kmer_cov_min', 'non_zero_group_kmer_cov_mean',
    'non_zero_group_kmer_cov_median', 'non_zero_group_kmer_cov_max',
    'outgroup_kmer_cov_min', 'outgroup_kmer_cov_mean',
    'outgroup_kmer_cov_median', 'outgroup_kmer_cov_max',
    'non_zero_outgroup_kmer_cov_min', 'non_zero_outgroup_kmer_cov_mean',
    'non_zero_outgroup_kmer_cov_median', 'non_zero_outgroup_kmer_cov_max'
]


def get_group_status(sample, group):
    """Return if a sample is within a group or not."""
    within_group = None
    if group == 'ba':
        within_group = True if SAMPLES[sample]['is_ba'] == 'True' else False
    elif group == 'bcg':
        within_group = True if SAMPLES[sample]['is_bcg'] == 'True' else False
    else:
        # lef
        within_group = True if SAMPLES[sample]['has_lethal'] else False
    return within_group


def get_coverage_stats(coverage):
    """Return summary stats of a set of coverages."""
    non_zero = [c for c in coverage if c]
    np_array = np.array(coverage)
    non_zero_array = np.array(non_zero)
    return {
        'min': min(coverage) if coverage else 0,
        'median': int(np.median(np_array)) if coverage else 0,
        'mean': "{0:.4f}".format(np.mean(np_array)) if coverage else 0,
        'max': max(coverage) if coverage else 0,
        'non_zero_min': min(non_zero_array) if non_zero else 0,
        'non_zero_median': int(np.median(non_zero_array)) if non_zero else 0,
        'non_zero_mean': int(round(np.mean(non_zero_array))) if non_zero else 0,
        'non_zero_max': max(non_zero_array) if non_zero else 0,
    }


def reverse_complement(seq):
    """Reverse complement a DNA sequence."""
    complement = {
        'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G',
        'a': 't', 't': 'a', 'g': 'c', 'c': 'g'
    }
    return ''.join([complement[b] for b in seq[::-1]])


def parse_counts(counts, sample, coverage, group, skip_kmers=False,
                 filter_kmers=False):
    """Parse kmer counts."""
    within_group = get_group_status(sample, group)
    sample_row = {'coverages': [], 'tp': 0, 'tn': 0, 'fp': 0, 'fn': 0}
    with gzip.open(counts, 'r') as count_handle:
        for line in count_handle:
            kmer, count = line.decode().rstrip().split()
            count = int(count)
            parse = True

            if filter_kmers:
                parse = kmer in KMERS or reverse_complement(kmer) in KMERS
            elif not skip_kmers:
                if kmer not in KMERS:
                    kmer = reverse_complement(kmer)
                if within_group:
                    KMERS[kmer][coverage]['group_coverages'].append(count)
                    if count:
                        KMERS[kmer][coverage]['tp'] += 1
                    else:
                        KMERS[kmer][coverage]['fn'] += 1
                else:
                    KMERS[kmer][coverage]['outgroup_coverages'].append(count)
                    if count:
                        KMERS[kmer][coverage]['fp'] += 1
                    else:
                        KMERS[kmer][coverage]['tn'] += 1

            if parse:
                sample_row['coverages'].append(count)
                if within_group:
                    if count:
                        sample_row['tp'] += 1
                    else:
                        sample_row['fn'] += 1
                else:
                    if count:
                        sample_row['fp'] += 1
                    else:
                        sample_row['tn'] += 1

    coverage_stats = get_coverage_stats(sample_row['coverages'])
    SAMPLES[sample]['results'].append({
        'simulated_coverage': coverage,
        'within_group': within_group,
        'tp': sample_row['tp'],
        'tn': sample_row['tn'],
        'fp': sample_row['fp'],
        'fn': sample_row['fn'],
        'kmer_cov_min': coverage_stats['min'],
        'kmer_cov_mean': coverage_stats['mean'],
        'kmer_cov_median': coverage_stats['median'],
        'kmer_cov_max': coverage_stats['max'],
        'non_zero_kmer_cov_min': coverage_stats['non_zero_min'],
        'non_zero_kmer_cov_mean': coverage_stats['non_zero_mean'],
        'non_zero_kmer_cov_median': coverage_stats['non_zero_median'],
        'non_zero_kmer_cov_max': coverage_stats['non_zero_max'],
    })


def parse_summary(summary):
    """Parse Summary file."""
    cols = None
    with open(summary, 'r') as summary_handle:
        # Column Names:
        # accession, gi, is_bcg, is_ba, species, genome_size, description
        for line in summary_handle:
            line = line.rstrip()
            if line.startswith('#'):
                cols = line.replace('#', '').split('\t')
            else:
                row = dict(zip(cols, line.split('\t')))
                SAMPLES[row['accession']] = row
                if row['accession'] == 'NZ_CP009941':
                    # NZ_CP009941 - Bacillus cereus w/ lef on chromosome
                    SAMPLES[row['accession']]['has_lethal'] = True
                else:
                    SAMPLES[row['accession']]['has_lethal'] = False
                SAMPLES[row['accession']]['results'] = []


def print_sample_summary(file_output):
    """Print the final per sample summaries."""
    with open(file_output, 'w') as output_handle:
        output_handle.write(("\t".join(SAMPLE_COLS)))
        output_handle.write("\n")
        for sample in SAMPLES:
            if SAMPLES[sample]['results']:
                for result in SAMPLES[sample]['results']:
                    # Only the columns listed in SAMPLE_COLS are written out;
                    # 'within_group' is carried in the row but not printed.
                    row = {
                        'sample': sample,
                        'is_bcg': SAMPLES[sample]['is_bcg'],
                        'is_ba': SAMPLES[sample]['is_ba'],
                        'has_lethal': SAMPLES[sample]['has_lethal'],
                        'simulated_coverage': result['simulated_coverage'],
                        'group': args.group,
                        'within_group': result['within_group'],
                        'total_kmers': total_kmers,
                        'tp': result['tp'],
                        'tn': result['tn'],
                        'fp': result['fp'],
                        'fn': result['fn'],
                        'kmer_cov_min': result['kmer_cov_min'],
                        'kmer_cov_mean': result['kmer_cov_mean'],
                        'kmer_cov_median': result['kmer_cov_median'],
                        'kmer_cov_max': result['kmer_cov_max'],
                        'non_zero_kmer_cov_min': result['non_zero_kmer_cov_min'],
                        'non_zero_kmer_cov_mean': result['non_zero_kmer_cov_mean'],
                        'non_zero_kmer_cov_median': result['non_zero_kmer_cov_median'],
                        'non_zero_kmer_cov_max': result['non_zero_kmer_cov_max']
                    }
                    output_handle.write(("\t".join([
                        str(row[col]) for col in SAMPLE_COLS
                    ])))
                    output_handle.write("\n")


def print_kmer_summary(file_output):
    """Print the final per kmer summaries."""
    with open(file_output, 'w') as output_handle:
        output_handle.write(("\t".join(KMER_COLS)))
        output_handle.write("\n")
        for kmer, coverages in KMERS.items():
            for coverage in coverages:
                within_group = get_coverage_stats(
                    KMERS[kmer][coverage]['group_coverages']
                )
                outgroup = get_coverage_stats(
                    KMERS[kmer][coverage]['outgroup_coverages']
                )
                row = {
                    'kmer': kmer,
                    'simulated_coverage': coverage,
                    'group': args.group,
                    'hamming_distance': HAMMING[kmer],
                    'tp': KMERS[kmer][coverage]['tp'],
                    'tn': KMERS[kmer][coverage]['tn'],
                    'fp': KMERS[kmer][coverage]['fp'],
                    'fn': KMERS[kmer][coverage]['fn'],
                    'group_kmer_cov_min': within_group['min'],
                    'group_kmer_cov_mean': within_group['mean'],
                    'group_kmer_cov_median': within_group['median'],
                    'group_kmer_cov_max': within_group['max'],
                    'non_zero_group_kmer_cov_min': within_group['non_zero_min'],
                    'non_zero_group_kmer_cov_mean': within_group['non_zero_mean'],
                    'non_zero_group_kmer_cov_median': within_group['non_zero_median'],
                    'non_zero_group_kmer_cov_max': within_group['non_zero_max'],
                    'outgroup_kmer_cov_min': outgroup['min'],
                    'outgroup_kmer_cov_mean': outgroup['mean'],
                    'outgroup_kmer_cov_median': outgroup['median'],
                    'outgroup_kmer_cov_max': outgroup['max'],
                    'non_zero_outgroup_kmer_cov_min': outgroup['non_zero_min'],
                    'non_zero_outgroup_kmer_cov_mean': outgroup['non_zero_mean'],
                    'non_zero_outgroup_kmer_cov_median': outgroup['non_zero_median'],
                    'non_zero_outgroup_kmer_cov_max': outgroup['non_zero_max'],
                }
                output_handle.write(("\t".join([
                    str(row[col]) for col in KMER_COLS
                ])))
                output_handle.write("\n")


def read_lines(input_file):
    """Return lines in a text file as a list."""
    lines = []
    with open(input_file, 'r') as input_handle:
        for line in input_handle:
            lines.append(line.rstrip())
    return lines


if __name__ == '__main__':
    parser = ap.ArgumentParser(
        prog='summarize-kmer-counts.py', conflict_handler='resolve',
        description=("Summarize kmer counts of each simulation.")
    )
    parser.add_argument('summary', type=str, metavar="SUMMARY",
                        help='Summary of Bacillus genomes.')
    parser.add_argument('directory', type=str, metavar="SIMULATION_DIR",
                        help='Directory with group specific 31-mer counts.')
    parser.add_argument('group', type=str, metavar="GROUP",
                        help='Which group to parse (ba, bcg or lef).')
    parser.add_argument('kmers', type=str, metavar="KMERS",
                        help='Group specific k-mers.')
    parser.add_argument('coverages', type=str, metavar="COVERAGES",
                        help=('Coverages to subsample to.'))
    parser.add_argument('outdir', type=str, metavar="OUTDIR",
                        help='Directory to output to.')
    parser.add_argument('--cpu', default=1, type=int, metavar="INT",
                        help='Number of cores to use (Default: 1)')
    parser.add_argument('--single_sample', type=str, metavar="STR",
                        help='Process a single sample.')
    parser.add_argument('--skip_kmers', action='store_true', default=False,
                        help='Skip kmer processing.')
    parser.add_argument('--filter', action='store_true', default=False,
                        help='Filter counts based on input kmers.')
    args = parser.parse_args()

    if args.group not in ['ba', 'bcg', 'lef']:
        raise Exception("GROUPS must be 'ba', 'bcg' or 'lef'")

    coverages = read_lines(args.coverages)

    print("Parsing Summary")
    parse_summary(args.summary)

    # NOTE: parse_kmers() and parse_filter_kmers() are referenced below but
    # their definitions are not included in this excerpt.
    if args.filter:
        print("Filtering Kmers")
        args.skip_kmers = True
        parse_filter_kmers(args.kmers)
    else:
        print("Parsing Kmers")
        parse_kmers(args.kmers, coverages, skip_kmers=args.skip_kmers,
                    has_hamming=False if args.group == 'lef' else True)

    total_kmers = len(KMERS)
    current = 1
    samples = list(SAMPLES.keys())
    if args.single_sample:
        samples = [args.single_sample]
    total = len(samples)
    for sample in samples:
        path = "{0}/{1}".format(args.directory, sample)
        if os.path.exists(path):
            print("Working on {0} ({1} of {2})".format(sample, current, total))
            current += 1
            count_files = sorted(glob.glob(
                "{0}/*-{1}.txt.gz".format(path, args.group)
            ))
            for count_file in count_files:
                coverage = os.path.basename(count_file).split('-')[1]
                parse_counts(count_file, sample, coverage, args.group,
                             skip_kmers=args.skip_kmers,
                             filter_kmers=args.filter)

    print("Output sample summary")
    if args.single_sample:
        print_sample_summary("{0}/count-summary-{1}-{2}.txt".format(
            args.outdir, args.single_sample, args.group
        ))
    else:
        print_sample_summary("{0}/count-summary-sample-{1}.txt".format(
            args.outdir, args.group
        ))

    if not args.skip_kmers:
        print("Output kmer summary")
        if args.single_sample:
            print_kmer_summary("{0}/count-summary-kmer-{1}-{2}.txt".format(
                args.outdir, args.single_sample, args.group
            ))
        else:
            print_kmer_summary("{0}/count-summary-kmer-{1}.txt".format(
                args.outdir, args.group
            ))
[ 2, 0, 1220, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 37811, 10044, 325, 832, 262, 28590, 32841, 1448, 2176, 479, 647, 9853, 526, 15931, 198, 11748, 1822, 29572, 355, 2471, 198, 6738, 17268, 1330, 14230, 1068, 35, 713, 198, 11748, 1...
1.90555
7,369
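The heart of the script above is `get_coverage_stats` plus canonicalisation via `reverse_complement`. A small usage sketch follows (the coverage values are invented, not taken from any simulation) to show what the per-sample numbers in the output tables actually are:

# Toy illustration of the two helpers defined in the script above;
# the coverage values here are made up.
coverages = [0, 3, 5, 0, 8]

stats = get_coverage_stats(coverages)
print(stats['min'], stats['mean'], stats['max'])
# 0 3.2000 8  -> zero counts drag the mean down...
print(stats['non_zero_min'], stats['non_zero_mean'], stats['non_zero_max'])
# 3 5 8       -> ...so non-zero variants are reported separately.

# Counts are canonicalised so a kmer and its reverse complement share one key.
print(reverse_complement('ATGC'))  # GCAT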
from marshmallow.exceptions import ValidationError


# ObjectDoesNotExist and CommunicationError appear in __all__ below but their
# definitions were missing from this snippet; minimal reconstructions are
# provided here so the export list resolves.
class ObjectDoesNotExist(Exception):
    """Raised when a requested object cannot be found."""


class CommunicationError(Exception):
    """Raised when communication with a remote service fails."""


__all__ = ('ValidationError', 'ObjectDoesNotExist', 'CommunicationError')
[ 6738, 22397, 42725, 13, 1069, 11755, 1330, 3254, 24765, 12331, 628, 628, 198, 834, 439, 834, 796, 19203, 7762, 24765, 12331, 3256, 705, 10267, 13921, 3673, 3109, 396, 3256, 705, 30813, 3299, 12331, 11537, 198 ]
3.685714
35
from django.contrib import admin

from .models import Songs

admin.site.register(Songs)

# Register your models here.
[ 6738, 42625, 14208, 13, 3642, 822, 1330, 13169, 198, 6738, 764, 27530, 1330, 31772, 198, 198, 28482, 13, 15654, 13, 30238, 7, 50, 28079, 8, 198, 198, 2, 17296, 534, 4981, 994, 13, 198 ]
3.441176
34
# Draw image time series for one or more plots

from jicbioimage.core.image import Image

import dtoolcore

import click

from translate_labels import rack_plot_to_image_plot
from image_utils import join_horizontally, join_vertically


if __name__ == '__main__':
    # NOTE: main() is called here but its definition is not included in
    # this excerpt.
    main()
[ 2, 15315, 2939, 640, 2168, 329, 530, 393, 517, 21528, 198, 198, 6738, 474, 291, 65, 952, 9060, 13, 7295, 13, 9060, 1330, 7412, 198, 198, 11748, 288, 25981, 7295, 198, 198, 11748, 3904, 198, 198, 6738, 15772, 62, 23912, 1424, 1330, 1...
3.16092
87
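`join_horizontally` and `join_vertically` come from the project's own `image_utils` module, which is not part of the row. Under the assumption that they simply concatenate equally sized image arrays along one axis, a minimal numpy equivalent would be:

import numpy as np

# Assumed semantics for the project's image_utils helpers: plain
# concatenation of equally-sized image arrays along one axis.
def join_horizontally(images):
    return np.concatenate(list(images), axis=1)

def join_vertically(images):
    return np.concatenate(list(images), axis=0)

tile = np.zeros((4, 4), dtype=np.uint8)
row = join_horizontally([tile, tile])   # shape (4, 8)
grid = join_vertically([row, row])      # shape (8, 8)
print(grid.shape)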
from pytpp.properties.response_objects.dataclasses import system_status
from pytpp.tools.helpers.date_converter import from_date_string
[ 6738, 12972, 83, 381, 13, 48310, 13, 26209, 62, 48205, 13, 19608, 330, 28958, 1330, 1080, 62, 13376, 198, 6738, 12972, 83, 381, 13, 31391, 13, 16794, 364, 13, 4475, 62, 1102, 332, 353, 1330, 422, 62, 4475, 62, 8841, 628 ]
3.341463
41
from typing import Optional

import pytorch_lightning as pl
import torch
from omegaconf import OmegaConf
from torch.utils.data import DataLoader, random_split
from transformers import T5Tokenizer

from src.data.PaperDataset import PaperDataset


if __name__ == "__main__":
    # NOTE: ArvixDataModule is instantiated here but its definition is not
    # included in this excerpt.
    dm = ArvixDataModule()
[ 6738, 19720, 1330, 32233, 198, 198, 11748, 12972, 13165, 354, 62, 2971, 768, 355, 458, 198, 11748, 28034, 198, 6738, 267, 28917, 7807, 69, 1330, 19839, 18546, 198, 6738, 28034, 13, 26791, 13, 7890, 1330, 6060, 17401, 11, 4738, 62, 35312...
3.236559
93
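The `ArvixDataModule` used above is not shown in the row. A minimal sketch of what such a `pl.LightningDataModule` could look like, given the imports (`random_split`, `PaperDataset`), follows; the data path, batch size, and split ratio are assumptions, not the original implementation:

import pytorch_lightning as pl
from torch.utils.data import DataLoader, random_split

from src.data.PaperDataset import PaperDataset


class ArvixDataModule(pl.LightningDataModule):
    """Hypothetical data module matching the imports in the snippet above."""

    def __init__(self, data_path: str = "data/arvix.csv", batch_size: int = 8):
        super().__init__()
        self.data_path = data_path      # assumed location
        self.batch_size = batch_size

    def setup(self, stage=None):
        # PaperDataset is the project's own class; this constructor
        # signature is an assumption.
        dataset = PaperDataset(self.data_path)
        n_train = int(0.9 * len(dataset))
        self.train_set, self.val_set = random_split(
            dataset, [n_train, len(dataset) - n_train])

    def train_dataloader(self):
        return DataLoader(self.train_set, batch_size=self.batch_size, shuffle=True)

    def val_dataloader(self):
        return DataLoader(self.val_set, batch_size=self.batch_size)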
# -*- coding: utf-8 -*-

import os
import sys
import time
import subprocess

import wx
import ConfigParser

from wx.lib.mixins.listctrl import getListCtrlSelection
from wx.lib.pubsub import pub

from gui.RootGUI import RootGUI
from StepsDialog import StepsDialog
from PlotFrame import PlotFuncFrame, PlotCorrFrame

import interface
import mbox
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 11748, 28686, 198, 11748, 25064, 198, 11748, 640, 198, 11748, 850, 14681, 198, 11748, 266, 87, 198, 11748, 17056, 46677, 198, 6738, 266, 87, 13, 8019, 13, 19816, 10...
3.320388
103
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-06 10:07
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_prices.models
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 2980, 515, 416, 37770, 352, 13, 940, 13, 20, 319, 2177, 12, 2999, 12, 3312, 838, 25, 2998, 198, 6738, 11593, 37443, 834, 1330, 28000, 1098, 62, 17201, 874, 198, 1...
2.906977
86
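Only the header of this generated migration survives in the row; the `Migration` class itself is cut off. A sketch of the shape such a migration typically has, given these imports, is below; the model, field names, and field arguments are hypothetical, not recovered from the source:

# Hypothetical body for the truncated migration above. The swappable user
# dependency mirrors the `settings` import; model/field names are invented.
class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddField(
            model_name='order',   # hypothetical model
            name='total',         # hypothetical field
            field=django_prices.models.PriceField(
                currency='USD', decimal_places=2, max_digits=12, null=True),
        ),
        migrations.AlterField(
            model_name='order',
            name='user',
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL,
                to=settings.AUTH_USER_MODEL),
        ),
    ]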
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .base import TestRailAPIBase
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220...
1.275281
178
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: proto/transaction.proto """Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='proto/transaction.proto', package='slog', syntax='proto3', serialized_options=None, create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x17proto/transaction.proto\x12\x04slog\"1\n\x0eMasterMetadata\x12\x0e\n\x06master\x18\x01 \x01(\r\x12\x0f\n\x07\x63ounter\x18\x02 \x01(\r\"\x81\x01\n\nValueEntry\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x11\n\tnew_value\x18\x02 \x01(\x0c\x12\x1b\n\x04type\x18\x03 \x01(\x0e\x32\r.slog.KeyType\x12(\n\x08metadata\x18\x04 \x01(\x0b\x32\x14.slog.MasterMetadataH\x00\x42\n\n\x08optional\"C\n\rKeyValueEntry\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12%\n\x0bvalue_entry\x18\x02 \x01(\x0b\x32\x10.slog.ValueEntry\"j\n\x14TransactionEventInfo\x12%\n\x05\x65vent\x18\x01 \x01(\x0e\x32\x16.slog.TransactionEvent\x12\x0c\n\x04time\x18\x02 \x01(\x03\x12\x0f\n\x07machine\x18\x03 \x01(\x05\x12\x0c\n\x04home\x18\x04 \x01(\x05\"\x8c\x03\n\x13TransactionInternal\x12\n\n\x02id\x18\x01 \x01(\x04\x12#\n\x04type\x18\x02 \x01(\x0e\x32\x15.slog.TransactionType\x12\x0c\n\x04home\x18\x03 \x01(\x05\x12\x1b\n\x13\x63oordinating_server\x18\x04 \x01(\r\x12\x11\n\ttimestamp\x18\x05 \x01(\x03\x12\x1b\n\x13involved_partitions\x18\x06 \x03(\r\x12\x19\n\x11\x61\x63tive_partitions\x18\x07 \x03(\r\x12\x18\n\x10involved_regions\x18\x08 \x03(\r\x12*\n\x06\x65vents\x18\t \x03(\x0b\x32\x1a.slog.TransactionEventInfo\x12\'\n\x1fmh_depart_from_coordinator_time\x18\n \x01(\x03\x12\x1e\n\x16mh_arrive_at_home_time\x18\x0b \x01(\x03\x12!\n\x19mh_enter_local_batch_time\x18\x0c \x01(\x03\x12\x1c\n\x14global_log_positions\x18\r \x03(\x03\"H\n\x11RemasterProcedure\x12\x12\n\nnew_master\x18\x01 \x01(\r\x12\x1f\n\x17is_new_master_lock_only\x18\x02 \x01(\x08\"\x19\n\tProcedure\x12\x0c\n\x04\x61rgs\x18\x01 \x03(\x0c\"1\n\nProcedures\x12#\n\nprocedures\x18\x01 \x03(\x0b\x32\x0f.slog.Procedure\"\xb1\x02\n\x0bTransaction\x12+\n\x08internal\x18\x01 \x01(\x0b\x32\x19.slog.TransactionInternal\x12 \n\x04\x63ode\x18\x02 \x01(\x0b\x32\x10.slog.ProceduresH\x00\x12+\n\x08remaster\x18\x03 \x01(\x0b\x32\x17.slog.RemasterProcedureH\x00\x12!\n\x04keys\x18\x04 \x03(\x0b\x32\x13.slog.KeyValueEntry\x12\x14\n\x0c\x64\x65leted_keys\x18\x05 \x03(\x0c\x12\'\n\x06status\x18\x06 \x01(\x0e\x32\x17.slog.TransactionStatus\x12#\n\nabort_code\x18\x07 \x01(\x0e\x32\x0f.slog.AbortCode\x12\x14\n\x0c\x61\x62ort_reason\x18\x08 
\x01(\tB\t\n\x07program*L\n\x0fTransactionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0bSINGLE_HOME\x10\x01\x12\x1b\n\x17MULTI_HOME_OR_LOCK_ONLY\x10\x02*@\n\x11TransactionStatus\x12\x0f\n\x0bNOT_STARTED\x10\x00\x12\r\n\tCOMMITTED\x10\x01\x12\x0b\n\x07\x41\x42ORTED\x10\x02*7\n\tAbortCode\x12\t\n\x05OTHER\x10\x00\x12\x10\n\x0cRATE_LIMITED\x10\x01\x12\r\n\tRESTARTED\x10\x02*\x1e\n\x07KeyType\x12\x08\n\x04READ\x10\x00\x12\t\n\x05WRITE\x10\x01*\xde\x06\n\x10TransactionEvent\x12\x07\n\x03\x41LL\x10\x00\x12\x10\n\x0c\x45NTER_SERVER\x10\x01\x12\x1c\n\x18\x45XIT_SERVER_TO_FORWARDER\x10\x02\x12\x13\n\x0f\x45NTER_FORWARDER\x10\x03\x12\x1f\n\x1b\x45XIT_FORWARDER_TO_SEQUENCER\x10\x04\x12(\n$EXIT_FORWARDER_TO_MULTI_HOME_ORDERER\x10\x05\x12\x1c\n\x18\x45NTER_MULTI_HOME_ORDERER\x10\x06\x12%\n!ENTER_MULTI_HOME_ORDERER_IN_BATCH\x10\x07\x12$\n EXIT_MULTI_HOME_ORDERER_IN_BATCH\x10\x08\x12\x1b\n\x17\x45XIT_MULTI_HOME_ORDERER\x10\t\x12\x13\n\x0f\x45NTER_SEQUENCER\x10\n\x12.\n*EXPECTED_WAIT_TIME_UNTIL_ENTER_LOCAL_BATCH\x10\x0b\x12\x15\n\x11\x45NTER_LOCAL_BATCH\x10\x0c\x12\x1c\n\x18\x45NTER_SEQUENCER_IN_BATCH\x10\r\x12\x1b\n\x17\x45XIT_SEQUENCER_IN_BATCH\x10\x0e\x12\x1e\n\x1a\x45NTER_LOG_MANAGER_IN_BATCH\x10\x0f\x12\x1b\n\x17\x45NTER_LOG_MANAGER_ORDER\x10\x10\x12\x14\n\x10\x45XIT_LOG_MANAGER\x10\x11\x12\x13\n\x0f\x45NTER_SCHEDULER\x10\x12\x12\x16\n\x12\x45NTER_SCHEDULER_LO\x10\x13\x12\x16\n\x12\x45NTER_LOCK_MANAGER\x10\x14\x12\x15\n\x11\x44\x45\x41\x44LOCK_DETECTED\x10\x15\x12\x0e\n\nDISPATCHED\x10\x16\x12\x13\n\x0f\x44ISPATCHED_FAST\x10\x17\x12\x13\n\x0f\x44ISPATCHED_SLOW\x10\x18\x12\x1e\n\x1a\x44ISPATCHED_SLOW_DEADLOCKED\x10\x19\x12\x10\n\x0c\x45NTER_WORKER\x10\x1a\x12\x14\n\x10GOT_REMOTE_READS\x10\x1b\x12\x1f\n\x1bGOT_REMOTE_READS_DEADLOCKED\x10\x1c\x12\x0f\n\x0b\x45XIT_WORKER\x10\x1d\x12\x14\n\x10RETURN_TO_SERVER\x10\x1e\x12\x19\n\x15\x45XIT_SERVER_TO_CLIENT\x10\x1f\x62\x06proto3' ) _TRANSACTIONTYPE = _descriptor.EnumDescriptor( name='TransactionType', full_name='slog.TransactionType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='UNKNOWN', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SINGLE_HOME', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MULTI_HOME_OR_LOCK_ONLY', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=1252, serialized_end=1328, ) _sym_db.RegisterEnumDescriptor(_TRANSACTIONTYPE) TransactionType = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONTYPE) _TRANSACTIONSTATUS = _descriptor.EnumDescriptor( name='TransactionStatus', full_name='slog.TransactionStatus', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='NOT_STARTED', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='COMMITTED', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ABORTED', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=1330, serialized_end=1394, ) 
_sym_db.RegisterEnumDescriptor(_TRANSACTIONSTATUS) TransactionStatus = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONSTATUS) _ABORTCODE = _descriptor.EnumDescriptor( name='AbortCode', full_name='slog.AbortCode', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='OTHER', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RATE_LIMITED', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RESTARTED', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=1396, serialized_end=1451, ) _sym_db.RegisterEnumDescriptor(_ABORTCODE) AbortCode = enum_type_wrapper.EnumTypeWrapper(_ABORTCODE) _KEYTYPE = _descriptor.EnumDescriptor( name='KeyType', full_name='slog.KeyType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='READ', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='WRITE', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=1453, serialized_end=1483, ) _sym_db.RegisterEnumDescriptor(_KEYTYPE) KeyType = enum_type_wrapper.EnumTypeWrapper(_KEYTYPE) _TRANSACTIONEVENT = _descriptor.EnumDescriptor( name='TransactionEvent', full_name='slog.TransactionEvent', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='ALL', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_SERVER', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_SERVER_TO_FORWARDER', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_FORWARDER', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_FORWARDER_TO_SEQUENCER', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_FORWARDER_TO_MULTI_HOME_ORDERER', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_MULTI_HOME_ORDERER', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_MULTI_HOME_ORDERER_IN_BATCH', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_MULTI_HOME_ORDERER_IN_BATCH', index=8, number=8, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_MULTI_HOME_ORDERER', index=9, number=9, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_SEQUENCER', index=10, number=10, 
serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXPECTED_WAIT_TIME_UNTIL_ENTER_LOCAL_BATCH', index=11, number=11, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_LOCAL_BATCH', index=12, number=12, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_SEQUENCER_IN_BATCH', index=13, number=13, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_SEQUENCER_IN_BATCH', index=14, number=14, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_LOG_MANAGER_IN_BATCH', index=15, number=15, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_LOG_MANAGER_ORDER', index=16, number=16, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_LOG_MANAGER', index=17, number=17, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_SCHEDULER', index=18, number=18, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_SCHEDULER_LO', index=19, number=19, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_LOCK_MANAGER', index=20, number=20, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DEADLOCK_DETECTED', index=21, number=21, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DISPATCHED', index=22, number=22, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DISPATCHED_FAST', index=23, number=23, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DISPATCHED_SLOW', index=24, number=24, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DISPATCHED_SLOW_DEADLOCKED', index=25, number=25, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTER_WORKER', index=26, number=26, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='GOT_REMOTE_READS', index=27, number=27, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='GOT_REMOTE_READS_DEADLOCKED', index=28, number=28, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_WORKER', index=29, number=29, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RETURN_TO_SERVER', index=30, number=30, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXIT_SERVER_TO_CLIENT', index=31, number=31, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], 
containing_type=None, serialized_options=None, serialized_start=1486, serialized_end=2348, ) _sym_db.RegisterEnumDescriptor(_TRANSACTIONEVENT) TransactionEvent = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONEVENT) UNKNOWN = 0 SINGLE_HOME = 1 MULTI_HOME_OR_LOCK_ONLY = 2 NOT_STARTED = 0 COMMITTED = 1 ABORTED = 2 OTHER = 0 RATE_LIMITED = 1 RESTARTED = 2 READ = 0 WRITE = 1 ALL = 0 ENTER_SERVER = 1 EXIT_SERVER_TO_FORWARDER = 2 ENTER_FORWARDER = 3 EXIT_FORWARDER_TO_SEQUENCER = 4 EXIT_FORWARDER_TO_MULTI_HOME_ORDERER = 5 ENTER_MULTI_HOME_ORDERER = 6 ENTER_MULTI_HOME_ORDERER_IN_BATCH = 7 EXIT_MULTI_HOME_ORDERER_IN_BATCH = 8 EXIT_MULTI_HOME_ORDERER = 9 ENTER_SEQUENCER = 10 EXPECTED_WAIT_TIME_UNTIL_ENTER_LOCAL_BATCH = 11 ENTER_LOCAL_BATCH = 12 ENTER_SEQUENCER_IN_BATCH = 13 EXIT_SEQUENCER_IN_BATCH = 14 ENTER_LOG_MANAGER_IN_BATCH = 15 ENTER_LOG_MANAGER_ORDER = 16 EXIT_LOG_MANAGER = 17 ENTER_SCHEDULER = 18 ENTER_SCHEDULER_LO = 19 ENTER_LOCK_MANAGER = 20 DEADLOCK_DETECTED = 21 DISPATCHED = 22 DISPATCHED_FAST = 23 DISPATCHED_SLOW = 24 DISPATCHED_SLOW_DEADLOCKED = 25 ENTER_WORKER = 26 GOT_REMOTE_READS = 27 GOT_REMOTE_READS_DEADLOCKED = 28 EXIT_WORKER = 29 RETURN_TO_SERVER = 30 EXIT_SERVER_TO_CLIENT = 31 _MASTERMETADATA = _descriptor.Descriptor( name='MasterMetadata', full_name='slog.MasterMetadata', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='master', full_name='slog.MasterMetadata.master', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='counter', full_name='slog.MasterMetadata.counter', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=33, serialized_end=82, ) _VALUEENTRY = _descriptor.Descriptor( name='ValueEntry', full_name='slog.ValueEntry', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='value', full_name='slog.ValueEntry.value', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='new_value', full_name='slog.ValueEntry.new_value', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type', full_name='slog.ValueEntry.type', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata', full_name='slog.ValueEntry.metadata', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='optional', full_name='slog.ValueEntry.optional', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=85, serialized_end=214, ) _KEYVALUEENTRY = _descriptor.Descriptor( name='KeyValueEntry', full_name='slog.KeyValueEntry', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='slog.KeyValueEntry.key', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value_entry', full_name='slog.KeyValueEntry.value_entry', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=216, serialized_end=283, ) _TRANSACTIONEVENTINFO = _descriptor.Descriptor( name='TransactionEventInfo', full_name='slog.TransactionEventInfo', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='slog.TransactionEventInfo.event', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='time', full_name='slog.TransactionEventInfo.time', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='machine', full_name='slog.TransactionEventInfo.machine', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='home', full_name='slog.TransactionEventInfo.home', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=285, serialized_end=391, ) _TRANSACTIONINTERNAL = _descriptor.Descriptor( name='TransactionInternal', full_name='slog.TransactionInternal', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='id', full_name='slog.TransactionInternal.id', index=0, number=1, type=4, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type', full_name='slog.TransactionInternal.type', index=1, number=2, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='home', full_name='slog.TransactionInternal.home', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='coordinating_server', full_name='slog.TransactionInternal.coordinating_server', index=3, number=4, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='timestamp', full_name='slog.TransactionInternal.timestamp', index=4, number=5, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='involved_partitions', full_name='slog.TransactionInternal.involved_partitions', index=5, number=6, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='active_partitions', full_name='slog.TransactionInternal.active_partitions', index=6, number=7, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='involved_regions', full_name='slog.TransactionInternal.involved_regions', index=7, number=8, type=13, cpp_type=3, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', 
full_name='slog.TransactionInternal.events', index=8, number=9, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='mh_depart_from_coordinator_time', full_name='slog.TransactionInternal.mh_depart_from_coordinator_time', index=9, number=10, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='mh_arrive_at_home_time', full_name='slog.TransactionInternal.mh_arrive_at_home_time', index=10, number=11, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='mh_enter_local_batch_time', full_name='slog.TransactionInternal.mh_enter_local_batch_time', index=11, number=12, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='global_log_positions', full_name='slog.TransactionInternal.global_log_positions', index=12, number=13, type=3, cpp_type=2, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=394, serialized_end=790, ) _REMASTERPROCEDURE = _descriptor.Descriptor( name='RemasterProcedure', full_name='slog.RemasterProcedure', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='new_master', full_name='slog.RemasterProcedure.new_master', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='is_new_master_lock_only', full_name='slog.RemasterProcedure.is_new_master_lock_only', index=1, number=2, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=792, serialized_end=864, ) _PROCEDURE = _descriptor.Descriptor( name='Procedure', full_name='slog.Procedure', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='args', 
full_name='slog.Procedure.args', index=0, number=1, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=866, serialized_end=891, ) _PROCEDURES = _descriptor.Descriptor( name='Procedures', full_name='slog.Procedures', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='procedures', full_name='slog.Procedures.procedures', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=893, serialized_end=942, ) _TRANSACTION = _descriptor.Descriptor( name='Transaction', full_name='slog.Transaction', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='internal', full_name='slog.Transaction.internal', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='code', full_name='slog.Transaction.code', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='remaster', full_name='slog.Transaction.remaster', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='keys', full_name='slog.Transaction.keys', index=3, number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='deleted_keys', full_name='slog.Transaction.deleted_keys', index=4, number=5, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='status', full_name='slog.Transaction.status', index=5, number=6, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='abort_code', full_name='slog.Transaction.abort_code', index=6, number=7, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='abort_reason', full_name='slog.Transaction.abort_reason', index=7, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='program', full_name='slog.Transaction.program', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=945, serialized_end=1250, ) _VALUEENTRY.fields_by_name['type'].enum_type = _KEYTYPE _VALUEENTRY.fields_by_name['metadata'].message_type = _MASTERMETADATA _VALUEENTRY.oneofs_by_name['optional'].fields.append( _VALUEENTRY.fields_by_name['metadata']) _VALUEENTRY.fields_by_name['metadata'].containing_oneof = _VALUEENTRY.oneofs_by_name['optional'] _KEYVALUEENTRY.fields_by_name['value_entry'].message_type = _VALUEENTRY _TRANSACTIONEVENTINFO.fields_by_name['event'].enum_type = _TRANSACTIONEVENT _TRANSACTIONINTERNAL.fields_by_name['type'].enum_type = _TRANSACTIONTYPE _TRANSACTIONINTERNAL.fields_by_name['events'].message_type = _TRANSACTIONEVENTINFO _PROCEDURES.fields_by_name['procedures'].message_type = _PROCEDURE _TRANSACTION.fields_by_name['internal'].message_type = _TRANSACTIONINTERNAL _TRANSACTION.fields_by_name['code'].message_type = _PROCEDURES _TRANSACTION.fields_by_name['remaster'].message_type = _REMASTERPROCEDURE _TRANSACTION.fields_by_name['keys'].message_type = _KEYVALUEENTRY _TRANSACTION.fields_by_name['status'].enum_type = _TRANSACTIONSTATUS _TRANSACTION.fields_by_name['abort_code'].enum_type = _ABORTCODE _TRANSACTION.oneofs_by_name['program'].fields.append( _TRANSACTION.fields_by_name['code']) _TRANSACTION.fields_by_name['code'].containing_oneof = _TRANSACTION.oneofs_by_name['program'] _TRANSACTION.oneofs_by_name['program'].fields.append( _TRANSACTION.fields_by_name['remaster']) _TRANSACTION.fields_by_name['remaster'].containing_oneof = _TRANSACTION.oneofs_by_name['program'] DESCRIPTOR.message_types_by_name['MasterMetadata'] = _MASTERMETADATA DESCRIPTOR.message_types_by_name['ValueEntry'] = _VALUEENTRY DESCRIPTOR.message_types_by_name['KeyValueEntry'] = _KEYVALUEENTRY DESCRIPTOR.message_types_by_name['TransactionEventInfo'] = _TRANSACTIONEVENTINFO DESCRIPTOR.message_types_by_name['TransactionInternal'] = _TRANSACTIONINTERNAL DESCRIPTOR.message_types_by_name['RemasterProcedure'] = _REMASTERPROCEDURE DESCRIPTOR.message_types_by_name['Procedure'] = _PROCEDURE DESCRIPTOR.message_types_by_name['Procedures'] = _PROCEDURES DESCRIPTOR.message_types_by_name['Transaction'] = _TRANSACTION DESCRIPTOR.enum_types_by_name['TransactionType'] = _TRANSACTIONTYPE DESCRIPTOR.enum_types_by_name['TransactionStatus'] = _TRANSACTIONSTATUS DESCRIPTOR.enum_types_by_name['AbortCode'] = _ABORTCODE DESCRIPTOR.enum_types_by_name['KeyType'] = _KEYTYPE 
DESCRIPTOR.enum_types_by_name['TransactionEvent'] = _TRANSACTIONEVENT _sym_db.RegisterFileDescriptor(DESCRIPTOR) MasterMetadata = _reflection.GeneratedProtocolMessageType('MasterMetadata', (_message.Message,), { 'DESCRIPTOR' : _MASTERMETADATA, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.MasterMetadata) }) _sym_db.RegisterMessage(MasterMetadata) ValueEntry = _reflection.GeneratedProtocolMessageType('ValueEntry', (_message.Message,), { 'DESCRIPTOR' : _VALUEENTRY, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.ValueEntry) }) _sym_db.RegisterMessage(ValueEntry) KeyValueEntry = _reflection.GeneratedProtocolMessageType('KeyValueEntry', (_message.Message,), { 'DESCRIPTOR' : _KEYVALUEENTRY, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.KeyValueEntry) }) _sym_db.RegisterMessage(KeyValueEntry) TransactionEventInfo = _reflection.GeneratedProtocolMessageType('TransactionEventInfo', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONEVENTINFO, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.TransactionEventInfo) }) _sym_db.RegisterMessage(TransactionEventInfo) TransactionInternal = _reflection.GeneratedProtocolMessageType('TransactionInternal', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONINTERNAL, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.TransactionInternal) }) _sym_db.RegisterMessage(TransactionInternal) RemasterProcedure = _reflection.GeneratedProtocolMessageType('RemasterProcedure', (_message.Message,), { 'DESCRIPTOR' : _REMASTERPROCEDURE, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.RemasterProcedure) }) _sym_db.RegisterMessage(RemasterProcedure) Procedure = _reflection.GeneratedProtocolMessageType('Procedure', (_message.Message,), { 'DESCRIPTOR' : _PROCEDURE, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.Procedure) }) _sym_db.RegisterMessage(Procedure) Procedures = _reflection.GeneratedProtocolMessageType('Procedures', (_message.Message,), { 'DESCRIPTOR' : _PROCEDURES, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.Procedures) }) _sym_db.RegisterMessage(Procedures) Transaction = _reflection.GeneratedProtocolMessageType('Transaction', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTION, '__module__' : 'proto.transaction_pb2' # @@protoc_insertion_point(class_scope:slog.Transaction) }) _sym_db.RegisterMessage(Transaction) # @@protoc_insertion_point(module_scope)
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 2980, 515, 416, 262, 8435, 11876, 17050, 13, 220, 8410, 5626, 48483, 0, 198, 2, 2723, 25, 44876, 14, 7645, 2673, 13, 1676, 1462, 198, 37811, 8645, 515, 8435, 11876,...
2.335532
17,432
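The generated module above only defines the message classes; nothing in the row shows them in use. A small round-trip example using the standard protobuf runtime API is below; the field names come from the descriptors above, while the concrete values are invented for illustration:

# Round-trip a slog.Transaction through serialization. Field and enum names
# are taken from the generated descriptors; the values are made up.
from proto import transaction_pb2 as txn_pb


def build_example():
    txn = txn_pb.Transaction()
    txn.internal.id = 42
    txn.internal.type = txn_pb.SINGLE_HOME
    txn.internal.home = 0

    key_entry = txn.keys.add()
    key_entry.key = b"account:1001"
    key_entry.value_entry.value = b"100"
    key_entry.value_entry.type = txn_pb.WRITE

    txn.status = txn_pb.COMMITTED
    return txn


if __name__ == "__main__":
    txn = build_example()
    wire = txn.SerializeToString()   # bytes on the wire
    parsed = txn_pb.Transaction()
    parsed.ParseFromString(wire)
    assert parsed.internal.id == 42
    print(parsed)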
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
import unittest

from srl import movement
from srl import simulation
from srl import world
[ 2, 15069, 2177, 3012, 3457, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 198, 2, 921, 743, ...
3.816667
180
from llvmlite import ir
import xml.etree.ElementTree as et

int32 = ir.IntType(32)
int64 = ir.IntType(64)
int1 = ir.IntType(1)
void_type = ir.VoidType()
function_names = []
registers, functions, uniques, extracts = {}, {}, {}, {}
internal_functions = {}
memory = {}
flags = ["ZF", "CF", "OF", "SF"]
pointers = ["RSP", "RIP", "RBP", "EBP", "ESP"]


# noinspection DuplicatedCode
[ 6738, 32660, 85, 4029, 578, 1330, 4173, 198, 11748, 35555, 13, 316, 631, 13, 20180, 27660, 355, 2123, 198, 198, 600, 2624, 796, 4173, 13, 5317, 6030, 7, 2624, 8, 198, 600, 2414, 796, 4173, 13, 5317, 6030, 7, 2414, 8, 198, 600, 16,...
2.569536
151
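The row above only sets up shared IR types and lookup tables; it never shows them feeding the llvmlite builder API. As a hedged illustration (the `add` function below is invented, not part of the original lifter), emitting a trivial function with those types looks like this:

# Build a module containing `i32 add(i32 %a, i32 %b)` and print its
# textual LLVM IR. Uses only the documented llvmlite ir API.
from llvmlite import ir

int32 = ir.IntType(32)

module = ir.Module(name="demo")
fnty = ir.FunctionType(int32, (int32, int32))
func = ir.Function(module, fnty, name="add")
a, b = func.args

block = func.append_basic_block(name="entry")
builder = ir.IRBuilder(block)
result = builder.add(a, b, name="sum")
builder.ret(result)

print(module)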
#!/usr/bin/env python
# -*- Coding: UTF-8 -*-
#
# WPSeku: Wordpress Security Scanner
#
# @url: https://github.com/m4ll0k/WPSeku
# @author: Momo Outaadi (M4ll0k)

import re
from lib import wphttp
from lib import wpprint
[ 2, 14, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 327, 7656, 25, 41002, 12, 23, 532, 9, 12, 198, 2, 198, 2, 370, 3705, 988, 84, 25, 9678, 8439, 4765, 20937, 1008, 198, 2, 198, 2, 2488, 6371, 25, 3740, 1378, 12567, ...
2.294737
95
from tw2.jit.widgets.chart import (AreaChart, BarChart, PieChart)
from tw2.jit.widgets.graph import (ForceDirectedGraph, RadialGraph)
from tw2.jit.widgets.tree import (SpaceTree, HyperTree, Sunburst,
                                  Icicle, TreeMap)

from tw2.jit.widgets.ajax import AjaxRadialGraph
from tw2.jit.widgets.sqla import SQLARadialGraph
[ 198, 6738, 665, 17, 13, 45051, 13, 28029, 11407, 13, 40926, 1330, 357, 30547, 45488, 11, 2409, 45488, 11, 21690, 45488, 8, 198, 6738, 665, 17, 13, 45051, 13, 28029, 11407, 13, 34960, 1330, 357, 10292, 13470, 276, 37065, 11, 5325, 498,...
2.464789
142
import json, requests, os, shlex, asyncio, uuid, shutil
from typing import Tuple
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, CallbackQuery

# Configs
API_HASH = os.environ['API_HASH']
APP_ID = int(os.environ['APP_ID'])
BOT_TOKEN = os.environ['BOT_TOKEN']
downloads = './downloads/{}/'

# Buttons
START_BUTTONS = [
    [
        InlineKeyboardButton('Source', url='https://github.com/X-Gorn/TikTokDL'),
        InlineKeyboardButton('Project Channel', url='https://t.me/xTeamBots'),
    ],
    [InlineKeyboardButton('Author', url='https://t.me/xgorn')],
]

DL_BUTTONS = [
    [
        InlineKeyboardButton('No Watermark', callback_data='nowm'),
        InlineKeyboardButton('Watermark', callback_data='wm'),
    ],
    [InlineKeyboardButton('Audio', callback_data='audio')],
]

# Running bot
xbot = Client('TikTokDL', api_id=APP_ID, api_hash=API_HASH, bot_token=BOT_TOKEN)

# NOTE: the handler bodies for the sections below were cut from this
# excerpt; only their section comments remain.

# Helpers
# Thanks to FridayUB

# Start

# Downloader for tiktok

# Callbacks

xbot.run()
[ 11748, 33918, 11, 7007, 11, 28686, 11, 427, 2588, 11, 30351, 952, 11, 334, 27112, 11, 4423, 346, 201, 198, 6738, 19720, 1330, 309, 29291, 201, 198, 6738, 12972, 39529, 1330, 20985, 11, 16628, 201, 198, 6738, 12972, 39529, 13, 19199, 1...
2.358407
452
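The `# Start` and `# Callbacks` sections above are placeholders whose handlers were elided. A hedged sketch of what they could look like, building on the row's `xbot` and button definitions (the decorators are the real pyrogram API; the handler bodies and reply texts are assumptions):

# Illustrative handlers for the elided sections; assumes xbot, START_BUTTONS,
# and DL_BUTTONS from the snippet above are in scope.
from pyrogram import Client, filters
from pyrogram.types import CallbackQuery, InlineKeyboardMarkup


@xbot.on_message(filters.command('start') & filters.private)
async def start(client, message):
    await message.reply_text(
        'Send me a TikTok link!',
        reply_markup=InlineKeyboardMarkup(START_BUTTONS),
    )


@xbot.on_callback_query()
async def dispatch(client: Client, cb: CallbackQuery):
    # cb.data matches the callback_data values declared in DL_BUTTONS.
    if cb.data == 'nowm':
        await cb.answer('Downloading without watermark...')
    elif cb.data == 'wm':
        await cb.answer('Downloading with watermark...')
    else:  # 'audio'
        await cb.answer('Extracting audio...')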
import wx
import wx.adv
import random
import util
import config

import time
import datetime
import threading

import requests
import json

from functools import partial
[ 11748, 266, 87, 198, 11748, 266, 87, 13, 32225, 198, 11748, 4738, 198, 11748, 7736, 198, 11748, 4566, 198, 198, 11748, 640, 198, 11748, 4818, 8079, 198, 11748, 4704, 278, 198, 198, 11748, 7007, 198, 11748, 33918, 628, 198, 6738, 1257, ...
3.680851
47
"""High-level search API. This module implements application-specific search semantics on top of App Engine's search API. There are two chief operations: querying for entities, and managing entities in the search facility. Add and remove Card entities in the search facility: insert_cards([models.Card]) delete_cards([models.Card]) Query for Card entities: query_cards(query_string, limit=20) -> search.SearchResults The results items will have the following fields: user_key, user_nickname, front, back, info, tag (repeated), added, modified, source_url The query_string is free-form, as a user would enter it, and passes through a custom query processor before the query is submitted to App Engine. Notably, pass @username to restrict the query to entities authored by username, and #tag to restrict the query to only documents matching the given tag. Multiple @usernames or #tags result in an OR query. """ import re from google.appengine.api import search from google.appengine.ext import ndb QUERY_LIMIT = 20 CARD_INDEX_NAME = 'cards' # Increase this value when _card2doc changes its format so that # queries can determine the data available on returned documents. CARD_DOCUMENT_VERSION = '1' # Ensure we're under the 2000 character limit from # https://developers.google.com/appengine/docs/python/search/query_strings MAX_QUERY_LEN = 200 # TODO(chris): it would be better if this module didn't know about # specific entity types, but instead defined a protocol to get # metadata from an entity and generate a document. def insert_cards(cards): """Insert or update models.Card entities in the search facility.""" # TODO(chris): should we allow more than 200 cards per call? assert len(cards) <= 200, len(cards) card_docs = map(_card2doc, cards) index = search.Index(name=CARD_INDEX_NAME) index.put(card_docs) def delete_cards(cards): """Delete models.Card entities from the search facility.""" index = search.Index(name=CARD_INDEX_NAME) card_doc_ids = map(_card2docid, cards) index.delete(card_doc_ids) def query_cards(query_str, limit=QUERY_LIMIT, web_safe_cursor=None, ids_only=False, user_key=None): """Return the search.SearchResults for a query. ids_only is useful because the returned document IDs are url-safe keys for models.Card entities. """ if web_safe_cursor: cursor = search.Cursor(web_safe_string=web_safe_cursor) else: cursor = None index = search.Index(name=CARD_INDEX_NAME) query_processor = _QueryProcessor( query_str, name_field='user_nickname', tag_field='tag', private_field='private', user_key_field='user_key', query_options=search.QueryOptions(limit=limit, cursor=cursor, ids_only=ids_only), user_key=user_key) search_results = index.search(query_processor.query()) # TODO(chris): should this return partially-instantiated # models.Card instances instead of leaking implementation details # like we do now? return search_results
[ 37811, 11922, 12, 5715, 2989, 7824, 13, 198, 198, 1212, 8265, 23986, 3586, 12, 11423, 2989, 33815, 319, 1353, 286, 198, 4677, 7117, 338, 2989, 7824, 13, 1318, 389, 734, 4039, 4560, 25, 42517, 1112, 329, 198, 298, 871, 11, 290, 11149, ...
2.994236
1,041
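The docstring above describes the @username/#tag rewriting performed by `_QueryProcessor`, but that class is not in the excerpt. A hedged sketch of the described semantics follows; the function name and exact field syntax are assumptions, while the `field:"value"` and OR constructs are standard App Engine query-string syntax:

# Illustrative stand-in for the missing _QueryProcessor rewriting step.
def rewrite_query(query_str, name_field='user_nickname', tag_field='tag'):
    """Rewrite '@user'/'#tag' tokens into field restrictions.

    Multiple @usernames or #tags become an OR group, matching the
    behaviour the module docstring describes.
    """
    names, tags, terms = [], [], []
    for token in query_str.split():
        if token.startswith('@') and len(token) > 1:
            names.append('%s:"%s"' % (name_field, token[1:]))
        elif token.startswith('#') and len(token) > 1:
            tags.append('%s:"%s"' % (tag_field, token[1:]))
        else:
            terms.append(token)
    parts = [' '.join(terms)] if terms else []
    for group in (names, tags):
        if group:
            parts.append('(%s)' % ' OR '.join(group))
    return ' '.join(parts)

# e.g. rewrite_query('python @chris #flashcards #gre') returns
# 'python (user_nickname:"chris") (tag:"flashcards" OR tag:"gre")'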
import copy
import sys

PLAYER1, PLAYER2, EMPTY, BLOCKED = [0, 1, 2, 3]
S_PLAYER1, S_PLAYER2, S_EMPTY, S_BLOCKED = ['0', '1', '.', 'x']

CHARTABLE = [(PLAYER1, S_PLAYER1), (PLAYER2, S_PLAYER2),
             (EMPTY, S_EMPTY), (BLOCKED, S_BLOCKED)]

DIRS = [
    ((-1, 0), "up"),
    ((1, 0), "down"),
    ((0, 1), "right"),
    ((0, -1), "left")
]

# the information of the whole grid
[ 11748, 4866, 198, 11748, 25064, 198, 198, 31519, 1137, 16, 11, 28180, 1137, 17, 11, 38144, 9936, 11, 9878, 11290, 1961, 796, 685, 15, 11, 352, 11, 362, 11, 513, 60, 198, 50, 62, 31519, 1137, 16, 11, 311, 62, 31519, 1137, 17, 11, ...
2.027322
183
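CHARTABLE pairs each cell value with its display character. A tiny illustrative helper (not part of the original bot) shows how those pairs render a grid:

# Illustrative only: render a grid using the (value, character) pairs
# defined in CHARTABLE above.
def render(grid):
    lookup = dict(CHARTABLE)
    return "\n".join("".join(lookup[cell] for cell in row) for row in grid)


# A 2x3 example: player 1 next to a blocked cell, player 2 below.
print(render([[PLAYER1, EMPTY, BLOCKED],
              [EMPTY, PLAYER2, EMPTY]]))
# 0.x
# .1.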
import sys

sys.setrecursionlimit(10000)


def dfs(r, c):
    # Reconstructed definition: dfs() was called below but missing from this
    # snippet. It flood-fills the 4-connected component of 1-cells at (r, c).
    visit[r][c] = True
    for dr, dc in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        nr, nc = r + dr, c + dc
        if 0 <= nr < N and 0 <= nc < M and not visit[nr][nc] and board[nr][nc] == 1:
            dfs(nr, nc)


T = int(input())
for _ in range(T):
    M, N, K = map(int, input().split())
    board = [[0] * M for _ in range(N)]
    for _ in range(K):
        c, r = map(int, input().split())
        board[r][c] = 1
    visit = [[False] * M for _ in range(N)]
    cnt = 0
    for r in range(N):
        for c in range(M):
            if not visit[r][c] and board[r][c] == 1:
                cnt += 1
                dfs(r, c)
    for ele in visit:
        print(ele)
    print()
    print(cnt)
[ 11748, 25064, 201, 198, 17597, 13, 2617, 8344, 24197, 32374, 7, 49388, 8, 201, 198, 201, 198, 51, 796, 493, 7, 15414, 28955, 201, 198, 1640, 4808, 287, 2837, 7, 51, 2599, 201, 198, 220, 220, 220, 337, 11, 399, 11, 509, 796, 3975, ...
1.757576
330