hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
28314a6aa38b202f5f8b63288cc331bac9012154 | 542 | py | Python | lebus/print_schedule.py | johnlarusic/lebus | 9e8e80e08ad6bbb38a55cb9777386a40f21e5ee0 | [
"MIT"
] | 5 | 2019-04-24T16:43:51.000Z | 2020-10-11T18:22:39.000Z | lebus/print_schedule.py | johnlarusic/lebus | 9e8e80e08ad6bbb38a55cb9777386a40f21e5ee0 | [
"MIT"
] | null | null | null | lebus/print_schedule.py | johnlarusic/lebus | 9e8e80e08ad6bbb38a55cb9777386a40f21e5ee0 | [
"MIT"
] | null | null | null | from . import *
from datetime import datetime
def print_schedule(stops):
    """Print one line per stop with the minutes until each upcoming bus.

    Python 2 module (uses ``print`` statements).  Each ``stop`` is expected
    to expose ``route`` (a label) and ``next`` (an iterable of departure
    ``datetime`` objects).
    """
    now = datetime.now()
    print "Schedule as of {}".format(now)
    for stop in stops:
        # Trailing comma: keep the route label and its times on one line.
        print stop.route,
        if len(stop.next) > 0:
            for s in stop.next:
                val = "now"
                if s > now:
                    # Minutes until departure (Py2 integer division).
                    # NOTE(review): timedelta.seconds ignores whole days;
                    # assumes departures are always < 24h away -- confirm.
                    delta = (s - now).seconds / 60
                    if delta > 0:
                        val = delta
                print " {}".format(val),
            print
        else:
            # No predictions available for this stop.
            print " ?"
    print
| 20.074074 | 50 | 0.429889 |
69ef8e4568a321ed84d0a0b63028c16865e08e6a | 3,675 | py | Python | migrations/versions/10e455e8d206_tables.py | knadir/microblog-nadir | 48e4ec974ffd5d1c6c75374c3e7bc8dc3d558335 | [
"MIT"
] | null | null | null | migrations/versions/10e455e8d206_tables.py | knadir/microblog-nadir | 48e4ec974ffd5d1c6c75374c3e7bc8dc3d558335 | [
"MIT"
] | null | null | null | migrations/versions/10e455e8d206_tables.py | knadir/microblog-nadir | 48e4ec974ffd5d1c6c75374c3e7bc8dc3d558335 | [
"MIT"
] | null | null | null | """tables
Revision ID: 10e455e8d206
Revises:
Create Date: 2019-05-27 09:11:35.340861
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '10e455e8d206'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema: user, followers, message, notification, post.

    'user' is created first because every other table references user.id
    via a foreign key.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=64), nullable=True),
    sa.Column('email', sa.String(length=120), nullable=True),
    sa.Column('password_hash', sa.String(length=128), nullable=True),
    sa.Column('about_me', sa.String(length=140), nullable=True),
    sa.Column('last_seen', sa.DateTime(), nullable=True),
    sa.Column('last_message_read_time', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Unique indexes double as the login/lookup constraints.
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
    # Association table (no primary key): who follows whom.
    op.create_table('followers',
    sa.Column('follower_id', sa.Integer(), nullable=True),
    sa.Column('followed_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['followed_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['follower_id'], ['user.id'], )
    )
    op.create_table('message',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('sender_id', sa.Integer(), nullable=True),
    sa.Column('recipient_id', sa.Integer(), nullable=True),
    sa.Column('body', sa.String(length=140), nullable=True),
    sa.Column('timestamp', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['recipient_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['sender_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_message_timestamp'), 'message', ['timestamp'], unique=False)
    op.create_table('notification',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=128), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('timestamp', sa.Float(), nullable=True),
    sa.Column('payload_json', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_notification_name'), 'notification', ['name'], unique=False)
    op.create_index(op.f('ix_notification_timestamp'), 'notification', ['timestamp'], unique=False)
    op.create_table('post',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('body', sa.String(length=140), nullable=True),
    sa.Column('timestamp', sa.DateTime(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('language', sa.String(length=5), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_post_timestamp'), 'post', ['timestamp'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop everything created by upgrade(), in reverse dependency order.

    Indexes are dropped before their tables; dependent tables before 'user'.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_post_timestamp'), table_name='post')
    op.drop_table('post')
    op.drop_index(op.f('ix_notification_timestamp'), table_name='notification')
    op.drop_index(op.f('ix_notification_name'), table_name='notification')
    op.drop_table('notification')
    op.drop_index(op.f('ix_message_timestamp'), table_name='message')
    op.drop_table('message')
    op.drop_table('followers')
    op.drop_index(op.f('ix_user_username'), table_name='user')
    op.drop_index(op.f('ix_user_email'), table_name='user')
    op.drop_table('user')
    # ### end Alembic commands ###
| 41.761364 | 99 | 0.676463 |
df62aebef854e46b8008a4e733f3ced9148dba11 | 3,289 | py | Python | share/UNCLEAN/parse_cobol.py | racker/zeroclickinfo-fathead | 600fdf04fbf47b4035cc5451f245c87979446754 | [
"Apache-2.0"
] | 1 | 2021-01-05T16:48:23.000Z | 2021-01-05T16:48:23.000Z | share/UNCLEAN/parse_cobol.py | yanirs/zeroclickinfo-fathead | c06003560ef3368da5857df301938aad549a7d6b | [
"Apache-2.0"
] | null | null | null | share/UNCLEAN/parse_cobol.py | yanirs/zeroclickinfo-fathead | c06003560ef3368da5857df301938aad549a7d6b | [
"Apache-2.0"
] | 1 | 2016-06-12T06:12:02.000Z | 2016-06-12T06:12:02.000Z | from BeautifulSoup import BeautifulSoup
import re
import os
import sys
import string
# Regexes reused while scrubbing HTML from the scraped documentation.
openclosetags = re.compile('''<.*?>|</.*?>''',re.DOTALL)
spaces = re.compile('''\s+''',re.DOTALL)

# Input documents to parse (local copy of the Micro Focus COBOL docs).
files = []
files.append('./docs/cobol.htm')

# Argument-list synopsis for every COBOL intrinsic function, keyed by
# function name as it appears in the documentation headings.
args = {
'ABS':'( argument-1 )',
'ACOS':'( argument-1 )',
'ANNUITY':'( argument-1 argument-2 )',
'ASIN':'( argument-1 )',
'ATAN':'( argument-1 )',
'CHAR':'( argument-1 )',
'CHAR-NATIONAL':'( argument-1 )',
'COS':'( argument-1 )',
'CURRENT-DATE':'',
'DATE-OF-INTEGER':'( argument-1 )',
'DATE-TO-YYYYMMDD':'( argument-1 [argument-2] )',
'DAY-OF-INTEGER':'( argument-1 )',
'DAY-TO-YYYYDDD':'( argument-1 [argument-2] )',
'DISPLAY-OF':'( argument-1 [argument-2] )',
'E':'',
'EXP':'( argument-1 )',
'EXP10':'( argument-1 )',
'FACTORIAL':'( argument-1 )',
'FRACTION-PART':'( argument-1 )',
'INTEGER':'( argument-1 )',
'INTEGER-OF-DATE':'( argument-1 )',
'INTEGER-OF-DAY':'( argument-1 )',
'INTEGER-PART':'( argument-1 )',
'LENGTH':'( argument-1 )',
'LENGTH-AN':'( argument-1 )',
'LOG':'( argument-1 )',
'LOG10':'( argument-1 )',
'LOWER-CASE':'( argument-1 )',
'MAX':'( argument-1 )',
'MEAN':'( { argument-1 } ... )',
'MEDIAN':'( { argument-1 } ... )',
'MIDRANGE':'( { argument-1 } ... )',
'MIN':'( { argument-1 } ... )',
'MOD':'( argument-1 argument-2 )',
'NATIONAL-OF':'( argument-1 [argument-2] )',
'NUMVAL':'( argument-1 )',
'NUMVAL-C':'( argument-1 [argument-2] )',
'ORD':'( argument-1 )',
'ORD-MAX':'( { argument-1 } ... )',
'ORD-MIN':'( { argument-1 } ... )',
'PI':'',
'PRESENT-VALUE':'( argument-1 [argument-2] )',
'RANDOM':'[ ( argument-1 ) ]',
'RANGE':'( { argument-1 } ... )',
'REM':'( argument-1 argument-2 )',
'REVERSE':'( argument-1 )',
'SIGN':'( argument-1 )',
'SIN':'( argument-1 )',
'SQRT':'( argument-1 )',
'STANDARD-DEVIATION':'( { argument-1 } ... )',
'SUM':'( { argument-1 } ... )',
'TAN':'( argument-1 )',
'UPPER-CASE':'( argument-1 )',
'VARIANCE':'( { argument-1 } ... )',
'WHEN-COMPILED':'',
'YEAR-TO-YYYY':'( argument-1 [argument-2] ) ',
}
for file in files:
filecontents = open(file).read()
soup = BeautifulSoup(filecontents)
for s in soup.findAll('h3'):
t = re.compile('''[0-9]{1,2}\.[0-9]{1,2}\.[0-9]{1,2} ''',re.DOTALL)
name = t.sub('',str(s))
name = openclosetags.sub('',name.replace('The ','').replace(' Function','').replace(' function',''))
desc = str(s.nextSibling.nextSibling)
if "dialm.gif" in desc:
desc = str(s.nextSibling.nextSibling.nextSibling.nextSibling)
desc = openclosetags.sub('',desc)
url = "http://supportline.microfocus.com/documentation/books/sx20books/lrpdf7.htm#%s"%s.findAll('a')[0]['name']
synopsis = "FUNCTION %s %s"%(name,args[name.strip()])
if len(sys.argv) == 1 or sys.argv[1].lower() == 'tsv':
print "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s"%(name,'',url,desc.replace("\n","__NEWLINE__"),synopsis.replace("\n","__NEWLINE__"),'','cobol','en')
if sys.argv[1].lower() == 'sql':
print '''INSERT INTO functions (`id`, `name`, `namespace`, `url`, `description`, `synopsis`, `detail`, `type`, `lang`) VALUES (NULL, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s');'''%(name,'',url,desc,synopsis,'','cobol','en')
| 33.561224 | 232 | 0.543934 |
a98f6b390bfa2bb08f5758534a5a09c31d3a1b04 | 6,133 | py | Python | yt/utilities/parallel_tools/io_runner.py | tukss/yt | 8bf6fce609cad3d4b291ebd94667019ab2e18377 | [
"BSD-3-Clause-Clear"
] | null | null | null | yt/utilities/parallel_tools/io_runner.py | tukss/yt | 8bf6fce609cad3d4b291ebd94667019ab2e18377 | [
"BSD-3-Clause-Clear"
] | 8 | 2020-04-02T16:51:49.000Z | 2022-01-11T14:12:44.000Z | yt/utilities/parallel_tools/io_runner.py | tukss/yt | 8bf6fce609cad3d4b291ebd94667019ab2e18377 | [
"BSD-3-Clause-Clear"
] | 2 | 2020-08-12T15:46:11.000Z | 2021-02-09T13:09:17.000Z | import time
from contextlib import contextmanager
import numpy as np
from yt.utilities.io_handler import BaseIOHandler
from yt.utilities.logger import ytLogger as mylog
from .parallel_analysis_interface import ProcessorPool, parallel_objects
try:
from .parallel_analysis_interface import MPI
except ImportError:
pass
YT_TAG_MESSAGE = 317 # Cell 317 knows where to go
class IOCommunicator(BaseIOHandler):
    """IO-node side of the split IO/work communicator pair.

    Each IO rank loads the field data for the grids it owns into an
    in-memory queue and then serves that data over MPI to the "work"
    group (see IOHandlerRemote / io_nodes).
    """

    def __init__(self, ds, wg, pool):
        mylog.info("Initializing IOCommunicator")
        self.ds = ds
        self.wg = wg  # We don't need to use this!
        self.pool = pool
        self.comm = pool.comm
        # We read our grids here.  parallel_objects splits grid ownership
        # across the IO ranks; `storage` collects the rank->grid map that
        # is broadcast below so the work group knows whom to ask.
        self.grids = []
        storage = {}
        grids = ds.index.grids.tolist()
        grids.sort(key=lambda a: a.filename)
        for sto, g in parallel_objects(grids, storage=storage):
            sto.result = self.comm.rank
            sto.result_id = g.id
            self.grids.append(g)
        self._id_offset = ds.index.grids[0]._id_offset
        mylog.info("Reading from disk ...")
        self.initialize_data()
        mylog.info("Broadcasting ...")
        self.comm.comm.bcast(storage, root=wg.ranks[0])
        mylog.info("Done.")
        # Outstanding non-blocking MPI send requests (see _send_data).
        self.hooks = []

    def initialize_data(self):
        """Read all grid and particle fields for the grids this rank owns."""
        ds = self.ds
        fields = [
            f for f in ds.field_list if not ds.field_info[f].sampling_type == "particle"
        ]
        pfields = [
            f for f in ds.field_list if ds.field_info[f].sampling_type == "particle"
        ]
        # Preload is only defined for Enzo ...
        if ds.index.io._dataset_type == "enzo_packed_3d":
            self.queue = ds.index.io.queue
            ds.index.io.preload(self.grids, fields)
            for g in self.grids:
                for f in fields:
                    if f not in self.queue[g.id]:
                        d = np.zeros(g.ActiveDimensions, dtype="float64")
                        self.queue[g.id][f] = d
                for f in pfields:
                    self.queue[g.id][f] = self._read(g, f)
        else:
            self.queue = {}
            for g in self.grids:
                # Bug fix: create the per-grid sub-dict before assigning
                # into it; the original ``self.queue[g.id][f] = ...`` on a
                # freshly-created empty dict raised KeyError for every grid.
                grid_queue = self.queue.setdefault(g.id, {})
                for f in fields + pfields:
                    grid_queue[f] = ds.index.io._read(g, f)

    def _read(self, g, f):
        """Read one field for one grid, tolerating optional fields."""
        fi = self.ds.field_info[f]
        if fi.sampling_type == "particle" and g.NumberOfParticles == 0:
            # because this gets upcast to float
            return np.array([], dtype="float64")
        try:
            temp = self.ds.index.io._read_data_set(g, f)
        except Exception:  # self.ds.index.io._read_exception as exc:
            if fi.not_in_all:
                # Field legitimately absent from some grids: zero-fill.
                temp = np.zeros(g.ActiveDimensions, dtype="float64")
            else:
                raise
        return temp

    def wait(self):
        """Serve read requests until an 'end' message arrives."""
        status = MPI.Status()
        while True:
            if self.comm.comm.Iprobe(MPI.ANY_SOURCE, YT_TAG_MESSAGE, status=status):
                msg = self.comm.comm.recv(source=status.source, tag=YT_TAG_MESSAGE)
                if msg["op"] == "end":
                    mylog.debug("Shutting down IO.")
                    break
                self._send_data(msg, status.source)
                status = MPI.Status()
            else:
                # Nothing pending; don't busy-wait on the probe.
                time.sleep(1e-2)

    def _send_data(self, msg, dest):
        """Start a non-blocking send of the requested field to *dest*."""
        grid_id = msg["grid_id"]
        field = msg["field"]
        ts = self.queue[grid_id][field].astype("float64")
        mylog.debug("Opening send to %s (%s)", dest, ts.shape)
        # Keep a reference to the request so the buffer stays alive.
        self.hooks.append(self.comm.comm.Isend([ts, MPI.DOUBLE], dest=dest))
class IOHandlerRemote(BaseIOHandler):
    """Work-node side: fetches grid data over MPI from the IO group."""

    _dataset_type = "remote"

    def __init__(self, ds, wg, pool):
        self.ds = ds
        self.wg = wg  # probably won't need
        self.pool = pool
        self.comm = pool.comm
        # Receive the grid-id -> owning-IO-rank map broadcast by the IO group.
        self.proc_map = self.comm.comm.bcast(None, root=pool["io"].ranks[0])
        super(IOHandlerRemote, self).__init__()

    def _read_data_set(self, grid, field):
        """Request one (grid, field) array from the IO rank that owns it."""
        dest = self.proc_map[grid.id]
        msg = dict(grid_id=grid.id, field=field, op="read")
        mylog.debug("Requesting %s for %s from %s", field, grid, dest)
        # Pre-allocate the receive buffer with the expected size; post the
        # non-blocking receive BEFORE sending the request to avoid a race.
        if self.ds.field_info[field].sampling_type == "particle":
            data = np.empty(grid.NumberOfParticles, "float64")
        else:
            data = np.empty(grid.ActiveDimensions, "float64")
        hook = self.comm.comm.Irecv([data, MPI.DOUBLE], source=dest)
        self.comm.comm.send(msg, dest=dest, tag=YT_TAG_MESSAGE)
        mylog.debug("Waiting for data.")
        MPI.Request.Wait(hook)
        return data

    def _read_data_slice(self, grid, field, axis, coord):
        """Fetch the whole grid and slice it locally along *axis* at *coord*."""
        sl = [slice(None), slice(None), slice(None)]
        sl[axis] = slice(coord, coord + 1)
        # sl = tuple(reversed(sl))
        return self._read_data_set(grid, field)[tuple(sl)]

    def terminate(self):
        """Tell every IO rank to shut down (sent from work rank 0 only)."""
        msg = dict(op="end")
        if self.wg.comm.rank == 0:
            for rank in self.pool["io"].ranks:
                mylog.debug("Sending termination message to %s", rank)
                self.comm.comm.send(msg, dest=rank, tag=YT_TAG_MESSAGE)
@contextmanager
def remote_io(ds, wg, pool):
    """Temporarily swap *ds*'s IO handler for an IOHandlerRemote.

    The remote handler is always terminated and the original handler
    always restored, even if the wrapped block raises.
    """
    original_io = ds.index.io
    ds.index.io = IOHandlerRemote(ds, wg, pool)
    try:
        yield
    finally:
        # Bug fix: without try/finally an exception in the with-body
        # skipped terminate() (leaving IO ranks waiting forever) and left
        # ds permanently pointing at the remote handler.
        ds.index.io.terminate()
        ds.index.io = original_io
def io_nodes(fn, n_io, n_work, func, *args, **kwargs):
    """Run *func(ds, ...)* on *n_work* ranks while *n_io* ranks serve IO.

    The MPI world is split into an "io" group (serves grid data from
    memory) and a "work" group (runs the analysis via remote_io).  The
    result computed on the work group is broadcast to every rank.

    :param fn: dataset path passed to yt.loaders.load on every rank
    :param n_io: number of ranks dedicated to IO
    :param n_work: number of ranks running *func*
    :returns: the broadcast return value of *func*
    """
    from yt.loaders import load

    pool, wg = ProcessorPool.from_sizes([(n_io, "io"), (n_work, "work")])
    rv = None
    if wg.name == "work":
        ds = load(fn)
        with remote_io(ds, wg, pool):
            rv = func(ds, *args, **kwargs)
    elif wg.name == "io":
        ds = load(fn)
        io = IOCommunicator(ds, wg, pool)
        # Blocks here serving requests until the work group terminates us.
        io.wait()
    # We should broadcast the result
    rv = pool.comm.mpi_bcast(rv, root=pool["work"].ranks[0])
    pool.free_all()
    mylog.debug("Return value: %s", rv)
    return rv
# Here is an example of how to use this functionality.
if __name__ == "__main__":

    def gq(ds):
        """Example analysis: total cell mass over the whole domain."""
        dd = ds.all_data()
        return dd.quantities["TotalQuantity"]("CellMassMsun")

    # 8 IO ranks serving data to 24 work ranks.
    q = io_nodes("DD0087/DD0087", 8, 24, gq)
    mylog.info(q)
| 34.072222 | 88 | 0.576879 |
a46967909f0cc76d30618f8f3a171952c699ed78 | 500 | py | Python | pokepay/request/update_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | [
"MIT"
] | null | null | null | pokepay/request/update_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | [
"MIT"
] | null | null | null | pokepay/request/update_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | [
"MIT"
] | 1 | 2022-01-28T03:00:12.000Z | 2022-01-28T03:00:12.000Z | # DO NOT EDIT: File is generated by code generator.
from pokepay_partner_python_sdk.pokepay.request.request import PokepayRequest
from pokepay_partner_python_sdk.pokepay.response.shop_with_accounts import ShopWithAccounts
class UpdateShop(PokepayRequest):
    """PATCH /shops/<shop_id> -- partially update a shop.

    Any extra keyword arguments become the PATCH body; the response is
    deserialized into a ShopWithAccounts.
    """

    def __init__(self, shop_id, **rest_args):
        self.path = "/shops/" + shop_id
        self.method = "PATCH"
        self.body_params = dict(rest_args)
        self.response_class = ShopWithAccounts
acac95a767e449f01f3e061f3f4c85c6cabf597b | 507 | py | Python | ibsng/handler/bw/del_leaf.py | ParspooyeshFanavar/pyibsng | d48bcf4f25e3f23461528bf0ff8870cc3d537444 | [
"MIT"
] | 6 | 2018-03-06T10:16:36.000Z | 2021-12-05T12:43:10.000Z | ibsng/handler/bw/del_leaf.py | ParspooyeshFanavar/pyibsng | d48bcf4f25e3f23461528bf0ff8870cc3d537444 | [
"MIT"
] | 3 | 2018-03-06T10:27:08.000Z | 2022-01-02T15:21:27.000Z | ibsng/handler/bw/del_leaf.py | ParspooyeshFanavar/pyibsng | d48bcf4f25e3f23461528bf0ff8870cc3d537444 | [
"MIT"
] | 3 | 2018-01-06T16:28:31.000Z | 2018-09-17T19:47:19.000Z | """Delete leaf API method."""
from ibsng.handler.handler import Handler
class delLeaf(Handler):
    """Delete leaf method class."""

    def control(self):
        """Validate inputs after setup method.

        :return: None
        :rtype: None
        """
        # is_valid is provided by Handler; presumably it raises when
        # leaf_name is not a str -- confirm against the base class.
        self.is_valid(self.leaf_name, str)

    def setup(self, leaf_name):
        """Setup required parameters.

        :param str leaf_name: leaf name
        :return: None
        :rtype: None
        """
        self.leaf_name = leaf_name
a7f40a825d6104dc655880f126b42dfea4dd1831 | 408 | py | Python | app/lcars.py | aliasmaus/rpi_lcars | 6757432a9bb5584ae351d3ff6bd4c312981b9345 | [
"MIT"
] | null | null | null | app/lcars.py | aliasmaus/rpi_lcars | 6757432a9bb5584ae351d3ff6bd4c312981b9345 | [
"MIT"
] | null | null | null | app/lcars.py | aliasmaus/rpi_lcars | 6757432a9bb5584ae351d3ff6bd4c312981b9345 | [
"MIT"
] | null | null | null | from screens.authorize import ScreenAuthorize
from screens.main import ScreenMain
from ui.ui import UserInterface
import config
if __name__ == "__main__":
    # Boot straight into the main screen; swap in ScreenAuthorize() to
    # require the access-code screen first.
    #firstScreen = ScreenAuthorize()
    firstScreen = ScreenMain()
    ui = UserInterface(firstScreen, config.RESOLUTION, config.UI_PLACEMENT_MODE, config.FPS, config.DEV_MODE,
                       config.SOUND)
    # Main loop: tick() pumps events and rendering; blocks forever.
    while (True):
        ui.tick()
3d9d702ee4033b6908d837c417f0044bb28c5859 | 10,459 | py | Python | tests/unit/modules/test_ssh.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/test_ssh.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/test_ssh.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# import Python Libs
from __future__ import absolute_import, unicode_literals, print_function
import subprocess
import tempfile
from textwrap import dedent
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import skipIf, TestCase
from tests.support.mock import (
MagicMock,
NO_MOCK,
NO_MOCK_REASON,
patch
)
# Import Salt Libs
import salt.utils.files
import salt.utils.platform
import salt.modules.cmdmod as cmd
import salt.modules.ssh as ssh
from salt.exceptions import CommandExecutionError
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
def _mock_ssh_keyscan(*args, **kwargs):
    """Stand-in for ``cmd.run`` that fakes ``ssh-keyscan`` output.

    Returns hashed-hostname output when ``-H`` is among the arguments and
    plain ``[host]:port`` output otherwise; any non-keyscan command falls
    through to the real ``cmd.run``.
    """
    if 'ssh-keyscan' in args[0]:
        if '-H' in args[0]:
            return dedent('''
                # home.waynewerner.com:55555 SSH-2.0-OpenSSH_7.4p1 Raspbian-10+deb9u3
                |1|0yq63FhgFbcGawJwr7XyBPEL2Fs=|HkqTDf6bE0p2CMLHyCY7fdH5Uo0= ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDCY7tcbLrsTFPb2je3VFiH9cC9+ac04H0X8BQG7croyqvdUY5zTLmIidXJe6R1zUS7Jqpy/pXwHSB5HWpsMu+ytovPZ/LKl6AiYlcdcpS//QASb7TbcDzHFIlcdCoL5C5TOHXdRKrgIa64akuPMxvXxbgXAHjud+2jK1FhGTBbTkbrWA4xhDukWkswLpCRyHhsNzJd/seP651UDd/3rkrbgFSN9o/4LXZtsEfV3xRfJOaZq5/SW+sDVNlArFgg9EXXOzrKKWkSjS9BnN0hBaK3IyJfUAwppLYHgF0LvcNl4jF38EAU00pkNX5mknGbAFF7OMkcQI9/vkl+jaajv8Q3
                # home.waynewerner.com:55555 SSH-2.0-OpenSSH_7.4p1 Raspbian-10+deb9u3
                |1|F1wCSzHHJMMPw/DAuRJGMKeTwFk=|GKQ9FyLzHqe0n+WaWKWHzzmS5/c= ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBIOEPebJNvI/rqc0ttSuow97J6i8k3YLRF69v1GhF1+gCvM9NW1UQs1gzwB/cLPds9PuwCgyKzUxVqpP7ua41WU=
                # home.waynewerner.com:55555 SSH-2.0-OpenSSH_7.4p1 Raspbian-10+deb9u3
                |1|SZAE/yAB5UH3OOJvkU6ks1yfHO8=|lay+ajhv8yXZ9kke2j86F7RJunw= ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBGI17y+DW7z4q4r13Ewd/WnrorEwQWqaE2unjU1TS7G
                ''').lstrip()
        else:
            return dedent('''
                [example.com]:12345 ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDCY7tcbLrsTFPb2je3VFiH9cC9+ac04H0X8BQG7croyqvdUY5zTLmIidXJe6R1zUS7Jqpy/pXwHSB5HWpsMu+ytovPZ/LKl6AiYlcdcpS//QASb7TbcDzHFIlcdCoL5C5TOHXdRKrgIa64akuPMxvXxbgXAHjud+2jK1FhGTBbTkbrWA4xhDukWkswLpCRyHhsNzJd/seP651UDd/3rkrbgFSN9o/4LXZtsEfV3xRfJOaZq5/SW+sDVNlArFgg9EXXOzrKKWkSjS9BnN0hBaK3IyJfUAwppLYHgF0LvcNl4jF38EAU00pkNX5mknGbAFF7OMkcQI9/vkl+jaajv8Q3
                [example.com]:12345 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBIOEPebJNvI/rqc0ttSuow97J6i8k3YLRF69v1GhF1+gCvM9NW1UQs1gzwB/cLPds9PuwCgyKzUxVqpP7ua41WU=
                [example.com]:12345 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBGI17y+DW7z4q4r13Ewd/WnrorEwQWqaE2unjU1TS7G
                ''').lstrip()
    else:
        return cmd.run(*args, **kwargs)
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SSHAuthKeyTestCase(TestCase, LoaderModuleMockMixin):
    """
    TestCase for salt.modules.ssh
    """

    def setup_loader_modules(self):
        # user.info returns whatever the current test assigned to
        # self.user_info_mock (None when a test does not need it).
        return {
            ssh: {
                "__salt__": {
                    "user.info": lambda u: getattr(self, "user_info_mock", None),
                }
            }
        }

    def tearDown(self):
        # Drop per-test user.info data so tests cannot leak into each other.
        try:
            delattr(self, "user_info_mock")
        except AttributeError:
            pass

    def test_expand_user_token(self):
        """
        Test if the %u, %h, and %% tokens are correctly expanded
        """
        output = ssh._expand_authorized_keys_path("/home/%u", "user", "/home/user")
        self.assertEqual(output, "/home/user")

        output = ssh._expand_authorized_keys_path("/home/%h", "user", "/home/user")
        self.assertEqual(output, "/home//home/user")

        output = ssh._expand_authorized_keys_path("%h/foo", "user", "/home/user")
        self.assertEqual(output, "/home/user/foo")

        output = ssh._expand_authorized_keys_path(
            "/srv/%h/aaa/%u%%", "user", "/home/user"
        )
        self.assertEqual(output, "/srv//home/user/aaa/user%")

        # A bare/unterminated % in the path must raise.
        user = "dude"
        home = "/home/dude"
        path = "/home/dude%"
        self.assertRaises(
            CommandExecutionError, ssh._expand_authorized_keys_path, path, user, home
        )

        path = "/home/%dude"
        self.assertRaises(
            CommandExecutionError, ssh._expand_authorized_keys_path, path, user, home
        )

    def test_set_auth_key_invalid(self):
        """
        set_auth_key must reject a public key whose base64 is malformed.
        """
        self.user_info_mock = {"home": "/dev/null"}
        # Inserting invalid public key should be rejected
        invalid_key = "AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY"  # missing padding
        self.assertEqual(ssh.set_auth_key("user", invalid_key), "Invalid public key")

    def test_replace_auth_key(self):
        """
        Test the _replace_auth_key with some different authorized_keys examples
        """
        # First test a known working example, gathered from the authorized_keys file
        # in the integration test files.
        enc = "ssh-rsa"
        key = (
            "AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+"
            "PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNl"
            "GEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWp"
            "XLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal"
            "72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi"
            "/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ=="
        )
        options = 'command="/usr/local/lib/ssh-helper"'
        email = "github.com"
        empty_line = "\n"
        comment_line = "# this is a comment\n"

        # Write out the authorized key to a temporary file
        temp_file = tempfile.NamedTemporaryFile(delete=False, mode="w+")
        temp_file.close()

        with salt.utils.files.fopen(temp_file.name, "w") as _fh:
            # Add comment
            _fh.write(comment_line)
            # Add empty line for #41335
            _fh.write(empty_line)
            _fh.write("{0} {1} {2} {3}".format(options, enc, key, email))

        with patch.dict(ssh.__salt__, {"user.info": MagicMock(return_value={})}):
            with patch(
                "salt.modules.ssh._get_config_file",
                MagicMock(return_value=temp_file.name),
            ):
                ssh._replace_auth_key("foo", key, config=temp_file.name)

        # The previous authorized key should have been replaced by the simpler one
        with salt.utils.files.fopen(temp_file.name) as _fh:
            file_txt = salt.utils.stringutils.to_unicode(_fh.read())
            self.assertIn(enc, file_txt)
            self.assertIn(key, file_txt)
            self.assertNotIn(options, file_txt)
            self.assertNotIn(email, file_txt)

        # Now test a very simple key using ecdsa instead of ssh-rsa and with multiple options
        enc = "ecdsa-sha2-nistp256"
        key = "abcxyz"

        with salt.utils.files.fopen(temp_file.name, "a") as _fh:
            _fh.write(salt.utils.stringutils.to_str("{0} {1}".format(enc, key)))

        # Replace the simple key from before with the more complicated options + new email
        # Option example is taken from Pull Request #39855
        options = [
            "no-port-forwarding",
            "no-agent-forwarding",
            "no-X11-forwarding",
            'command="echo \'Please login as the user "ubuntu" rather than the user "root".\'',
        ]
        email = "foo@example.com"

        with patch.dict(ssh.__salt__, {"user.info": MagicMock(return_value={})}):
            with patch(
                "salt.modules.ssh._get_config_file",
                MagicMock(return_value=temp_file.name),
            ):
                ssh._replace_auth_key(
                    "foo",
                    key,
                    enc=enc,
                    comment=email,
                    options=options,
                    config=temp_file.name,
                )

        # Assert that the new line was added as-is to the file
        with salt.utils.files.fopen(temp_file.name) as _fh:
            file_txt = salt.utils.stringutils.to_unicode(_fh.read())
            self.assertIn(enc, file_txt)
            self.assertIn(key, file_txt)
            self.assertIn("{0} ".format(",".join(options)), file_txt)
            self.assertIn(email, file_txt)
            self.assertIn(empty_line, file_txt)
            self.assertIn(comment_line, file_txt)

    @skipIf(not salt.utils.path.which('ssh-keyscan'), 'ssh-keyscan not installed')
    def test_recv_known_hosts_hashed_shoud_be_findable_by_ssh_keygen(self):
        """
        Hashed known_hosts entries written by recv_known_host_entries must
        be resolvable by ``ssh-keygen -F [host]:port`` (which exits
        non-zero -- failing this test -- when no entry matches).
        """
        hostname = 'example.com'
        port = 12345
        with tempfile.NamedTemporaryFile(mode='w+', delete=False) as temp_file:
            with patch.dict(ssh.__salt__, {'cmd.run': MagicMock(side_effect=_mock_ssh_keyscan)}):
                entries = ssh.recv_known_host_entries(
                    hostname=hostname,
                    port=port,
                    hash_known_hosts=True,
                )
                for entry in entries:
                    print(
                        '{0[hostname]} {0[enc]} {0[key]}'.format(entry),
                        file=temp_file,
                    )
                temp_file.flush()
            result = subprocess.check_output([
                'ssh-keygen',
                '-f',
                temp_file.name,
                '-F',
                '[{hostname}]:{port}'.format(hostname=hostname, port=port),
            ])

    def test_recv_known_hosts_hashed_should_return_hashed_hostnames(self):
        """
        With hash_known_hosts=True no returned hostname may start with the
        plaintext hostname.
        """
        hostname = 'example.com'
        port = 12345
        # NOTE(review): temp_file appears unused in this test -- possibly a
        # leftover from the test above; confirm before removing.
        with tempfile.NamedTemporaryFile(mode='w+', delete=False) as temp_file:
            with patch.dict(ssh.__salt__, {'cmd.run': MagicMock(side_effect=_mock_ssh_keyscan)}):
                entries = ssh.recv_known_host_entries(
                    hostname=hostname,
                    port=port,
                    hash_known_hosts=True,
                )
                hostnames = [e.get('hostname') for e in entries]
                # We better have *some* hostnames, or the next test is
                # irrelevant
                self.assertTrue(bool(hostnames))
                bad_hostnames = [h for h in hostnames if h.startswith(hostname)]
                self.assertFalse(bool(bad_hostnames), bad_hostnames)
| 43.761506 | 457 | 0.637537 |
a5960cf1f993479f6aa2a4138f14b88dcd3daa94 | 4,687 | py | Python | bin/update_site.py | Mozilla-GitHub-Standards/d1f672f1bcefb364864bc3fcece0e1461840e6ed1ade1fe7f67552d85c28621f | d124c26bb50f8d78c032802bee1a7a5213cee762 | [
"BSD-3-Clause"
] | null | null | null | bin/update_site.py | Mozilla-GitHub-Standards/d1f672f1bcefb364864bc3fcece0e1461840e6ed1ade1fe7f67552d85c28621f | d124c26bb50f8d78c032802bee1a7a5213cee762 | [
"BSD-3-Clause"
] | null | null | null | bin/update_site.py | Mozilla-GitHub-Standards/d1f672f1bcefb364864bc3fcece0e1461840e6ed1ade1fe7f67552d85c28621f | d124c26bb50f8d78c032802bee1a7a5213cee762 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
"""
Usage: update_site.py [options]
Updates a server's sources, vendor libraries, packages CSS/JS
assets, migrates the database, and other nifty deployment tasks.
Options:
-h, --help show this help message and exit
-e ENVIRONMENT, --environment=ENVIRONMENT
Type of environment. One of (prod|dev|stage) Example:
update_site.py -e stage
-v, --verbose Echo actions before taking them.
"""
import os
import sys
from textwrap import dedent
from optparse import OptionParser
#from hashlib import md5
# Constants
PROJECT = 0
VENDOR = 1
ENV_BRANCH = {
# 'environment': [PROJECT_BRANCH, VENDOR_BRANCH],
'dev': ['base', 'master'],
'stage': ['master', 'master'],
'prod': ['master', 'master'],
}
# The URL of the SVN repository with the localization files (*.po). If you set
# it to a non-empty value, remember to `git rm --cached -r locale` in the root
# of the project. Example:
# LOCALE_REPO_URL = 'https://svn.mozilla.org/projects/l10n-misc/trunk/playdoh/locale'
# LOCALE_REPO_URL = ''
GIT_PULL = "git pull -q origin %(branch)s"
GIT_SUBMODULE = "git submodule update --init --recursive"
#SVN_CO = "svn checkout --force %(url)s locale"
#SVN_UP = "svn update"
#COMPILE_MO = "./bin/compile-mo.sh %(localedir)s %(unique)s"
EXEC = 'exec'
CHDIR = 'chdir'
def update_site(env, debug):
    """Run through commands to update this site.

    :param env: deployment environment; key into ENV_BRANCH
    :param debug: when True, echo each command before running it
    """
    error_updating = False
    here = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    #locale = os.path.join(here, 'locale')
    #unique = md5(locale).hexdigest()

    #project_branch = {'branch': ENV_BRANCH[env][PROJECT]}
    #vendor_branch = {'branch': ENV_BRANCH[env][VENDOR]}

    # Command list: (CHDIR, path) changes directory, (EXEC, cmd) shells out.
    commands = [
        (CHDIR, here),
        #(EXEC, GIT_PULL % project_branch),
        #(EXEC, GIT_SUBMODULE),
    ]

    # Checkout the locale repo into locale/ if the URL is known
    #if LOCALE_REPO_URL and not os.path.exists(os.path.join(locale, '.svn')):
    #    commands += [
    #        (EXEC, SVN_CO % {'url': LOCALE_REPO_URL}),
    #        (EXEC, COMPILE_MO % {'localedir': locale, 'unique': unique}),
    #    ]

    # Update locale dir if applicable
    #if os.path.exists(os.path.join(locale, '.svn')):
    #    commands += [
    #        (CHDIR, locale),
    #        (EXEC, SVN_UP),
    #        (CHDIR, here),
    #        (EXEC, COMPILE_MO % {'localedir': locale, 'unique': unique}),
    #    ]
    #elif os.path.exists(os.path.join(locale, '.git')):
    #    commands += [
    #        (CHDIR, locale),
    #        (EXEC, GIT_PULL % 'master'),
    #        (CHDIR, here),
    #    ]

    commands += [
    #    (CHDIR, os.path.join(here, 'vendor')),
    #    (EXEC, GIT_PULL % vendor_branch),
    #    (EXEC, GIT_SUBMODULE),
    #    (CHDIR, os.path.join(here)),
        (EXEC, 'python2.6 manage.py collectstatic --noinput'),
        (EXEC, 'python2.6 manage.py migrate'),
        # un-comment if you haven't moved to django-compressor yet
        #(EXEC, 'python2.6 manage.py compress_assets'),
    ]

    for cmd, cmd_args in commands:
        if CHDIR == cmd:
            if debug:
                sys.stdout.write("cd %s\n" % cmd_args)
            os.chdir(cmd_args)
        elif EXEC == cmd:
            if debug:
                sys.stdout.write("%s\n" % cmd_args)
            # os.system returns non-zero on failure; stop at first error.
            if not 0 == os.system(cmd_args):
                error_updating = True
                break
        else:
            raise Exception("Unknown type of command %s" % cmd)

    if error_updating:
        sys.stderr.write("There was an error while updating. Please try again "
                         "later. Aborting.\n")
def main():
    """Handle command line args and dispatch to update_site()."""
    debug = False
    usage = dedent("""\
        %prog [options]

        Updates a server's sources, vendor libraries, packages CSS/JS
        assets, migrates the database, and other nifty deployment tasks.
        """.rstrip())

    options = OptionParser(usage=usage)
    e_help = "Type of environment. One of (%s) Example: update_site.py \
-e stage" % '|'.join(ENV_BRANCH.keys())
    options.add_option("-e", "--environment", help=e_help)
    options.add_option("-v", "--verbose",
                       help="Echo actions before taking them.",
                       action="store_true", dest="verbose")
    (opts, _) = options.parse_args()

    if opts.verbose:
        debug = True

    # Only run for a known environment; otherwise print usage and exit 1.
    if opts.environment in ENV_BRANCH.keys():
        update_site(opts.environment, debug)
    else:
        sys.stderr.write("Invalid environment!\n")
        options.print_help(sys.stderr)
        sys.exit(1)
if __name__ == '__main__':
main()
| 32.324138 | 85 | 0.587583 |
6eefd9661210f2253b0b03bdcde39e792369baa9 | 19,232 | py | Python | models/stylegan2/model.py | aiksir/pixel2style2pixel | 147b29c1d4bd10229df0fdf6e67705d3401f3372 | [
"MIT"
] | 2,450 | 2020-10-01T11:04:45.000Z | 2022-03-31T09:46:00.000Z | models/stylegan2/model.py | aiksir/pixel2style2pixel | 147b29c1d4bd10229df0fdf6e67705d3401f3372 | [
"MIT"
] | 255 | 2020-10-07T02:50:39.000Z | 2022-03-29T15:07:23.000Z | models/stylegan2/model.py | aiksir/pixel2style2pixel | 147b29c1d4bd10229df0fdf6e67705d3401f3372 | [
"MIT"
] | 438 | 2020-10-01T11:06:35.000Z | 2022-03-31T10:27:00.000Z | import math
import random
import torch
from torch import nn
from torch.nn import functional as F
from models.stylegan2.op import FusedLeakyReLU, fused_leaky_relu, upfirdn2d
class PixelNorm(nn.Module):
    """Normalize each feature vector to (approximately) unit RMS along dim 1."""

    def __init__(self):
        super().__init__()

    def forward(self, input):
        # Mean of squares across the channel dimension, kept for broadcasting;
        # the epsilon guards against division by zero.
        mean_sq = torch.mean(input ** 2, dim=1, keepdim=True)
        return input * torch.rsqrt(mean_sq + 1e-8)
def make_kernel(k):
    """Build a normalized float32 blur kernel from a 1-D or 2-D tap list.

    A 1-D input is expanded to its separable 2-D kernel via an outer
    product; the result is normalized so its entries sum to 1.
    """
    kernel = torch.tensor(k, dtype=torch.float32)
    if kernel.ndim == 1:
        kernel = kernel[:, None] * kernel[None, :]
    return kernel / kernel.sum()
class Upsample(nn.Module):
    """Upsample by an integer factor with a FIR anti-aliasing filter.

    Wraps the ``upfirdn2d`` op: zero-insertion upsample by ``factor``,
    then filter with ``kernel``.  The kernel is scaled by ``factor ** 2``
    to compensate for the energy lost during zero insertion.
    """

    def __init__(self, kernel, factor=2):
        super().__init__()

        self.factor = factor
        kernel = make_kernel(kernel) * (factor ** 2)
        self.register_buffer('kernel', kernel)

        # Asymmetric padding chosen so the output is exactly ``factor``
        # times the input resolution.
        p = kernel.shape[0] - factor

        pad0 = (p + 1) // 2 + factor - 1
        pad1 = p // 2

        self.pad = (pad0, pad1)

    def forward(self, input):
        out = upfirdn2d(input, self.kernel, up=self.factor, down=1, pad=self.pad)

        return out
class Downsample(nn.Module):
    """Downsample by an integer factor with a FIR anti-aliasing filter.

    Wraps the ``upfirdn2d`` op: filter with ``kernel`` first, then keep
    every ``factor``-th sample.
    """

    def __init__(self, kernel, factor=2):
        super().__init__()

        self.factor = factor
        kernel = make_kernel(kernel)
        self.register_buffer('kernel', kernel)

        # Padding chosen so the output is exactly input_size / factor.
        p = kernel.shape[0] - factor

        pad0 = (p + 1) // 2
        pad1 = p // 2

        self.pad = (pad0, pad1)

    def forward(self, input):
        out = upfirdn2d(input, self.kernel, up=1, down=self.factor, pad=self.pad)

        return out
class Blur(nn.Module):
    """Apply a fixed FIR blur kernel via ``upfirdn2d`` (no resampling).

    When used after a transposed convolution, ``upsample_factor`` scales
    the kernel by ``factor ** 2`` to preserve signal magnitude.
    """

    def __init__(self, kernel, pad, upsample_factor=1):
        super().__init__()

        kernel = make_kernel(kernel)

        if upsample_factor > 1:
            kernel = kernel * (upsample_factor ** 2)

        self.register_buffer('kernel', kernel)

        self.pad = pad

    def forward(self, input):
        out = upfirdn2d(input, self.kernel, pad=self.pad)

        return out
class EqualConv2d(nn.Module):
    """Conv2d with equalized learning rate.

    Weights are stored at unit variance and multiplied by
    ``1 / sqrt(fan_in)`` on every forward pass (the ProGAN/StyleGAN
    equalized-LR trick), so all layers learn at a comparable rate.
    """

    def __init__(
        self, in_channel, out_channel, kernel_size, stride=1, padding=0, bias=True
    ):
        super().__init__()

        self.weight = nn.Parameter(
            torch.randn(out_channel, in_channel, kernel_size, kernel_size)
        )
        # He-style runtime scaling factor.
        fan_in = in_channel * kernel_size ** 2
        self.scale = 1 / math.sqrt(fan_in)

        self.stride = stride
        self.padding = padding

        self.bias = nn.Parameter(torch.zeros(out_channel)) if bias else None

    def forward(self, input):
        return F.conv2d(
            input,
            self.weight * self.scale,
            bias=self.bias,
            stride=self.stride,
            padding=self.padding,
        )

    def __repr__(self):
        w = self.weight.shape
        return (
            f'{self.__class__.__name__}({w[1]}, {w[0]},'
            f' {w[2]}, stride={self.stride}, padding={self.padding})'
        )
class EqualLinear(nn.Module):
    """Linear layer with equalized learning rate and optional fused lrelu.

    Weights are stored pre-divided by ``lr_mul`` and rescaled by
    ``(1 / sqrt(in_dim)) * lr_mul`` at forward time; the bias learning
    rate is likewise multiplied by ``lr_mul``.  When ``activation`` is
    set, a fused bias + leaky-ReLU op is applied after the matmul.
    """

    def __init__(
        self, in_dim, out_dim, bias=True, bias_init=0, lr_mul=1, activation=None
    ):
        super().__init__()

        self.weight = nn.Parameter(torch.randn(out_dim, in_dim).div_(lr_mul))

        if bias:
            self.bias = nn.Parameter(torch.zeros(out_dim).fill_(bias_init))

        else:
            self.bias = None

        self.activation = activation

        self.scale = (1 / math.sqrt(in_dim)) * lr_mul
        self.lr_mul = lr_mul

    def forward(self, input):
        # Bug fix: the original computed ``self.bias * self.lr_mul``
        # unconditionally, raising TypeError whenever the layer was
        # constructed with ``bias=False``.
        bias = self.bias * self.lr_mul if self.bias is not None else None

        if self.activation:
            out = F.linear(input, self.weight * self.scale)
            out = fused_leaky_relu(out, bias)

        else:
            out = F.linear(input, self.weight * self.scale, bias=bias)

        return out

    def __repr__(self):
        return (
            f'{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]})'
        )
class ScaledLeakyReLU(nn.Module):
    """Leaky ReLU followed by a sqrt(2) gain to preserve activation variance."""

    def __init__(self, negative_slope=0.2):
        super().__init__()

        self.negative_slope = negative_slope

    def forward(self, input):
        activated = F.leaky_relu(input, negative_slope=self.negative_slope)
        return math.sqrt(2) * activated
class ModulatedConv2d(nn.Module):
    """StyleGAN2 modulated convolution.

    The per-sample style vector modulates the conv weights (one scale per
    input channel); with ``demodulate=True`` the weights are renormalized
    so each output feature map has unit expected variance.  The batch is
    folded into the group dimension so every sample gets its own weights
    in a single grouped convolution.  Optional blur-filtered up/down
    sampling follows the StyleGAN2 architecture.
    """

    def __init__(
        self,
        in_channel,
        out_channel,
        kernel_size,
        style_dim,
        demodulate=True,
        upsample=False,
        downsample=False,
        blur_kernel=[1, 3, 3, 1],
    ):
        super().__init__()

        self.eps = 1e-8
        self.kernel_size = kernel_size
        self.in_channel = in_channel
        self.out_channel = out_channel
        self.upsample = upsample
        self.downsample = downsample

        if upsample:
            factor = 2
            # Blur padding for the transposed-conv upsampling path.
            p = (len(blur_kernel) - factor) - (kernel_size - 1)
            pad0 = (p + 1) // 2 + factor - 1
            pad1 = p // 2 + 1

            self.blur = Blur(blur_kernel, pad=(pad0, pad1), upsample_factor=factor)

        if downsample:
            factor = 2
            # Blur padding for the strided-conv downsampling path.
            p = (len(blur_kernel) - factor) + (kernel_size - 1)
            pad0 = (p + 1) // 2
            pad1 = p // 2

            self.blur = Blur(blur_kernel, pad=(pad0, pad1))

        # Equalized learning-rate scaling, as in EqualConv2d.
        fan_in = in_channel * kernel_size ** 2
        self.scale = 1 / math.sqrt(fan_in)
        self.padding = kernel_size // 2

        # Leading singleton dim broadcasts against the per-sample styles.
        self.weight = nn.Parameter(
            torch.randn(1, out_channel, in_channel, kernel_size, kernel_size)
        )

        # Affine mapping from style vector to per-input-channel scales;
        # bias_init=1 so an untrained style leaves weights unchanged.
        self.modulation = EqualLinear(style_dim, in_channel, bias_init=1)

        self.demodulate = demodulate

    def __repr__(self):
        return (
            f'{self.__class__.__name__}({self.in_channel}, {self.out_channel}, {self.kernel_size}, '
            f'upsample={self.upsample}, downsample={self.downsample})'
        )

    def forward(self, input, style):
        batch, in_channel, height, width = input.shape

        # Per-sample, per-input-channel modulation factors.
        style = self.modulation(style).view(batch, 1, in_channel, 1, 1)
        weight = self.scale * self.weight * style

        if self.demodulate:
            # Normalize each output filter to unit L2 norm.
            demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + 1e-8)
            weight = weight * demod.view(batch, self.out_channel, 1, 1, 1)

        # Fold batch into the filter dimension for a grouped conv where
        # each sample uses its own modulated weights.
        weight = weight.view(
            batch * self.out_channel, in_channel, self.kernel_size, self.kernel_size
        )

        if self.upsample:
            input = input.view(1, batch * in_channel, height, width)
            # conv_transpose2d expects (in, out, kH, kW), hence the transpose.
            weight = weight.view(
                batch, self.out_channel, in_channel, self.kernel_size, self.kernel_size
            )
            weight = weight.transpose(1, 2).reshape(
                batch * in_channel, self.out_channel, self.kernel_size, self.kernel_size
            )
            out = F.conv_transpose2d(input, weight, padding=0, stride=2, groups=batch)
            _, _, height, width = out.shape
            out = out.view(batch, self.out_channel, height, width)
            out = self.blur(out)

        elif self.downsample:
            input = self.blur(input)
            _, _, height, width = input.shape
            input = input.view(1, batch * in_channel, height, width)
            out = F.conv2d(input, weight, padding=0, stride=2, groups=batch)
            _, _, height, width = out.shape
            out = out.view(batch, self.out_channel, height, width)

        else:
            input = input.view(1, batch * in_channel, height, width)
            out = F.conv2d(input, weight, padding=self.padding, groups=batch)
            _, _, height, width = out.shape
            out = out.view(batch, self.out_channel, height, width)

        return out
class NoiseInjection(nn.Module):
    """Add per-pixel noise scaled by a single learned weight (initialized to 0)."""

    def __init__(self):
        super().__init__()

        self.weight = nn.Parameter(torch.zeros(1))

    def forward(self, image, noise=None):
        if noise is not None:
            return image + self.weight * noise

        # No noise supplied: sample a fresh single-channel standard-normal
        # field matching the image's spatial size, device, and dtype.
        batch, _, height, width = image.shape
        fresh = image.new_empty(batch, 1, height, width).normal_()
        return image + self.weight * fresh
class ConstantInput(nn.Module):
    """Learned constant tensor, repeated to match the batch size of the input.

    Only the batch dimension of ``input`` is consulted; its values are ignored.
    """

    def __init__(self, channel, size=4):
        super().__init__()

        self.input = nn.Parameter(torch.randn(1, channel, size, size))

    def forward(self, input):
        return self.input.repeat(input.shape[0], 1, 1, 1)
class StyledConv(nn.Module):
    """One StyleGAN2 synthesis layer: modulated conv, noise injection,
    and a fused bias + leaky-ReLU activation."""

    def __init__(
        self,
        in_channel,
        out_channel,
        kernel_size,
        style_dim,
        upsample=False,
        blur_kernel=[1, 3, 3, 1],
        demodulate=True,
    ):
        super().__init__()

        self.conv = ModulatedConv2d(
            in_channel,
            out_channel,
            kernel_size,
            style_dim,
            upsample=upsample,
            blur_kernel=blur_kernel,
            demodulate=demodulate,
        )

        self.noise = NoiseInjection()
        # self.bias = nn.Parameter(torch.zeros(1, out_channel, 1, 1))
        # self.activate = ScaledLeakyReLU(0.2)
        # FusedLeakyReLU adds the bias as part of the fused op.
        self.activate = FusedLeakyReLU(out_channel)

    def forward(self, input, style, noise=None):
        out = self.conv(input, style)
        out = self.noise(out, noise=noise)
        # out = out + self.bias
        out = self.activate(out)

        return out
class ToRGB(nn.Module):
    """Project features to a 3-channel RGB image, optionally adding the
    upsampled skip image from the previous (coarser) resolution."""

    def __init__(self, in_channel, style_dim, upsample=True, blur_kernel=[1, 3, 3, 1]):
        super().__init__()

        if upsample:
            self.upsample = Upsample(blur_kernel)

        # 1x1 modulated conv without demodulation, per the StyleGAN2 paper.
        self.conv = ModulatedConv2d(in_channel, 3, 1, style_dim, demodulate=False)
        self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1))

    def forward(self, input, style, skip=None):
        out = self.conv(input, style)
        out = out + self.bias

        if skip is not None:
            skip = self.upsample(skip)

            out = out + skip

        return out
class Generator(nn.Module):
    """StyleGAN2 generator: mapping MLP plus skip-connection synthesis net.

    z vectors are mapped through an ``n_mlp``-layer MLP into W space, then
    an image is synthesized by progressively upsampling a learned 4x4
    constant with style-modulated convolutions, noise injection, and
    per-resolution RGB skip outputs.
    """

    def __init__(
        self,
        size,
        style_dim,
        n_mlp,
        channel_multiplier=2,
        blur_kernel=[1, 3, 3, 1],
        lr_mlp=0.01,
    ):
        super().__init__()

        self.size = size

        self.style_dim = style_dim

        # Mapping network: PixelNorm followed by n_mlp equalized FC layers
        # with a reduced learning rate (lr_mlp).
        layers = [PixelNorm()]

        for i in range(n_mlp):
            layers.append(
                EqualLinear(
                    style_dim, style_dim, lr_mul=lr_mlp, activation='fused_lrelu'
                )
            )

        self.style = nn.Sequential(*layers)

        # Feature channel count per spatial resolution.
        self.channels = {
            4: 512,
            8: 512,
            16: 512,
            32: 512,
            64: 256 * channel_multiplier,
            128: 128 * channel_multiplier,
            256: 64 * channel_multiplier,
            512: 32 * channel_multiplier,
            1024: 16 * channel_multiplier,
        }

        self.input = ConstantInput(self.channels[4])
        self.conv1 = StyledConv(
            self.channels[4], self.channels[4], 3, style_dim, blur_kernel=blur_kernel
        )
        self.to_rgb1 = ToRGB(self.channels[4], style_dim, upsample=False)

        self.log_size = int(math.log(size, 2))
        self.num_layers = (self.log_size - 2) * 2 + 1

        self.convs = nn.ModuleList()
        self.upsamples = nn.ModuleList()
        self.to_rgbs = nn.ModuleList()
        self.noises = nn.Module()

        in_channel = self.channels[4]

        # Fixed per-layer noise buffers, used when randomize_noise=False.
        for layer_idx in range(self.num_layers):
            res = (layer_idx + 5) // 2
            shape = [1, 1, 2 ** res, 2 ** res]
            self.noises.register_buffer(f'noise_{layer_idx}', torch.randn(*shape))

        # Two styled convs (upsample + refine) and one ToRGB per resolution.
        for i in range(3, self.log_size + 1):
            out_channel = self.channels[2 ** i]

            self.convs.append(
                StyledConv(
                    in_channel,
                    out_channel,
                    3,
                    style_dim,
                    upsample=True,
                    blur_kernel=blur_kernel,
                )
            )

            self.convs.append(
                StyledConv(
                    out_channel, out_channel, 3, style_dim, blur_kernel=blur_kernel
                )
            )

            self.to_rgbs.append(ToRGB(out_channel, style_dim))

            in_channel = out_channel

        # Number of style (W) vectors consumed by one forward pass.
        self.n_latent = self.log_size * 2 - 2

    def make_noise(self):
        """Return freshly sampled noise tensors, one per synthesis layer."""
        device = self.input.input.device

        noises = [torch.randn(1, 1, 2 ** 2, 2 ** 2, device=device)]

        for i in range(3, self.log_size + 1):
            for _ in range(2):
                noises.append(torch.randn(1, 1, 2 ** i, 2 ** i, device=device))

        return noises

    def mean_latent(self, n_latent):
        """Estimate the mean W latent from ``n_latent`` random z samples
        (used as the truncation center)."""
        latent_in = torch.randn(
            n_latent, self.style_dim, device=self.input.input.device
        )
        latent = self.style(latent_in).mean(0, keepdim=True)

        return latent

    def get_latent(self, input):
        """Map a z latent through the mapping network into W space."""
        return self.style(input)

    def forward(
        self,
        styles,
        return_latents=False,
        return_features=False,
        inject_index=None,
        truncation=1,
        truncation_latent=None,
        input_is_latent=False,
        noise=None,
        randomize_noise=True,
    ):
        """Synthesize images from a list of style vectors.

        ``styles`` holds one or two latents; with two, style mixing splits
        the per-layer W codes at ``inject_index`` (random if None).
        ``truncation < 1`` interpolates each style towards
        ``truncation_latent``.  Returns ``(image, latent|features|None)``.
        """
        if not input_is_latent:
            styles = [self.style(s) for s in styles]

        if noise is None:
            if randomize_noise:
                # None entries make each NoiseInjection sample fresh noise.
                noise = [None] * self.num_layers
            else:
                noise = [
                    getattr(self.noises, f'noise_{i}') for i in range(self.num_layers)
                ]

        if truncation < 1:
            style_t = []

            for style in styles:
                style_t.append(
                    truncation_latent + truncation * (style - truncation_latent)
                )

            styles = style_t

        if len(styles) < 2:
            inject_index = self.n_latent

            if styles[0].ndim < 3:
                # Single W vector: broadcast to all layers (W -> W+).
                latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)

            else:
                # Already a per-layer W+ tensor.
                latent = styles[0]

        else:
            # Style mixing: first latent up to inject_index, second after.
            if inject_index is None:
                inject_index = random.randint(1, self.n_latent - 1)

            latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
            latent2 = styles[1].unsqueeze(1).repeat(1, self.n_latent - inject_index, 1)

            latent = torch.cat([latent, latent2], 1)

        out = self.input(latent)
        out = self.conv1(out, latent[:, 0], noise=noise[0])

        skip = self.to_rgb1(out, latent[:, 1])

        i = 1
        # Walk resolutions: upsample conv, refine conv, then RGB skip.
        for conv1, conv2, noise1, noise2, to_rgb in zip(
            self.convs[::2], self.convs[1::2], noise[1::2], noise[2::2], self.to_rgbs
        ):
            out = conv1(out, latent[:, i], noise=noise1)
            out = conv2(out, latent[:, i + 1], noise=noise2)
            skip = to_rgb(out, latent[:, i + 2], skip)

            i += 2

        image = skip

        if return_latents:
            return image, latent

        elif return_features:
            return image, out

        else:
            return image, None
class ConvLayer(nn.Sequential):
    """Discriminator conv block: optional blur + stride-2 downsample,
    equalized conv, and (optionally fused) leaky-ReLU activation."""

    def __init__(
        self,
        in_channel,
        out_channel,
        kernel_size,
        downsample=False,
        blur_kernel=[1, 3, 3, 1],
        bias=True,
        activate=True,
    ):
        layers = []

        if downsample:
            factor = 2
            p = (len(blur_kernel) - factor) + (kernel_size - 1)
            pad0 = (p + 1) // 2
            pad1 = p // 2

            layers.append(Blur(blur_kernel, pad=(pad0, pad1)))

            stride = 2
            self.padding = 0

        else:
            stride = 1
            self.padding = kernel_size // 2

        layers.append(
            EqualConv2d(
                in_channel,
                out_channel,
                kernel_size,
                padding=self.padding,
                stride=stride,
                # When activating with FusedLeakyReLU the bias is applied
                # inside the fused op, so the conv itself stays bias-free.
                bias=bias and not activate,
            )
        )

        if activate:
            if bias:
                layers.append(FusedLeakyReLU(out_channel))

            else:
                layers.append(ScaledLeakyReLU(0.2))

        super().__init__(*layers)
class ResBlock(nn.Module):
    """Residual downsampling block: two convs plus a 1x1 skip branch,
    with the sum scaled by 1/sqrt(2) to keep output variance stable."""

    def __init__(self, in_channel, out_channel, blur_kernel=[1, 3, 3, 1]):
        super().__init__()

        self.conv1 = ConvLayer(in_channel, in_channel, 3)
        self.conv2 = ConvLayer(in_channel, out_channel, 3, downsample=True)

        # Linear (no activation, no bias) projection for the skip path.
        self.skip = ConvLayer(
            in_channel, out_channel, 1, downsample=True, activate=False, bias=False
        )

    def forward(self, input):
        out = self.conv1(input)
        out = self.conv2(out)

        skip = self.skip(input)
        out = (out + skip) / math.sqrt(2)

        return out
class Discriminator(nn.Module):
    """StyleGAN2 discriminator: residual downsampling trunk, minibatch
    standard-deviation feature, then a two-layer equalized-LR head."""

    def __init__(self, size, channel_multiplier=2, blur_kernel=[1, 3, 3, 1]):
        super().__init__()

        # Feature channel count per spatial resolution.
        channels = {
            4: 512,
            8: 512,
            16: 512,
            32: 512,
            64: 256 * channel_multiplier,
            128: 128 * channel_multiplier,
            256: 64 * channel_multiplier,
            512: 32 * channel_multiplier,
            1024: 16 * channel_multiplier,
        }

        convs = [ConvLayer(3, channels[size], 1)]

        log_size = int(math.log(size, 2))

        in_channel = channels[size]

        # Residual blocks halve the resolution down to 4x4.
        for i in range(log_size, 2, -1):
            out_channel = channels[2 ** (i - 1)]

            convs.append(ResBlock(in_channel, out_channel, blur_kernel))

            in_channel = out_channel

        self.convs = nn.Sequential(*convs)

        self.stddev_group = 4
        self.stddev_feat = 1

        # +1 input channel for the appended minibatch-stddev map.
        self.final_conv = ConvLayer(in_channel + 1, channels[4], 3)
        self.final_linear = nn.Sequential(
            EqualLinear(channels[4] * 4 * 4, channels[4], activation='fused_lrelu'),
            EqualLinear(channels[4], 1),
        )

    def forward(self, input):
        out = self.convs(input)

        batch, channel, height, width = out.shape
        # Minibatch standard deviation: std across groups of samples,
        # averaged over features/pixels and appended as an extra channel.
        group = min(batch, self.stddev_group)
        stddev = out.view(
            group, -1, self.stddev_feat, channel // self.stddev_feat, height, width
        )
        stddev = torch.sqrt(stddev.var(0, unbiased=False) + 1e-8)
        # NOTE(review): ``keepdims`` is the NumPy-style alias of torch's
        # ``keepdim`` — requires a torch version that accepts the alias.
        stddev = stddev.mean([2, 3, 4], keepdims=True).squeeze(2)
        stddev = stddev.repeat(group, 1, height, width)
        out = torch.cat([out, stddev], 1)

        out = self.final_conv(out)

        out = out.view(batch, -1)
        out = self.final_linear(out)

        return out
| 28.534125 | 101 | 0.521215 |
992d996a485de94ad55305552e42c7fbc92ec64b | 3,327 | py | Python | tensorflow/contrib/data/python/kernel_tests/serialization/serialization_integration_test.py | tianyapiaozi/tensorflow | fb3ce0467766a8e91f1da0ad7ada7c24fde7a73a | [
"Apache-2.0"
] | 71 | 2017-05-25T16:02:15.000Z | 2021-06-09T16:08:08.000Z | tensorflow/contrib/data/python/kernel_tests/serialization/serialization_integration_test.py | shrikunjsarda/tensorflow | 7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae | [
"Apache-2.0"
] | 133 | 2017-04-26T16:49:49.000Z | 2019-10-15T11:39:26.000Z | tensorflow/contrib/data/python/kernel_tests/serialization/serialization_integration_test.py | shrikunjsarda/tensorflow | 7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae | [
"Apache-2.0"
] | 26 | 2017-04-12T16:25:44.000Z | 2018-10-30T10:10:15.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Integration test for dataset serialization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.contrib.data.python.ops import iterator_ops as contrib_iterator_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
from tensorflow.python.training import saver as saver_lib
class SerializationIntegrationTest(test.TestCase):
  """End-to-end test that many iterator states save and restore together."""

  def _build_input_pipeline(self, name, num_outputs):
    """Build one shuffled range pipeline and register its iterator as saveable."""
    with ops.name_scope(name):
      # reshuffle_each_iteration=False keeps the order reproducible across
      # the save/restore boundary.
      ds = dataset_ops.Dataset.range(num_outputs).shuffle(
          10, reshuffle_each_iteration=False).prefetch(10)
      iterator = ds.make_initializable_iterator()
      saveable = contrib_iterator_ops.make_saveable_from_iterator(iterator)
      ops.add_to_collection(ops.GraphKeys.SAVEABLE_OBJECTS, saveable)
      return iterator.initializer, iterator.get_next()

  def _build_graph(self, num_pipelines, num_outputs):
    """Build ``num_pipelines`` independent pipelines plus a shared Saver."""
    init_ops = []
    get_next_ops = []
    for i in range(num_pipelines):
      name = "input_pipeline_%d" % i
      init_op, get_next_op = self._build_input_pipeline(name, num_outputs)
      init_ops.append(init_op)
      get_next_ops.append(get_next_op)
    saver = saver_lib.Saver()
    return init_ops, get_next_ops, saver

  def _ckpt_path(self):
    # Checkpoint prefix inside the test's temporary directory.
    return os.path.join(self.get_temp_dir(), "iterator")

  def testConcurrentSaves(self):
    """Consume part of every pipeline, save, restore in a fresh graph, and
    verify each pipeline's combined output is a permutation of the range."""
    num_pipelines = 100
    num_outputs = 100
    break_point = 10
    all_outputs = [[] for _ in range(num_pipelines)]
    with ops.Graph().as_default() as g:
      init_ops, get_next_ops, saver = self._build_graph(num_pipelines,
                                                        num_outputs)
      with self.test_session(graph=g) as sess:
        sess.run(init_ops)
        for _ in range(break_point):
          output = sess.run(get_next_ops)
          for i in range(num_pipelines):
            all_outputs[i].append(output[i])
        saver.save(sess, self._ckpt_path())

    with ops.Graph().as_default() as g:
      init_ops, get_next_ops, saver = self._build_graph(num_pipelines,
                                                        num_outputs)
      with self.test_session(graph=g) as sess:
        # Restore mid-iteration state; initializers are intentionally not run.
        saver.restore(sess, self._ckpt_path())
        for _ in range(num_outputs - break_point):
          output = sess.run(get_next_ops)
          for i in range(num_pipelines):
            all_outputs[i].append(output[i])

    for output in all_outputs:
      self.assertSequenceEqual(sorted(output), range(num_outputs))
# Test-module entry point.
if __name__ == "__main__":
  test.main()
| 38.686047 | 83 | 0.687707 |
7c5134c016677603c817258e844c77c9c00bcdd3 | 10,942 | py | Python | nodes/matlab_controller_node.py | icoderaven/slytherin_dagger | 9a12cd92c9f884c25764139f3efdb8535484cc29 | [
"BSD-3-Clause"
] | null | null | null | nodes/matlab_controller_node.py | icoderaven/slytherin_dagger | 9a12cd92c9f884c25764139f3efdb8535484cc29 | [
"BSD-3-Clause"
] | null | null | null | nodes/matlab_controller_node.py | icoderaven/slytherin_dagger | 9a12cd92c9f884c25764139f3efdb8535484cc29 | [
"BSD-3-Clause"
] | 1 | 2018-11-22T00:47:23.000Z | 2018-11-22T00:47:23.000Z | #!/usr/bin/env python
import roslib
roslib.load_manifest('slytherin_dagger')
import rospy
import numpy as np
import random as rnd
import subprocess
import shlex
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import src.linear_predictor as predictor
import src.visual_features as feature
from std_msgs.msg import Empty
from std_msgs.msg import Float32MultiArray
from geometry_msgs.msg import Twist
class Controller:
    """DAgger controller node.

    Blends expert (joystick) commands with a learned linear policy,
    publishes velocity commands to the snake robot, and records
    (feature, expert-action) pairs for future training.
    """

    # ======== code for initializing the Controller ========
    # ----------------------------------------------------------------------
    # constructor
    # ----------------------------------------------------------------------
    def __init__(self):
        rospy.loginfo("[DAgger] Initializing controller")
        # load parameters from parameter server
        self.load_params()
        # init member variables to store sensor data
        self.init_variables()
        # load predictor
        if self.load_pred:
            rospy.loginfo("[DAgger] Loading Predictor")
            self.pred_yaw = predictor.load(self.predy_file)
            self.pred_pitch = predictor.load(self.predp_file)
            rospy.loginfo("[DAgger] Predictor Loaded")
        # init publisher for sending velocity commands
        self.init_publishers()
        # subscribe to topics for reading sensor data
        self.init_subscribers()
        # controls are updated from the joystick callback instead of a timer
        rospy.loginfo("[DAgger] Starting Control Loop")
        # rospy.Timer(rospy.Duration(1.0 / self.ctrl_rate_hz), self.update_control)

    # ----------------------------------------------------------------------
    # destructor
    # ----------------------------------------------------------------------
    def __del__(self):
        # stop recording bag file if we were
        if self.record_proc_started:
            self.record_proc.send_signal(subprocess.signal.SIGINT)
            self.record_proc.wait()

    # ----------------------------------------------------------------------
    # init member variables
    # ----------------------------------------------------------------------
    def init_variables(self):
        rospy.logdebug("[DAgger] Initializing Variables")
        self.is_auto = False
        self.record_proc_started = False
        self.last_vis_feat = np.array([])
        self.last_joy_vel = Twist()
        self.last_yaw = 0.0
        self.last_pitch = 0.0
        self.last_state = np.array([])
        # dt = 1.0 / float(self.ctrl_rate_hz)

    # ----------------------------------------------------------------------
    # load parameters from parameter server
    # ----------------------------------------------------------------------
    def load_params(self):
        rospy.logdebug("[DAgger] Loading Params")
        self.load_pred = rospy.get_param('load_pred', default=False)
        self.predy_file = rospy.get_param('predy_file')
        self.predp_file = rospy.get_param('predp_file')
        self.ctrl_rate_hz = rospy.get_param('ctrl_rate_hz')
        # probability of executing the expert's action instead of the policy
        self.expert_prob = rospy.get_param('expert_prob')
        self.pitch_gain = rospy.get_param('ang_y_gain')
        self.yaw_gain = rospy.get_param('ang_x_gain')
        self.do_record = rospy.get_param('do_record')
        self.record_dir = rospy.get_param('record_dir')
        self.pub_cmd_vel = rospy.get_param('pub_cmd_vel', default='sim_cmd_vel')
        self.pub_record = rospy.get_param('pub_record')
        self.pub_joy_vel = rospy.get_param('pub_joy_vel')
        self.pub_joy_start = rospy.get_param('pub_joy_start')
        self.pub_joy_stop = rospy.get_param('pub_joy_stop')
        self.pub_vis_feat = rospy.get_param('pub_vis_feat')
        self.pub_state = rospy.get_param('pub_state', default='pose_info')

    # ----------------------------------------------------------------------
    # subscribe callbacks to sensor data topics
    # ----------------------------------------------------------------------
    def init_subscribers(self):
        rospy.logdebug("[DAgger] Initializing Subscribers")
        rospy.Subscriber(self.pub_joy_vel, Twist, self.joy_vel_update)
        rospy.Subscriber(self.pub_joy_start, Empty, self.joy_start_update)
        rospy.Subscriber(self.pub_joy_stop, Empty, self.joy_stop_update)
        rospy.Subscriber(self.pub_vis_feat, Float32MultiArray, self.vis_feat_update)
        rospy.Subscriber(self.pub_state, Float32MultiArray, self.state_update)

    # ----------------------------------------------------------------------
    # initialize publishers for velocity commands and training records
    # ----------------------------------------------------------------------
    def init_publishers(self):
        rospy.logdebug("[DAgger] Initializing Publishers")
        self.cmd_vel_publisher = rospy.Publisher(self.pub_cmd_vel, Twist, queue_size=10)
        self.record_publisher = rospy.Publisher(self.pub_record, Float32MultiArray, queue_size=10)

    # ======== code for sensor data subscriber callback ========
    # ----------------------------------------------------------------------
    # callback for visual feature update
    # ----------------------------------------------------------------------
    def vis_feat_update(self, features):
        rospy.logdebug("[DAgger] Received Visual Feature Update: %s", np.array(features.data, dtype=np.float32))
        # create numpy array with visual features
        self.last_vis_feat = np.array(features.data, dtype=np.float32)

    # ----------------------------------------------------------------------
    # callback for robot state update
    # ----------------------------------------------------------------------
    def state_update(self, state_msg):
        rospy.logdebug("[DAgger] Received state Update: %s", np.array(state_msg.data, dtype=np.float32))
        self.last_state = np.array(state_msg.data, dtype=np.float32)

    # ----------------------------------------------------------------------
    # callback for joystick velocity update; also triggers a control update
    # ----------------------------------------------------------------------
    def joy_vel_update(self, vel):
        rospy.logdebug("[DAgger] Received Joystick Velocity Update: %s", vel)
        # store last velocity command from expert
        self.last_joy_vel = vel
        self.update_control()

    # ----------------------------------------------------------------------
    # callback for start update: enter autonomous mode, start recording
    # ----------------------------------------------------------------------
    def joy_start_update(self, empty):  # start issued from joystick
        rospy.loginfo("[DAgger] Start Detected")
        self.is_auto = True
        self.last_yaw = 0.0
        self.last_pitch = 0.0
        if self.do_record:
            self.record_proc = subprocess.Popen(shlex.split("rosbag record -a"), cwd=self.record_dir)
            self.record_proc_started = True

    # ----------------------------------------------------------------------
    # callback for stop update: leave autonomous mode, stop recording
    # ----------------------------------------------------------------------
    def joy_stop_update(self, empty):  # stop issued from joystick
        rospy.loginfo("[DAgger] Stop Detected")
        self.is_auto = False
        if self.record_proc_started:
            self.record_proc.send_signal(subprocess.signal.SIGINT)

    # ======== code for computing and sending controls to snake ========
    # ----------------------------------------------------------------------
    # construct control msg to send to snake from output prediction
    # ----------------------------------------------------------------------
    def construct_control_msg(self, pred_yaw, pred_pitch):
        ctrl = Twist()
        ctrl.linear.x = pred_yaw
        ctrl.linear.y = pred_pitch
        ctrl.linear.z = 0.0
        ctrl.angular.x = 0.0
        ctrl.angular.y = 0.0
        ctrl.angular.z = 0.0
        return ctrl

    # ----------------------------------------------------------------------
    # computes new control command when in autonomous mode
    # ----------------------------------------------------------------------
    def update_control_auto(self):
        # dt = 0
        # if not (event.last_real is None):
        #     dt = event.current_real.to_time() - event.last_real.to_time()
        # construct feature array from the latest visual features
        feat_array = self.last_vis_feat
        state = self.last_state
        expert_yaw = self.last_joy_vel.linear.x * self.yaw_gain
        expert_pitch = self.last_joy_vel.linear.y * self.pitch_gain
        pred_yaw = expert_yaw
        pred_pitch = expert_pitch
        # randomly pick whether we currently execute expert control or
        # predictor control (DAgger mixing with probability expert_prob)
        if self.load_pred and rnd.random() >= self.expert_prob:
            pred_yaw = self.pred_yaw.predict(feat_array)
            pred_pitch = self.pred_pitch.predict(feat_array)
        rospy.loginfo("[DAgger] predicted yaw: %f", pred_yaw)
        rospy.loginfo("[DAgger] expert yaw: %f", expert_yaw)
        rospy.loginfo("[DAgger] predicted pitch: %f", pred_pitch)
        rospy.loginfo("[DAgger] expert pitch: %f", expert_pitch)
        # record current datapoint (features + EXPERT action) for learning
        self.record(feat_array, expert_yaw, expert_pitch)
        # send control message
        ctrl_msg = self.construct_control_msg(pred_yaw, pred_pitch)
        self.send_control_msg(ctrl_msg)
        self.last_yaw = pred_yaw
        self.last_pitch = pred_pitch

    # ----------------------------------------------------------------------
    # compute and send control to snake using latest sensor data
    # ----------------------------------------------------------------------
    def update_control(self):
        if self.is_auto:
            rospy.loginfo("[DAgger] Auto running")
            self.update_control_auto()
        else:
            # pilot in control
            rospy.loginfo("[DAgger] Pilot in control")
            self.send_control_msg(self.last_joy_vel)

    # ----------------------------------------------------------------------
    # send control to snake
    # ----------------------------------------------------------------------
    def send_control_msg(self, ctrl_msg):
        rospy.logdebug("[DAgger] Sending control: %s", ctrl_msg)
        self.cmd_vel_publisher.publish(ctrl_msg)

    # ======== code for recording data for future training ========
    # ----------------------------------------------------------------------
    # record current feature vector with target yaw and pitch in record topic
    # ----------------------------------------------------------------------
    def record(self, feat_array, pred_yaw, pred_pitch):
        if self.do_record:
            # Bug fix: np.append's third positional argument is ``axis``;
            # the original passed an array there, which raises a TypeError.
            # Append both target values to the feature vector instead.
            ar = np.append(feat_array, [pred_yaw, pred_pitch])
            self.record_publisher.publish(None, ar)
if __name__ == '__main__':
    # Start the ROS node, construct the controller, and process callbacks
    # until shutdown.
    rospy.init_node('matlab_controller_node', log_level=rospy.DEBUG)
    ctrler = Controller()
    rospy.spin()
| 43.593625 | 112 | 0.527235 |
4679751404b7d8908d666b7246c20e1745117ecf | 6,824 | py | Python | python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_reexecution.py | basilvetas/dagster | b08f5534a0b0277dab38cb7b6a46d324e94b8940 | [
"Apache-2.0"
] | 2 | 2021-06-21T17:50:26.000Z | 2021-06-21T19:14:23.000Z | python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_reexecution.py | basilvetas/dagster | b08f5534a0b0277dab38cb7b6a46d324e94b8940 | [
"Apache-2.0"
] | null | null | null | python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_reexecution.py | basilvetas/dagster | b08f5534a0b0277dab38cb7b6a46d324e94b8940 | [
"Apache-2.0"
] | 1 | 2021-08-18T17:21:57.000Z | 2021-08-18T17:21:57.000Z | from dagster.core.utils import make_new_run_id
from dagster_graphql.client.query import (
LAUNCH_PIPELINE_EXECUTION_MUTATION,
LAUNCH_PIPELINE_REEXECUTION_MUTATION,
)
from dagster_graphql.test.utils import execute_dagster_graphql, infer_pipeline_selector
from .graphql_context_test_suite import ExecutingGraphQLContextTestMatrix
from .setup import csv_hello_world_solids_config, csv_hello_world_solids_config_fs_storage
# GraphQL query used by the tests to poll a pipeline run's status by id.
RUN_QUERY = """
query RunQuery($runId: ID!) {
pipelineRunOrError(runId: $runId) {
__typename
... on PipelineRun {
status
}
}
}
"""
class TestReexecution(ExecutingGraphQLContextTestMatrix):
def test_full_pipeline_reexecution_fs_storage(self, graphql_context, snapshot):
selector = infer_pipeline_selector(graphql_context, "csv_hello_world")
run_id = make_new_run_id()
result_one = execute_dagster_graphql(
graphql_context,
LAUNCH_PIPELINE_EXECUTION_MUTATION,
variables={
"executionParams": {
"selector": selector,
"runConfigData": csv_hello_world_solids_config_fs_storage(),
"executionMetadata": {"runId": run_id},
"mode": "default",
}
},
)
assert (
result_one.data["launchPipelineExecution"]["__typename"] == "LaunchPipelineRunSuccess"
)
result_one.data["launchPipelineExecution"]["run"]["runId"] = "<runId dummy value>"
result_one.data["launchPipelineExecution"]["run"][
"runConfigYaml"
] = "<runConfigYaml dummy value>"
snapshot.assert_match(result_one.data)
# reexecution
new_run_id = make_new_run_id()
result_two = execute_dagster_graphql(
graphql_context,
LAUNCH_PIPELINE_REEXECUTION_MUTATION,
variables={
"executionParams": {
"selector": selector,
"runConfigData": csv_hello_world_solids_config_fs_storage(),
"executionMetadata": {
"runId": new_run_id,
"rootRunId": run_id,
"parentRunId": run_id,
},
"mode": "default",
}
},
)
query_result = result_two.data["launchPipelineReexecution"]
assert query_result["__typename"] == "LaunchPipelineRunSuccess"
assert query_result["run"]["rootRunId"] == run_id
assert query_result["run"]["parentRunId"] == run_id
def test_full_pipeline_reexecution_in_memory_storage(self, graphql_context, snapshot):
run_id = make_new_run_id()
selector = infer_pipeline_selector(graphql_context, "csv_hello_world")
result_one = execute_dagster_graphql(
graphql_context,
LAUNCH_PIPELINE_EXECUTION_MUTATION,
variables={
"executionParams": {
"selector": selector,
"runConfigData": csv_hello_world_solids_config(),
"executionMetadata": {"runId": run_id},
"mode": "default",
}
},
)
assert (
result_one.data["launchPipelineExecution"]["__typename"] == "LaunchPipelineRunSuccess"
)
result_one.data["launchPipelineExecution"]["run"]["runId"] = "<runId dummy value>"
result_one.data["launchPipelineExecution"]["run"][
"runConfigYaml"
] = "<runConfigYaml dummy value>"
snapshot.assert_match(result_one.data)
# reexecution
new_run_id = make_new_run_id()
result_two = execute_dagster_graphql(
graphql_context,
LAUNCH_PIPELINE_REEXECUTION_MUTATION,
variables={
"executionParams": {
"selector": selector,
"runConfigData": csv_hello_world_solids_config(),
"executionMetadata": {
"runId": new_run_id,
"rootRunId": run_id,
"parentRunId": run_id,
},
"mode": "default",
}
},
)
query_result = result_two.data["launchPipelineReexecution"]
assert query_result["__typename"] == "LaunchPipelineRunSuccess"
assert query_result["run"]["rootRunId"] == run_id
assert query_result["run"]["parentRunId"] == run_id
    def test_pipeline_reexecution_successful_launch(self, graphql_context):
        """Launch no_config_pipeline, wait for it to finish, re-execute it
        with lineage metadata, and verify both runs reach SUCCESS.
        """
        selector = infer_pipeline_selector(graphql_context, "no_config_pipeline")
        run_id = make_new_run_id()
        result = execute_dagster_graphql(
            context=graphql_context,
            query=LAUNCH_PIPELINE_EXECUTION_MUTATION,
            variables={
                "executionParams": {
                    "selector": selector,
                    "runConfigData": {"storage": {"filesystem": {}}},
                    "executionMetadata": {"runId": run_id},
                    "mode": "default",
                }
            },
        )
        assert result.data["launchPipelineExecution"]["__typename"] == "LaunchPipelineRunSuccess"
        # The launcher is asynchronous: the run starts in NOT_STARTED ...
        assert result.data["launchPipelineExecution"]["run"]["status"] == "NOT_STARTED"
        # ... so block until the launched run completes before querying it.
        graphql_context.instance.run_launcher.join()
        result = execute_dagster_graphql(
            context=graphql_context, query=RUN_QUERY, variables={"runId": run_id}
        )
        assert result.data["pipelineRunOrError"]["__typename"] == "PipelineRun"
        assert result.data["pipelineRunOrError"]["status"] == "SUCCESS"
        # reexecution
        new_run_id = make_new_run_id()
        result = execute_dagster_graphql(
            context=graphql_context,
            query=LAUNCH_PIPELINE_REEXECUTION_MUTATION,
            variables={
                "executionParams": {
                    "selector": selector,
                    "runConfigData": {"storage": {"filesystem": {}}},
                    # Lineage: first re-execution, so root == parent == run_id.
                    "executionMetadata": {
                        "runId": new_run_id,
                        "rootRunId": run_id,
                        "parentRunId": run_id,
                    },
                    "mode": "default",
                }
            },
        )
        assert result.data["launchPipelineReexecution"]["__typename"] == "LaunchPipelineRunSuccess"
        graphql_context.instance.run_launcher.join()
        result = execute_dagster_graphql(
            context=graphql_context, query=RUN_QUERY, variables={"runId": new_run_id}
        )
        assert result.data["pipelineRunOrError"]["__typename"] == "PipelineRun"
        assert result.data["pipelineRunOrError"]["status"] == "SUCCESS"
| 37.494505 | 99 | 0.5784 |
296145f8d3d22876ed73277e901cd3478f7e5931 | 9,139 | py | Python | utils/image_blending.py | ishitsuka-hikaru/data-augmentation-for-coco | b7d18f7ebc02102d2037835937ce2c74ad923091 | [
"MIT"
] | null | null | null | utils/image_blending.py | ishitsuka-hikaru/data-augmentation-for-coco | b7d18f7ebc02102d2037835937ce2c74ad923091 | [
"MIT"
] | null | null | null | utils/image_blending.py | ishitsuka-hikaru/data-augmentation-for-coco | b7d18f7ebc02102d2037835937ce2c74ad923091 | [
"MIT"
] | null | null | null | import cv2
import glob
import matplotlib.pyplot as plt
import numpy as np
import os
import random
def rescale_anns(anns, s, t):
    """Scale COCO-style annotations by factor ``s`` and shift them by ``t``.

    Bounding boxes, areas and segmentation polygons are updated in place;
    the (mutated) list is also returned for convenience. ``t`` is an
    (x-offset, y-offset) pair applied after scaling.
    """
    dx, dy = t[0], t[1]
    for ann in anns:
        scaled = [v * s for v in ann['bbox']]
        scaled[0] += dx
        scaled[1] += dy
        ann['bbox'] = scaled
        ann['area'] *= s * s
        polys = ann['segmentation']
        for idx, poly in enumerate(polys):
            # Polygon coordinates alternate x, y, x, y, ...
            polys[idx] = [
                c * s + (dx if k % 2 == 0 else dy)
                for k, c in enumerate(poly)
            ]
    return anns
class ImageBlending:
    """Composite a foreground image onto a background with several blending
    strategies (hard copy-paste, Gaussian-feathered, Poisson/seamless).

    Images are held internally as float64 arrays scaled to [0, 1]; masks are
    expanded to 3 channels so they broadcast against RGB images. The
    foreground is resized to fit inside the background on construction.
    """
    def __init__(self, fg, fg_mask, bg, bg_mask=None, seed=None):
        # fg/bg: image arrays or file paths readable by plt.imread.
        # fg_mask/bg_mask: binary mask arrays or .npy file paths.
        self.fg = self.load_image(fg)
        self.fg_mask = self.load_mask(fg_mask)
        self.bg = self.load_image(bg)
        self.bg_mask = bg_mask if bg_mask is None else self.load_mask(bg_mask)
        self.seed = seed
        # Last translation applied by paste(); (0, 0) until then.
        self.t = np.array([0, 0])
        self.random_seed(seed)
        self.scale = 1
        self.resize()
    def random_seed(self, seed):
        """Seed both the stdlib and NumPy RNGs for reproducible pastes."""
        random.seed(seed)
        np.random.seed(seed)
    def get_fg(self):
        """Foreground as a uint8 image in [0, 255]."""
        return np.clip(self.fg*255, 0, 255).astype('uint8')
    def get_fg_mask(self):
        """Foreground mask as a single-channel uint8 array in {0, 1}."""
        return np.clip(self.fg_mask[:, :, 0], 0, 1).astype('uint8')
    def get_bg(self):
        """Background as a uint8 image in [0, 255]."""
        return np.clip(self.bg*255, 0, 255).astype('uint8')
    def get_bg_mask(self):
        """Background mask as a single-channel uint8 array in {0, 1}."""
        return np.clip(self.bg_mask[:, :, 0], 0, 1).astype('uint8')
    def get_occlusion(self):
        """Fraction of the background mask covered by the foreground mask."""
        return self.calc_occlusion(self.fg_mask, self.bg_mask)
    def load_image(self, image):
        """Load an image (path or array) and normalize it to float64 [0, 1]."""
        img = plt.imread(image) if isinstance(image, str) else image
        # NOTE(review): plt.imread already returns floats in [0, 1] for PNG
        # files; dividing by 255 assumes uint8 input — confirm input format.
        return np.clip(img/255, 0, 1).astype(np.float64)
    def load_mask(self, mask):
        """Load a mask (.npy path or array) and stack it to 3 channels."""
        msk = np.load(mask) if isinstance(mask, str) else mask
        return np.stack([msk, msk, msk], axis=2).astype(np.float64)
    def translation(self, image, t):
        """Shift `image` by t=(tx, ty) pixels via an affine warp."""
        h, w = image.shape[:2]
        m = np.float32([[1, 0, t[0]], [0, 1, t[1]]])
        return cv2.warpAffine(image, m, (w, h))
    def rotation(self, _in, rot):
        # Placeholder: rotation is not implemented.
        pass
    def _bbox(self, mask):
        """
        :param mask (ndarray, shape (H, W, 3), dtype uint8)
        :return: xmin, ymin, xmax, ymax (tuple, int)
        """
        rows = np.any(mask, axis=1)
        cols = np.any(mask, axis=0)
        ymin, ymax = np.where(rows)[0][[0, -1]]
        xmin, xmax = np.where(cols)[0][[0, -1]]
        return xmin, ymin, xmax, ymax
    def _close(self, xmin, ymin, xmax, ymax, x, y, fg_depth, bg_depth):
        """Exhaustively search the translation whose depth patch best matches
        the foreground's depth (minimum absolute difference)."""
        dep1 = fg_depth * self.fg_mask
        dep1 = dep1[ymin:ymax, xmin:xmax]
        for ty in np.arange(-ymin, y-ymax):
            for tx in np.arange(-xmin, x-xmax):
                t = np.array([tx, ty])
                dep2 = bg_depth * self.translation(self.fg_mask, t)
                dep2 = dep2[ymin+ty:ymax+ty, xmin+tx:xmax+tx]
                d = self._distance(dep1, dep2)
                # NOTE(review): the `'d_tmp' in locals()` trick seeds the
                # running minimum on the first iteration; a None sentinel
                # would be clearer.
                if not 'd_tmp' in locals() or d < d_tmp:
                    t_tmp = t
                    d_tmp = d
        return t_tmp
    def _occlusion(self, xmin, ymin, xmax, ymax, x, y, occ_vals=None):
        """Return the first translation whose occlusion falls strictly inside
        the (low, high) interval `occ_vals`, or None if none exists."""
        for ty in np.arange(-ymin, y-ymax):
            for tx in np.arange(-xmin, x-xmax):
                t = np.array([tx, ty])
                o = self.calc_occlusion(self.translation(self.fg_mask, t), self.bg_mask)
                if occ_vals[0] < o < occ_vals[1]:
                    return t
        return None
    def calc_occlusion(self, fg_mask, bg_mask):
        """Fraction of bg_mask pixels also covered by fg_mask.

        NOTE(review): divides by np.sum(bg_mask); an empty background mask
        yields a division-by-zero — confirm callers guarantee non-empty masks.
        """
        tmp = np.zeros(fg_mask.shape, fg_mask.dtype)
        tmp[fg_mask+bg_mask == 2] = 1
        return np.sum(tmp) / np.sum(bg_mask)
    def paste(self, method='random', fg_depth=None, bg_depth=None, occ_vals=None):
        """
        :param method (str) : ['random', 'close', 'occlusion']
        """
        # Translation bounds keep the foreground bbox inside the frame.
        xmin, ymin, xmax, ymax = self._bbox(self.fg_mask)
        y, x = self.fg_mask.shape[:2]
        if method == 'random':
            tx = np.random.randint(-xmin, x-xmax)
            ty = np.random.randint(-ymin, y-ymax)
        elif method == 'close':
            tx, ty = self._close(xmin, ymin, xmax, ymax, x, y, fg_depth, bg_depth)
        elif method == 'occlusion':
            t = self._occlusion(xmin, ymin, xmax, ymax, x, y, occ_vals)
            assert t is not None, f'occlution not fall into the range of {occ_vals}'
            tx, ty = t
        else:
            assert False, f'invalid method: {method}'
        # Apply the chosen translation to both image and mask.
        self.t = np.array([tx, ty])
        self.fg = self.translation(self.fg, self.t)
        self.fg_mask = self.translation(self.fg_mask, self.t)
    def _distance(self, a, b):
        """Sum of absolute differences (L1 distance) between two arrays."""
        return np.sum(np.abs(a - b))
    def resize(self):
        """Shrink the foreground (and its mask) so it fits the background."""
        self.fg, self.scale = self._resize(self.fg, self.bg)
        self.fg_mask, _ = self._resize(self.fg_mask, self.bg)
        self.fg = self.fg.clip(0, 1)
        self.fg_mask = self.fg_mask.clip(0, 1)
    def _resize(self, _source, _target, interpolation=cv2.INTER_CUBIC):
        """Downscale `_source` to fit `_target` (never upscales) and embed it
        top-left in a zero canvas of `_target`'s shape.

        :return: (embedded array, scale factor applied)
        """
        src_h, src_w = _source.shape[:2]
        tar_h, tar_w = _target.shape[:2]
        scale_w = src_w / tar_w
        scale_h = src_h / tar_h
        scale = 1
        if scale_w > 1 or scale_h > 1:
            # Use the dimension that overflows the most so both fit.
            scale = 1 / scale_h if scale_w <= scale_h else 1 / scale_w
            src = cv2.resize(
                _source, dsize=None, fx=scale, fy=scale,
                interpolation=interpolation)
        else:
            src = _source
        ret = np.zeros(_target.shape, dtype=_source.dtype)
        h, w = src.shape[:2]
        ret[:h, :w] = src
        return ret, scale
    def inverse(self, mask):
        """Invert a {0, 1} mask: 0 -> 1 and 1 -> 0."""
        return -1 * (mask-1)
    def _gray_world(self, _source, _mask=None, g_vals=[0.5, 0.5, 0.5]):
        """Gray-world color correction: scale each channel so its mean equals
        g_vals[channel]. If `_mask` is given, only masked pixels are
        corrected; unmasked pixels keep their original values.

        NOTE(review): g_vals uses a mutable default list — harmless here
        because it is never mutated, but a tuple would be safer.
        """
        ret = _source.copy()
        if _mask is not None:
            ret *= _mask
            # Exclude unmasked (zero) pixels from the channel means.
            ret[ret == 0] = np.nan
        for i in range(ret.shape[2]):
            ret[:, :, i] *= g_vals[i] / np.nanmean(ret[:, :, i])
        if _mask is not None:
            ret = np.nan_to_num(ret)
            ret += self.inverse(_mask) * _source
        return ret.clip(0, 1).astype(np.float64)
    def copy_and_paste(self, gray_world=None):
        """Hard composite: foreground replaces background inside the mask.

        :param gray_world: None, 'GRAY_ALL', 'GRAY_MASK' or 'GRAY_TARGET'
            selects the color-correction strategy applied to the foreground.
        :return: uint8 composited image.
        """
        if gray_world == 'GRAY_ALL':
            fg = self._gray_world(self.fg)
        elif gray_world == 'GRAY_MASK':
            fg = self._gray_world(self.fg, self.fg_mask)
        elif gray_world == 'GRAY_TARGET':
            # Match the foreground's channel means to the background's.
            g_vals = [np.mean(self.bg[:, :, i]) for i in range(self.bg.shape[2])]
            fg = self._gray_world(self.fg, g_vals=g_vals)
        else:
            fg = self.fg
        fg *= self.fg_mask
        bg = self.bg * self.inverse(self.fg_mask)
        ret = np.clip((fg+bg)*255, 0, 255).astype('uint8')
        return ret
    def gaussian_blend(self, ksize=7, gray_world=None):
        """Composite with a Gaussian-blurred (feathered) mask so the seam
        fades smoothly. `ksize` is the (odd) Gaussian kernel size."""
        mask = cv2.GaussianBlur(self.fg_mask, (ksize, ksize), 0)
        if gray_world == 'GRAY_ALL':
            fg = self._gray_world(self.fg)
        elif gray_world == 'GRAY_MASK':
            fg = self._gray_world(self.fg, mask)
        elif gray_world == 'GRAY_TARGET':
            g_vals = [np.mean(self.bg[:, :, i]) for i in range(self.bg.shape[2])]
            fg = self._gray_world(self.fg, g_vals=g_vals)
        else:
            fg = self.fg
        # fg *= mask
        # Non-in-place multiply so self.fg is not mutated when fg aliases it.
        fg = fg * mask
        bg = self.bg * self.inverse(mask)
        ret = np.clip((fg+bg)*255, 0, 255).astype('uint8')
        return ret
    def poisson_blend(self, ksize=11, iterations=1, gray_world=None):
        """Seamless (Poisson) clone of the foreground onto the background.

        :param ksize: ellipse kernel size used to dilate the mask (0 skips
            dilation); :param iterations: dilation passes.
        :return: uint8 composited image.
        """
        if gray_world == 'GRAY_ALL':
            fg = self._gray_world(self.fg)
        elif gray_world == 'GRAY_MASK':
            fg = self._gray_world(self.fg, self.fg_mask)
        elif gray_world == 'GRAY_TARGET':
            g_vals = [np.mean(self.bg[:, :, i]) for i in range(self.bg.shape[2])]
            fg = self._gray_world(self.fg, g_vals=g_vals)
        else:
            fg = self.fg
        # seamlessClone works on uint8 data.
        fg = np.clip(fg*255, 0, 255).astype('uint8')
        msk = np.clip(self.get_fg_mask()*255, 0, 255)
        bg = self.get_bg()
        # avoid error
        # Pad all images so the clone region cannot touch the border, which
        # would make cv2.seamlessClone raise.
        margin = fg.shape[0] // 2 if fg.shape[0] > fg.shape[1] else fg.shape[1] // 2
        top, bottom, left, right = margin, margin, margin, margin
        fg = cv2.copyMakeBorder(fg, top, bottom, left, right, cv2.BORDER_CONSTANT, value=[0, 0, 0])
        msk = cv2.copyMakeBorder(msk, top, bottom, left, right, cv2.BORDER_CONSTANT, value=0)
        bg = cv2.copyMakeBorder(bg, top, bottom, left, right, cv2.BORDER_CONSTANT, value=[0, 0, 0])
        def _center(mask):
            # Center of the mask's bounding box — seamlessClone anchor point.
            rows = np.any(mask, axis=1)
            cols = np.any(mask, axis=0)
            ymin, ymax = np.where(rows)[0][[0, -1]]
            xmin, xmax = np.where(cols)[0][[0, -1]]
            return int(xmin+(xmax-xmin)*0.5), int(ymin+(ymax-ymin)*0.5)
        p = _center(msk)
        if ksize > 0:
            kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (ksize, ksize))
            msk = cv2.dilate(msk, kernel, iterations=iterations)
        ret = cv2.seamlessClone(fg, bg, msk, p=p, flags=cv2.NORMAL_CLONE)
        # Strip the padding added above.
        ret = ret[top:-bottom, left:-right]
        return ret
| 33.973978 | 99 | 0.543714 |
ddb560bd2fd931823f4cd34fc9e8a7abe1794d5f | 9,749 | py | Python | threathunter_common_python/threathunter_common/ip_risk_repo_dao.py | threathunterX/python_lib | e2d4052de04c82cb7bccd08042f28db824cab442 | [
"Apache-2.0"
] | 2 | 2019-03-17T04:03:08.000Z | 2019-05-01T09:42:23.000Z | threathunter_common_python/threathunter_common/ip_risk_repo_dao.py | threathunterX/python_lib | e2d4052de04c82cb7bccd08042f28db824cab442 | [
"Apache-2.0"
] | null | null | null | threathunter_common_python/threathunter_common/ip_risk_repo_dao.py | threathunterX/python_lib | e2d4052de04c82cb7bccd08042f28db824cab442 | [
"Apache-2.0"
] | 4 | 2019-06-24T05:47:24.000Z | 2020-09-29T05:00:31.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import datetime,json,datetime,time,re,ipcalc,six
from .SSDB import SSDB
from .util import curr_timestamp
from .ip_opt import Ip
__author__ = "nebula"
#share
class IpRiskRepoDAO:
    """SSDB-backed store for IP / IP-section / mobile-number risk records.

    Records are JSON blobs keyed by integer IP, by 't<section*100+mask>' for
    CIDR sections, or by 'i<mobile>' for phone numbers. Note: this module is
    Python 2 code (``except Exception,e``, ``print``, ``has_key``).
    """
    def __init__(self,ssdb_port,ssdb_host):
        # Single SSDB connection shared by all operations.
        self.ssdb = SSDB(host=ssdb_host, port=ssdb_port)
    def get_ip_section(self,ip):
        """Return the stored info dict for the CIDR section containing `ip`
        (string or int), or {} when no matching section exists.

        Sections are indexed in the 'ip_section' zset scored by
        section*100+mask; the reverse scan finds the candidate at or below
        ip*100+99, then membership is re-checked with ipcalc.
        """
        ip_section_info = {}
        try:
            org_ip = Ip.ip2int(ip) if isinstance(ip, six.string_types) else ip
            # +99 ensures the scan lands on the highest possible mask slot.
            ip = org_ip*100+99
            ret = self.ssdb.request('zrscan',['ip_section','',ip,0,1]).data
            if ret["index"]:
                ip_section_key = ret["index"][0]
                ip_net_with_mask = ret['items'][ip_section_key]
                # Decode score back into (network, mask) parts.
                ip_section = ip_net_with_mask/100
                mask = int(str(ip_net_with_mask)[-2:])
                cin = ipcalc.Network(ip_section,mask=mask)
                if cin.in_network(org_ip):
                    info = self.ssdb.request('get', [ip_section_key]).data
                    if info:
                        ip_section_info = json.loads(info)
        except Exception,e:
            # NOTE(review): errors are silently swallowed and {} returned.
            pass
        return ip_section_info
    def update_ip_section(self,ip_section,mask,values):
        """
        @param ip_section: IP address of the section, e.g. 1.1.1.1
        @param mask: CIDR prefix length, e.g. 24 16 8 ...
        Merge `values` into the existing section record, or insert a new
        record if none exists. Returns True on success, None on error.
        """
        try:
            if values:
                ip_section = Ip.ip2int(ip_section)
                mask = int(mask)
                net = "t%s"%(str(ip_section*100+mask))
                values["mask"] = mask
                content = self.ssdb.request('get', [net]).data
                if content:
                    new_values = json.loads(content)
                    new_values.update(values)
                    self.ssdb.request('set',[net,json.dumps(new_values)])
                    return True
                else:
                    values["ip_section"] = ip_section
                    self._insert_ip_section(values)
                    return True
        except Exception,e:
            print e
            return None
    def _insert_ip_section(self,values):
        """Insert a fresh section record (defaults merged with `values`) and
        register it in the 'ip_section' zset index."""
        raw_values = {
            "is_vpn":None,
            "is_proxy":None,
            "is_crawler":None,
            "is_brute_force":None,
            "is_web_server":None,
            "is_mail_server":None,
            "is_dns":None,
            "is_org":None,
            "is_seo":None,
            "server_type":"",
            "seo_type":"",
            "city":"",
            "province":"",
            "isp":"",
            "country":"",
            "area":"",
            "address":"",
            "info":"",
            "remark":"",
        }
        values["insert_time"] = int(time.time())
        ip_section = values["ip_section"]
        mask = values["mask"]
        # Score encodes both network and mask for ordered lookups.
        ip_section_score = ip_section*100+mask
        ip_section_key = "t%s"%(ip_section_score)
        values = self.values_prepare(values)
        raw_values.update(values)
        self.ssdb.request('set',[ip_section_key,json.dumps(raw_values)])
        self.ssdb.request('zset',['ip_section',ip_section_key,ip_section_score])
    # Query an available (proxy) IP record.
    def get_ip(self,ip,update_query_time=False,ip_section_search = False):
        """
        @param update_query_time whether to refresh the record's query_time stamp
        @param ip_section_search whether to also merge the IP-section info
        Returns the merged info dict for `ip`, or {} when nothing is stored.
        """
        ip_info = {}
        try:
            ip = Ip.ip2int(ip)
            ret = self.ssdb.request('get', [ip]).data
            if ret:
                if update_query_time:
                    values ={"query_time":int(time.time())}
                    new_values = json.loads(ret)
                    new_values.update(values)
                    self.ssdb.request('set',[ip,json.dumps(new_values)])
                ip_info = json.loads(ret)
                ip_section_info = self.get_ip_section(ip)
                if ip_section_search:
                    # Section values fill in, but never expose its bookkeeping.
                    for k,v in ip_section_info.iteritems():
                        if v and k not in ['insert_time','mask']:
                            ip_info[k] = v
            else:
                if ip_section_search:
                    ip_section_info = self.get_ip_section(ip)
                    for k,v in ip_section_info.iteritems():
                        if v and k not in ['insert_time','mask']:
                            ip_info[k] = v
        except Exception,e:
            print e
        return ip_info
    def verify_mobile(self,mobile):
        """Validate a Chinese mobile number; return the storage key
        ('i' + number) or None when the number does not match."""
        mobile_exp = re.compile("^0?(13[0-9]|15[012356789]|17[678]|18[0-9]|14[57])[0-9]{8}$")
        mobile = str(mobile) if isinstance(mobile,int) else mobile
        if mobile_exp.match(mobile):
            return 'i'+mobile
        else:
            return None
    def get_mobile(self,mobile,update_query_time=False):
        """Return the stored record for `mobile` (or None), optionally
        refreshing its query_time stamp."""
        mobile = self.verify_mobile(mobile)
        if mobile:
            ret = self.ssdb.request('get', [mobile]).data
            if ret:
                if update_query_time:
                    values ={"query_time":int(time.time())}
                    new_values = json.loads(ret)
                    new_values.update(values)
                    self.ssdb.request('set',[mobile,json.dumps(new_values)])
                return json.loads(ret)
        return None
    def insert_mobile(self,values):
        """Insert/update a mobile record from `values` (must contain
        'mobile'). Returns 0 on failure, 1 on update, 2 on insert."""
        try:
            ret = 0
            mobile = values.get('mobile',None)
            if mobile:
                ret = self.update_mobile(mobile,values)
            return ret
        except Exception,e:
            print e
            return ret
    def update_mobile(self,mobile,values):
        """Merge `values` into the record for `mobile`, inserting it when
        absent. Returns 1 (updated), 2 (inserted) or 0 (invalid/error)."""
        try:
            if values:
                mobile = self.verify_mobile(mobile)
                if mobile:
                    content = self.ssdb.request('get', [mobile]).data
                    if content:
                        values = self.values_prepare(values)
                        new_values = json.loads(content)
                        new_values.update(values)
                        new_values["update_time"] = curr_timestamp()
                        self.ssdb.request('set',[mobile,json.dumps(new_values)])
                        return 1
                    else:
                        values["mobile"] = mobile
                        self._insert_mobile(values)
                        return 2
        except Exception,e:
            print e
        return 0
    def _insert_mobile(self,values):
        """Insert a fresh mobile record (defaults merged with `values`)."""
        raw_values = {
            "is_notreal":None, # not a real user
            "is_fraud":None,
            "is_black_marked":None,# whether users flagged it as blacklisted
            "source_mark":"", # data-pool source, e.g. f02, Ali virtual numbers
            "is_crank_call":None # whether it is a crank/harassment call
        }
        values["insert_time"] = int(time.time())
        mobile = values["mobile"]
        values = self.values_prepare(values)
        raw_values.update(values)
        self.ssdb.request('set',[mobile,json.dumps(raw_values)])
    def update(self,ip,values):
        """Merge `values` into the record for `ip`, inserting it when absent.
        Returns 1 (updated), 2 (inserted) or 0 (no values/error)."""
        try:
            if values:
                ip_ = Ip.ip2int(ip)
                content = self.ssdb.request('get', [ip_]).data
                if content:
                    values = self.values_prepare(values)
                    new_values = json.loads(content)
                    new_values.update(values)
                    new_values["update_time"] = curr_timestamp()
                    self.ssdb.request('set',[ip_,json.dumps(new_values)])
                    return 1
                else:
                    values["ip"] = ip
                    self._insert(values)
                    return 2
        except Exception,e:
            print e
        return 0
    def insert(self,values):
        """Insert/update an IP record from `values` (must contain 'ip').
        Returns 0 on failure, 1 on update, 2 on insert."""
        try:
            ret = 0
            ip = values.get("ip",None)
            if ip:
                ret = self.update(ip,values)
            return ret
        except Exception,e:
            print e
            return ret
    def _insert(self,values):
        """Insert a fresh IP record (defaults merged with `values`)."""
        raw_values = {
            "is_vpn":None,
            "is_proxy":None,
            "is_crawler":None,
            "is_brute_force":None,
            "is_web_server":None,
            "is_mail_server":None,
            "is_dns":None,
            "is_org":None,
            "is_seo":None,
            "is_black_marked":None,# whether users flagged it as blacklisted
            "server_type":"",
            "seo_type":"",
            "city":"",
            "province":"",
            "isp":"",
            "country":"",
            "area":"",
            "address":"", # address
            "info":"", # company / organization egress info
            "remark":"",# remark
            "source_mark":"",# source of this record
        }
        values["insert_time"] = int(time.time())
        ip = values["ip"]
        ip = Ip.ip2int(ip)
        values = self.values_prepare(values)
        raw_values.update(values)
        self.ssdb.request('set',[ip,json.dumps(raw_values)])
    def values_prepare(self,values):
        """
        Data pre-processing: strip key fields that are stored in the SSDB
        key itself, and convert datetime timestamps to epoch seconds.
        """
        if values.has_key("ip"):
            values.pop("ip")
        if values.has_key("mobile"):
            values.pop("mobile")
        if values.has_key("ip_section"):
            values.pop("ip_section")
        if values.has_key("check_time") and isinstance(values["check_time"],datetime.datetime):
            values["check_time"] = int(time.mktime(values["check_time"].timetuple()))
        if values.has_key("insert_time") and isinstance(values["insert_time"],datetime.datetime):
            values["insert_time"] = int(time.mktime(values["insert_time"].timetuple()))
        return values
e66a7660d3b595476c42807ac5e4408e6f8dd7a1 | 3,491 | py | Python | evaluation-read.py | baillielab/maic | c0a91045554b1a81a4f715fbc4ef4dd8cadbba2c | [
"MIT"
] | 4 | 2020-01-15T16:17:40.000Z | 2022-02-14T10:38:56.000Z | evaluation-read.py | baillielab/maic | c0a91045554b1a81a4f715fbc4ef4dd8cadbba2c | [
"MIT"
] | 1 | 2020-02-07T15:20:41.000Z | 2020-02-13T11:38:31.000Z | evaluation-read.py | baillielab/maic | c0a91045554b1a81a4f715fbc4ef4dd8cadbba2c | [
"MIT"
] | null | null | null | # coding=utf-8
import json
import numpy as np
import string, sys, os, subprocess
import argparse
import pandas as pd
import seaborn as sns
import matplotlib.pylab as plt
#-----------------------------
# Directory of this script (not currently used below, kept for reference).
scriptpath = os.path.dirname(os.path.realpath(__file__))
#-----------------------------
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory',\
        default="../evaluation_results/e170662",
        help='evaluation output directory')
args = parser.parse_args()
#-----------------------------
graphdir = '../'
# Files in the results directory that are not result payloads.
exclusions = ["errors.txt"]
# Number of best-scoring methods shown in the "top" violin plot.
top = 10
#-----------------------------
# Synthetic "true" gene names: A..Z then AA, AB, ... ; first 100 are the
# ground-truth ranking the scoring methods are evaluated against.
r = list(string.ascii_uppercase)
r += ["{}{}".format(i,j) for i in r for j in r]
real_genes = r[:100]
def meanabsoluteerror(thislist, genes=None):
    """Mean absolute rank error of the known genes found in `thislist`.

    Walks `genes` (defaults to the module-level `real_genes`) in their
    expected order, recording each gene's position in `thislist`, and stops
    at the first gene that is missing. Returns the mean of
    |actual position - expected position| over the genes found, or NaN when
    none were found (same value np.mean([]) would give, without the warning).
    """
    if genes is None:
        genes = real_genes
    # Positions of the expected genes, in expected order.
    positions = []
    for gene in genes:
        try:
            val = thislist.index(gene)
        except ValueError:
            # Was a bare except: only a missing gene should stop the scan.
            break
        positions.append(val)
    if not positions:
        return float('nan')
    return np.mean([abs(pos - expected) for expected, pos in enumerate(positions)])
def compensatedaverageoverlap(lista, listb=None):
    """Average top-i overlap between two ranked lists, compensating for
    unequal lengths by stretching the prefix taken from the longer list.

    For every cutoff i below the shorter list's length, the overlap between
    the (length-compensated) top slices is divided by i; the mean of those
    ratios is returned. Both lists must have length >= 2.
    """
    if listb is None:
        # Late-bound default instead of `listb=real_genes`, so the current
        # module-level list is used even if it is rebound later.
        listb = real_genes
    compa = 1
    compb = 1
    if len(listb) > len(lista):
        compb = float(len(listb))/len(lista)
    if len(lista) > len(listb):
        # BUG FIX: this branch previously assigned compb, leaving compa
        # permanently at 1 and mis-scaling whenever lista was the longer list.
        compa = float(len(lista))/len(listb)
    similarities = [float(len(set(lista[:int(i*compa)])&set(listb[:int(i*compb)])))/i for i in range(1,min(len(lista),len(listb)))]
    return float(sum(similarities))/len(similarities)
def makeviolins(df, label):
    """Save two violin plots of `df` (one for all columns, one for the
    `top`-N columns by max absolute value) as ../graph_*.png files, and
    print summary statistics for the top columns.
    """
    # Order columns by their largest absolute value, smallest first.
    # NOTE(review): DataFrame.reindex_axis was removed in pandas 1.0;
    # df.reindex(columns=...) is the modern equivalent — confirm the
    # pinned pandas version before upgrading.
    df = df.reindex_axis(df.abs().max().sort_values().index, axis='columns')
    sns.set(style="whitegrid")
    #----------------------------
    ax = sns.violinplot(data=df, cut=0)
    fig = ax.get_figure()
    fig.savefig('../graph_{}.png'.format(label))
    #-----------------------------
    # Restrict to the first `top` columns (module-level constant).
    wee = df[list(df)[:top]]
    ax = sns.violinplot(data=wee, cut=0)
    plt.xticks(rotation=90)
    plt.tight_layout()
    fig = ax.get_figure()
    fig.savefig('../graph_top{}_{}.png'.format(top, label))
    #-----------------------------
    print (wee.describe())
#-----------------------------
# Aggregate per-file CAO and MAE scores for every scoring method, then plot.
resultfiles = [x for x in os.listdir(args.directory) if x not in exclusions]
#-----------------------------
dfc = pd.DataFrame({})
dfm = pd.DataFrame({})
for res in resultfiles:
    caodic = {}
    maedic = {}
    with open(os.path.join(args.directory, res)) as f:
        data = json.load(f)
    # Pivot: resultdic[method][entity name] = adjusted score for that method.
    resultdic = {}
    for thisresult in data:
        thisletter = thisresult['name']
        s = thisresult['adjusted_scores']
        for method in s.keys():
            # setdefault replaces the previous try/except-KeyError idiom.
            resultdic.setdefault(method, {})[thisletter] = s[method]
    for method in resultdic:
        d = resultdic[method]
        # Entity names ranked by descending score.
        outputlist = [k for k in sorted(d, key=d.get, reverse=True)]
        caodic[method] = compensatedaverageoverlap(outputlist)
        maedic[method] = meanabsoluteerror(outputlist)
    caonew = pd.DataFrame(caodic, index=[res])
    maenew = pd.DataFrame(maedic, index=[res])
    dfc = pd.concat([dfc, caonew], sort=True)
    # BUG FIX: dfm previously concatenated `caonew` again, so the MAE plot
    # silently duplicated the CAO scores and `maenew` was never used.
    dfm = pd.concat([dfm, maenew], sort=True)
#-----------------------------
makeviolins(dfc, "cao")
makeviolins(dfm, "mae")
| 31.169643 | 131 | 0.562017 |
6d8cfafbc425b16a20d0e11d634a5547d7df9f14 | 864 | py | Python | var/spack/repos/builtin/packages/rgb/package.py | robertmaynard/spack | 35d81a9006a5fa32a012a40874ac6cbdefd4a259 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2021-09-19T10:20:43.000Z | 2021-09-19T10:20:43.000Z | var/spack/repos/builtin/packages/rgb/package.py | robertmaynard/spack | 35d81a9006a5fa32a012a40874ac6cbdefd4a259 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2021-01-06T19:26:40.000Z | 2021-01-06T19:42:17.000Z | var/spack/repos/builtin/packages/rgb/package.py | shintaro-iwasaki/spack | 47998b3f4733c1264760c4a9744b1669661354b9 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Rgb(AutotoolsPackage, XorgPackage):
    """X color name database.
    This package includes both the list mapping X color names to RGB values
    (rgb.txt) and, if configured to use a database for color lookup, the
    rgb program to convert the text file into the binary database format.
    The "others" subdirectory contains some alternate color databases."""
    homepage = "http://cgit.freedesktop.org/xorg/app/rgb"
    # Path under the X.Org mirror; XorgPackage derives the full URL from it.
    xorg_mirror_path = "app/rgb-1.0.6.tar.gz"
    version('1.0.6', sha256='cb998035e08b9f58ad3150cab60461c3225bdd075238cffc665e24da40718933')
    depends_on('xorg-server')
    # Protocol headers are only needed at build time.
    depends_on('xproto', type='build')
fe5bece8c5187eb7a85cf59c0f624c5c3c4f61d0 | 883 | py | Python | Medium/exist.py | a-shah8/LeetCode | a654e478f51b2254f7b49055beba6b5675bc5223 | [
"MIT"
] | 1 | 2021-06-02T15:03:41.000Z | 2021-06-02T15:03:41.000Z | Medium/exist.py | a-shah8/LeetCode | a654e478f51b2254f7b49055beba6b5675bc5223 | [
"MIT"
] | null | null | null | Medium/exist.py | a-shah8/LeetCode | a654e478f51b2254f7b49055beba6b5675bc5223 | [
"MIT"
] | null | null | null | ## Using DFS with recursion
class Solution:
    def exist(self, board: List[List[str]], word: str) -> bool:
        """Return True if `word` can be traced in `board` by moving between
        4-adjacent cells, using each cell at most once.

        DFS with in-place visited marking ('#') and backtracking. Tracks an
        index into `word` instead of slicing it, avoiding the O(len(word))
        string copies the previous version made at every recursion step.
        The board is restored to its original state before returning.
        """
        if not board or not board[0]:
            return False
        rows, cols = len(board), len(board[0])

        def dfs(r, c, k):
            # k = number of characters of `word` already matched.
            if k == len(word):
                return True
            if r < 0 or r >= rows or c < 0 or c >= cols or board[r][c] != word[k]:
                return False
            saved = board[r][c]
            board[r][c] = '#'  # mark visited for the current path
            found = (dfs(r + 1, c, k + 1) or dfs(r - 1, c, k + 1)
                     or dfs(r, c + 1, k + 1) or dfs(r, c - 1, k + 1))
            board[r][c] = saved  # backtrack
            return found

        return any(dfs(r, c, 0) for r in range(rows) for c in range(cols))
| 26.757576 | 87 | 0.428086 |
dd684379433e31abd7ed6ab91522e37046a7a771 | 4,246 | py | Python | app.py | kethan1/full-stack-web-development-4 | 44ac5d22ae2f248add49b22e7b54e579adc03d9d | [
"MIT"
] | null | null | null | app.py | kethan1/full-stack-web-development-4 | 44ac5d22ae2f248add49b22e7b54e579adc03d9d | [
"MIT"
] | null | null | null | app.py | kethan1/full-stack-web-development-4 | 44ac5d22ae2f248add49b22e7b54e579adc03d9d | [
"MIT"
] | null | null | null | from flask import *
from flask_pymongo import PyMongo
from flask_moment import Moment
import datetime
from werkzeug.exceptions import HTTPException
app = Flask(__name__)
# Match routes with or without a trailing slash.
app.url_map.strict_slashes = False
app.config['MONGO_URI'] = "mongodb://localhost:27017/full-stack-web-development4"
# SECURITY NOTE(review): the session-signing secret is hard-coded in source;
# it should be loaded from the environment or a config file kept out of
# version control.
app.secret_key = \
    b"h\xa2\\xe\xdc\x82*\xffc<<vx\xa0\x84\xfe\xcd\xdd/?,\x8d\x89\xfd.T;\xb0\fdasdfa/sdfa/assdf" \
    b"jwijiwjiejijeijfijifjidjofdijpoijdipjiojdiodijijzx2838 amr33j8j82j8j jj8jxae\x1a\x9f\\x`."
mongo = PyMongo(app)
moment = Moment(app)
@app.route('/', methods=['GET', 'POST'])
def register():
    """Render the registration form (GET) or create a new account (POST).

    Logged-in visitors are redirected to /home. On successful registration
    the new user is logged in immediately.
    """
    try:
        if session['logged_in'] != {}:
            flash('You Are Already Logged In')
            return redirect('/home')
    except KeyError:
        # Was a bare except: only a missing 'logged_in' key (fresh session)
        # should be swallowed here.
        pass
    if request.method == 'GET':
        return render_template('register.html', year=datetime.date.today().year)
    elif request.method == 'POST':
        if request.form['confirm_password'] == request.form['password']:
            if mongo.db.users.find_one({'email': request.form['email']}) is None:
                first_name = request.form['first_name']
                last_name = request.form['last_name']
                email = request.form['email']
                password = request.form['password']
                # SECURITY NOTE(review): the password is stored in plaintext;
                # hash it (e.g. werkzeug.security.generate_password_hash)
                # before persisting. Kept as-is because login() compares raw
                # passwords — both must change together.
                mongo.db.users.insert_one({
                    'first_name': first_name,
                    'last_name': last_name,
                    'email': email,
                    'password': password
                })
                session['logged_in'] = {
                    'first_name': first_name,
                    'last_name': last_name,
                    'email': email,
                    'logged_in_time': datetime.datetime.utcnow()
                }
                flash('Successfully Logged In')
                return redirect('/home')
            else:
                flash('Account with That Email Already Exists')
        else:
            flash('Confirm Password Does Not Match Password')
        return redirect('/')
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form (GET) or authenticate a user (POST)."""
    try:
        if session['logged_in'] != {}:
            flash('You Are Already Logged In')
            return redirect('/home')
    except KeyError:
        # Was a bare except: only a missing 'logged_in' key should fall through.
        pass
    if request.method == 'GET':
        return render_template('login.html', year=datetime.date.today().year)
    elif request.method == 'POST':
        # Single lookup instead of the previous duplicate find_one call, which
        # doubled the query cost and could race with a concurrent delete
        # between the existence check and the fetch.
        # SECURITY NOTE(review): credentials are matched in plaintext; hashed
        # passwords require changing register() and this query together.
        found = mongo.db.users.find_one(
            {'email': request.form['email'], 'password': request.form['password']}
        )
        if found is not None:
            info = {
                'first_name': found['first_name'],
                'last_name': found['last_name'],
                'email': request.form['email'],
                'logged_in_time': datetime.datetime.utcnow()
            }
            session['logged_in'] = info
            flash('Successfully Logged In')
            return redirect('/home')
        else:
            if mongo.db.users.find_one({'email': request.form['email']}) is None:
                flash('An Account with That Email Address Does Not Exist')
                return redirect('/login')
            else:
                flash('Wrong Password')
                return redirect('/login')
@app.route('/home')
def home():
    """Show the logged-in user's home page.

    Redirects to the registration page when no user is logged in, and
    passes the time since login (in days) to the template as `time_diff`.
    """
    try:
        if session['logged_in'] == {}:
            # FIX: message previously read 'You Not Already Logged In'.
            flash('You Are Not Logged In')
            return redirect('/')
    except KeyError:
        # 'logged_in' was never set on this session.
        flash('You Are Not Logged In')
        return redirect('/')
    # BUG FIX: timedelta.seconds only holds the seconds *within the current
    # day* (0-86399), silently dropping whole days; total_seconds() keeps
    # the full elapsed time, so time_diff/86400 is a true day count.
    time_diff = (datetime.datetime.utcnow() - session['logged_in']['logged_in_time']).total_seconds()
    return render_template('home.html', logged_in=session['logged_in'], time_diff=time_diff/86400, year=datetime.date.today().year)
@app.route('/logout')
def logout():
    """Log the user out by clearing the session's login info.

    The key is reset to {} rather than removed because home() distinguishes
    an empty dict (logged out) from a missing key (never logged in).
    """
    session['logged_in'] = {}
    return redirect('/')
@app.errorhandler(HTTPException)
def page_not_found(e):
    """Render a generic error page for any HTTP exception (404, 405, ...)."""
    return render_template('page_not_found.html', error=e, year=datetime.date.today().year, title="Page Not Found")
# Expose helpers to Jinja templates: {{ utcnow() }} and timedelta arithmetic.
app.add_template_global(datetime.datetime.utcnow, name='utcnow')
app.add_template_global(datetime.timedelta, name='timedelta')
if __name__ == '__main__':
    app.run()
b34c5fc9e0ba8923f44c005c6b8a0a4a1cbb79cc | 7,701 | py | Python | sdk/python/tests/compiler/component_builder_test.py | tomar27/pipelines | fc6a2761b3770cb3b854115b841c1a50876665c3 | [
"Apache-2.0"
] | 2,860 | 2018-05-24T04:55:01.000Z | 2022-03-31T13:49:56.000Z | sdk/python/tests/compiler/component_builder_test.py | tomar27/pipelines | fc6a2761b3770cb3b854115b841c1a50876665c3 | [
"Apache-2.0"
] | 7,331 | 2018-05-16T09:03:26.000Z | 2022-03-31T23:22:04.000Z | sdk/python/tests/compiler/component_builder_test.py | tomar27/pipelines | fc6a2761b3770cb3b854115b841c1a50876665c3 | [
"Apache-2.0"
] | 1,359 | 2018-05-15T11:05:41.000Z | 2022-03-31T09:42:09.000Z | # Copyright 2018 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.containers._component_builder import _generate_dockerfile, _dependency_to_requirements, VersionedDependency, DependencyHelper
import os
import unittest
class TestVersionedDependency(unittest.TestCase):
    """Unit tests for VersionedDependency's version-resolution rules.

    assertEqual replaces the previous assertTrue(a == b) calls: on failure
    it reports both values instead of just "False is not true".
    """

    def test_version(self):
        """test version overrides min_version and max_version."""
        version = VersionedDependency(
            name='tensorflow',
            version='0.3.0',
            min_version='0.1.0',
            max_version='0.4.0')
        # An exact `version` pins both bounds, ignoring min/max arguments.
        self.assertEqual(version.min_version, '0.3.0')
        self.assertEqual(version.max_version, '0.3.0')
        self.assertTrue(version.has_versions())
        self.assertEqual(version.name, 'tensorflow')

    def test_minmax_version(self):
        """test if min_version and max_version are configured when version is
        not given."""
        version = VersionedDependency(
            name='tensorflow', min_version='0.1.0', max_version='0.4.0')
        self.assertEqual(version.min_version, '0.1.0')
        self.assertEqual(version.max_version, '0.4.0')
        self.assertTrue(version.has_versions())

    def test_min_or_max_version(self):
        """test if min_version and max_version are configured when version is
        not given."""
        version = VersionedDependency(name='tensorflow', min_version='0.1.0')
        self.assertEqual(version.min_version, '0.1.0')
        self.assertTrue(version.has_versions())
        version = VersionedDependency(name='tensorflow', max_version='0.3.0')
        self.assertEqual(version.max_version, '0.3.0')
        self.assertTrue(version.has_versions())

    def test_no_version(self):
        """test the no version scenario."""
        version = VersionedDependency(name='tensorflow')
        self.assertFalse(version.has_min_version())
        self.assertFalse(version.has_max_version())
        self.assertFalse(version.has_versions())
class TestDependencyHelper(unittest.TestCase):
    """Tests that DependencyHelper renders pip requirement files correctly.

    NOTE(review): both tests write a fixed temp file under testdata/, so
    they are not safe to run in parallel — tempfile would avoid collisions.
    """
    def test_generate_requirement(self):
        """Test generating requirement file."""
        # prepare
        test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
        temp_file = os.path.join(test_data_dir, 'test_requirements.tmp')
        dependency_helper = DependencyHelper()
        dependency_helper.add_python_package(
            dependency=VersionedDependency(
                name='tensorflow', min_version='0.10.0', max_version='0.11.0'))
        dependency_helper.add_python_package(
            dependency=VersionedDependency(
                name='kubernetes', min_version='0.6.0'))
        dependency_helper.add_python_package(
            dependency=VersionedDependency(name='pytorch', max_version='0.3.0'))
        dependency_helper.generate_pip_requirements(temp_file)
        # Expected output: one pip-style requirement line per package, in
        # insertion order, with only the bounds that were provided.
        golden_requirement_payload = '''\
tensorflow >= 0.10.0, <= 0.11.0
kubernetes >= 0.6.0
pytorch <= 0.3.0
'''
        with open(temp_file, 'r') as f:
            target_requirement_payload = f.read()
        self.assertEqual(target_requirement_payload, golden_requirement_payload)
        os.remove(temp_file)
    def test_add_python_package(self):
        """Test add_python_package."""
        # prepare
        test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
        temp_file = os.path.join(test_data_dir, 'test_requirements.tmp')
        dependency_helper = DependencyHelper()
        dependency_helper.add_python_package(
            dependency=VersionedDependency(
                name='tensorflow', min_version='0.10.0', max_version='0.11.0'))
        dependency_helper.add_python_package(
            dependency=VersionedDependency(
                name='kubernetes', min_version='0.6.0'))
        # override=True replaces the earlier tensorflow constraint entirely.
        dependency_helper.add_python_package(
            dependency=VersionedDependency(
                name='tensorflow', min_version='0.12.0'),
            override=True)
        # override=False keeps the existing kubernetes constraint.
        dependency_helper.add_python_package(
            dependency=VersionedDependency(
                name='kubernetes', min_version='0.8.0'),
            override=False)
        dependency_helper.add_python_package(
            dependency=VersionedDependency(name='pytorch', version='0.3.0'))
        dependency_helper.generate_pip_requirements(temp_file)
        # An exact version renders as a pinned >=/<= pair (pytorch below).
        golden_requirement_payload = '''\
tensorflow >= 0.12.0
kubernetes >= 0.6.0
pytorch >= 0.3.0, <= 0.3.0
'''
        with open(temp_file, 'r') as f:
            target_requirement_payload = f.read()
        self.assertEqual(target_requirement_payload, golden_requirement_payload)
        os.remove(temp_file)
class TestGenerator(unittest.TestCase):
  """Tests for the dockerfile/requirements generation helpers."""
  def test_generate_dockerfile(self):
    """Test generate dockerfile.

    Checks both variants: without a requirements file only the payload
    files are ADDed; with ``requirement_filename`` an ADD + pip-install
    step is emitted before the payload files.
    """
    # prepare
    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    target_dockerfile = os.path.join(test_data_dir,
                                     'component.temp.dockerfile')
    golden_dockerfile_payload_one = '''\
FROM gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0
RUN apt-get update -y && apt-get install --no-install-recommends -y -q python3 python3-pip python3-setuptools
ADD main.py /ml/main.py
'''
    golden_dockerfile_payload_two = '''\
FROM gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0
RUN apt-get update -y && apt-get install --no-install-recommends -y -q python3 python3-pip python3-setuptools
ADD requirements.txt /ml/requirements.txt
RUN python3 -m pip install -r /ml/requirements.txt
ADD main.py /ml/main.py
'''
    # check
    _generate_dockerfile(
        filename=target_dockerfile,
        base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0',
        add_files={'main.py': '/ml/main.py'})
    with open(target_dockerfile, 'r') as f:
      target_dockerfile_payload = f.read()
    self.assertEqual(target_dockerfile_payload,
                     golden_dockerfile_payload_one)
    # Same base image, but now with a requirements file to install.
    _generate_dockerfile(
        filename=target_dockerfile,
        base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.10.0',
        requirement_filename='requirements.txt',
        add_files={'main.py': '/ml/main.py'})
    with open(target_dockerfile, 'r') as f:
      target_dockerfile_payload = f.read()
    self.assertEqual(target_dockerfile_payload,
                     golden_dockerfile_payload_two)
    # clean up
    os.remove(target_dockerfile)
  def test_generate_requirement(self):
    """Test _dependency_to_requirements rendering of version bounds."""
    # prepare
    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    temp_file = os.path.join(test_data_dir, 'test_requirements.tmp')
    dependencies = [
        VersionedDependency(
            name='tensorflow', min_version='0.10.0', max_version='0.11.0'),
        VersionedDependency(name='kubernetes', min_version='0.6.0'),
    ]
    _dependency_to_requirements(dependencies, filename=temp_file)
    golden_payload = '''\
tensorflow >= 0.10.0, <= 0.11.0
kubernetes >= 0.6.0
'''
    with open(temp_file, 'r') as f:
      target_payload = f.read()
    self.assertEqual(target_payload, golden_payload)
    os.remove(temp_file)
| 40.319372 | 134 | 0.662901 |
90255bd292a4394a1b517499b2e34ef5121681c4 | 1,682 | py | Python | azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_assembly_clr.py | CharaD7/azure-sdk-for-python | 9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c | [
"MIT"
] | null | null | null | azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_assembly_clr.py | CharaD7/azure-sdk-for-python | 9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c | [
"MIT"
] | null | null | null | azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_assembly_clr.py | CharaD7/azure-sdk-for-python | 9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .catalog_item import CatalogItem
class USqlAssemblyClr(CatalogItem):
    """A Data Lake Analytics catalog U-SQL assembly CLR item.
    :param compute_account_name: the name of the Data Lake Analytics account.
    :type compute_account_name: str
    :param version: the version of the catalog item.
    :type version: str
    :param database_name: the name of the database.
    :type database_name: str
    :param name: the name of the assembly.
    :type name: str
    :param clr_name: the name of the CLR.
    :type clr_name: str
    """
    # Maps Python attribute names to the wire (JSON) key for each field;
    # presumably consumed by the AutoRest/msrest (de)serializer — note the
    # file header says this code is generated and edits will be lost.
    _attribute_map = {
        'compute_account_name': {'key': 'computeAccountName', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'name': {'key': 'assemblyClrName', 'type': 'str'},
        'clr_name': {'key': 'clrName', 'type': 'str'},
    }
    def __init__(self, compute_account_name=None, version=None, database_name=None, name=None, clr_name=None):
        # Account name and version are handled by the CatalogItem base class.
        super(USqlAssemblyClr, self).__init__(compute_account_name=compute_account_name, version=version)
        self.database_name = database_name
        self.name = name
        self.clr_name = clr_name
| 39.116279 | 110 | 0.621284 |
f4996797a541881e2bc80668110134427090ce81 | 11,478 | py | Python | UI/PlatformWin.py | MaoGreenDou/ArtisticCloudBlog | bf0608864b0b91368f0dfd3bc593bbedaa7576ef | [
"MIT"
] | 1 | 2020-07-18T10:21:36.000Z | 2020-07-18T10:21:36.000Z | UI/PlatformWin.py | MaoGreenDou/ArtisticCloudBlog | bf0608864b0b91368f0dfd3bc593bbedaa7576ef | [
"MIT"
] | null | null | null | UI/PlatformWin.py | MaoGreenDou/ArtisticCloudBlog | bf0608864b0b91368f0dfd3bc593bbedaa7576ef | [
"MIT"
] | null | null | null | import sys
import UI.Ui_PlatformWin
import BLL.FileSystem
import BLL.ClientSocket
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QFileDialog, QMessageBox
import UI.TransferWin
import UI.CommentDialog
from PIL import Image
from UI.LoginWin import simpleMessageBox
# 用户界面类
class PlatformWin(QtWidgets.QMainWindow, UI.Ui_PlatformWin.Ui_PlatformWin):
    """Main platform window of the cloud art-blog client.

    Shows a list of artworks on the left and a detail page (image, title,
    caption, like/comment/save/delete buttons) on the right.  All server
    interaction goes through ``self.clientSocket``.

    NOTE(review): several methods read ``self.fileSystem`` (fileSaveAs,
    disablePage, upLoadWork) but ``__init__`` never assigns it — calling
    those methods as-is would raise AttributeError; confirm where
    ``fileSystem`` is supposed to be injected.
    """
    def __init__(self, parent=None):
        super(PlatformWin, self).__init__(parent)
        # Center the window on the screen at its initial 1200x800 size.
        screen = QtWidgets.QDesktopWidget().screenGeometry()
        size = QtCore.QSize(1200, 800)
        print(size)  # leftover debug output
        # NOTE(review): "/ 2" yields a float in Python 3; QWidget.move
        # expects ints — confirm this works on the targeted PyQt5 version.
        self.move((screen.width() - size.width()) / 2,
                  (screen.height() - size.height()) / 2)
        # Build the widgets generated by Qt Designer.
        self.setupUi(self)
        # Window title ("Cloud Art Blog").
        self.setWindowTitle("云艺术博客")
        # Metadata dicts for the artworks currently shown in the list.
        self.fileInfoList = []
        # Auth token, set after login via setUserInfo().
        self.token = ""
        # Logged-in account name, set via setUserInfo().
        self.account = ""
        # Client socket used for all server communication.
        self.clientSocket = BLL.ClientSocket.ClientSocket()
        # Wire UI actions to handlers:
        # upload a local image
        self.upLoadButton.clicked.connect(self.upLoadWork)
        # look up a specific artwork by id
        self.downLoadButton.clicked.connect(self.downLoadWork_search)
        # double-click a list item to open it
        self.listWidget.doubleClicked.connect(self.fileOpen)
        # open the transfer (work) window
        self.transferButton.clicked.connect(self.transfer)
        # browse popular artworks
        self.browseButton.clicked.connect(self.downLoadWork_browse)
        # show my own artworks
        self.myWorkButton.clicked.connect(self.downLoadWork_myWork)
        # open the comment dialog
        self.commentButton.clicked.connect(self.comment)
        # like the selected artwork
        self.starButton.clicked.connect(self.star)
        # save the selected artwork locally
        self.saveAsButton.clicked.connect(self.fileSaveAs)
        # delete the selected artwork
        self.deleteButton.clicked.connect(self.fileDelete)
        # Secondary work window, injected later via setMainWin().
        self.mainWin = None
    # Like the currently selected artwork.
    def star(self):
        # Row of the currently selected list item.
        curRow = self.listWidget.currentRow()
        # Artwork id of that item.
        iid = self.fileInfoList[curRow]['iid']
        # Falsy response means this user already liked the artwork;
        # otherwise the response is the updated like count.
        response = self.clientSocket.star(self.token, iid)
        if not response:
            simpleMessageBox('提示', '已点赞过该作品')
        else:
            simpleMessageBox('提示', '点赞成功,当前点赞数: ' + response)
    # Open the comment dialog for the selected artwork.
    def comment(self):
        print("comment:")  # leftover debug output
        curRow = self.listWidget.currentRow()
        commentList = self.fileInfoList[curRow]['comment']
        # Artwork id of the selected item.
        iid = self.fileInfoList[curRow]['iid']
        cDlg = UI.CommentDialog.CommentDialog(self.token, iid, commentList)
        cDlg.exec_()
    # Inject the secondary work window (called by the application bootstrap).
    def setMainWin(self, mainWin):
        self.mainWin = mainWin
    # Open the work window, passing along the current credentials.
    def transfer(self):
        self.mainWin.setUserInfo(self.token, self.account)
        self.mainWin.show()
    # Export the selected artwork to a local file.
    def fileSaveAs(self):
        # Ask the user for a destination path.
        filePath, fileType = QFileDialog.getSaveFileName(self.centralwidget, '选择导出路径',
                                                         self.fileSystem.traImgDirPath, "png (*.png);;gif (*.gif)")
        # User cancelled the dialog.
        if filePath == '':
            return
        # Row of the currently selected list item.
        curRow = self.listWidget.currentRow()
        # Source path of the cached artwork file.
        oldFilePath = self.fileInfoList[curRow]['filePath']
        # Delegate the copy to the BLL layer.
        self.fileSystem.fileSaveAs(oldFilePath, filePath)
    # Show the selected artwork in the right-hand detail page.
    def fileOpen(self):
        # Row of the currently selected list item.
        curRow = self.listWidget.currentRow()
        # Local path of the cached image.
        path = self.fileInfoList[curRow]['filePath']
        if self.fileInfoList[curRow]['fileType'] == "gif":
            # Animated GIF: display through QMovie.
            img = QtGui.QMovie(path)
            image = Image.open(path)
            imgWidth = image.width
            imgHeight = image.height
            # Maximum display size.
            maxSize = (500, 500)
            # Shrink proportionally (by 10% steps) until it fits maxSize.
            while imgWidth > maxSize[0] or imgHeight > maxSize[1]:
                imgWidth = imgWidth * 0.9
                imgHeight = imgHeight * 0.9
            newSize = QtCore.QSize(imgWidth, imgHeight)
            img.setScaledSize(newSize)
            # Attach the movie to the image label ...
            self.imgLabel.setMovie(img)
            # ... and start playback.
            img.start()
        else:
            img = QtGui.QImage(path)
            print(img.size())  # leftover debug output
            # Maximum display size.
            maxSize = QtCore.QSize(500, 500)
            # Scale down proportionally if the image exceeds maxSize.
            if img.width() > maxSize.width() or img.height() > maxSize.height():
                img = QtGui.QPixmap.fromImage(img.scaled(maxSize, QtCore.Qt.KeepAspectRatio,
                                                         QtCore.Qt.SmoothTransformation))
            else:
                img = QtGui.QPixmap.fromImage(img)
            self.imgLabel.setPixmap(img)
        # Fill in title and caption.
        self.titleEdit.setText(self.fileInfoList[curRow]['fileName'])
        self.textLabel.setText(self.fileInfoList[curRow]['text'])
        # Enable the detail page and its action buttons.
        self.pageWidget.setEnabled(True)
        self.starButton.setVisible(True)
        self.commentButton.setVisible(True)
        self.saveAsButton.setVisible(True)
        # Only the author may delete an artwork.
        if self.fileInfoList[curRow]['account'] == self.account:
            self.deleteButton.setVisible(True)
        else:
            self.deleteButton.setVisible(False)
    # Delete the selected artwork on the server (after confirmation).
    def fileDelete(self):
        # Confirmation dialog with customized Yes/No button texts.
        messageBox = QMessageBox()
        messageBox.setWindowTitle('提示')
        messageBox.setText('确定删除该作品?')
        messageBox.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
        buttonY = messageBox.button(QMessageBox.Yes)
        buttonY.setText('确定')
        buttonN = messageBox.button(QMessageBox.No)
        buttonN.setText('取消')
        messageBox.exec_()
        response = messageBox.clickedButton()
        if response == buttonN:
            return
        # Row of the currently selected list item.
        curRow = self.listWidget.currentRow()
        # Artwork id of that item.
        iid = self.fileInfoList[curRow]['iid']
        response = self.clientSocket.deleteWork(self.token, iid)
        if response:
            simpleMessageBox('提示', '删除成功,作品id: ' + iid)
            # Refresh the list (now showing only my works).
            self.downLoadWork_myWork()
        else:
            simpleMessageBox('提示', '删除失败')
    # Build one composite list-item widget for a single artwork.
    def createFileListItem(self, fileInfo):
        print("createFileListItem")  # leftover debug output
        itemWidget = QtWidgets.QWidget()
        # Use the artwork's file name as the widget object name.
        itemWidget.setObjectName(fileInfo['fileName'])
        vLayout = QtWidgets.QVBoxLayout(itemWidget)
        # Title label; overly long titles are elided with "...".
        titleLabel = QtWidgets.QLabel(itemWidget)
        fontMetrics = QtGui.QFontMetrics(QtGui.QFont())
        fileName = fontMetrics.elidedText(fileInfo['fileName'], QtCore.Qt.ElideRight, 120)
        titleLabel.setText(fileName)
        titleLabel.setFont(QtGui.QFont("微软雅黑", 12, QtGui.QFont.Bold))
        # Comment-count label.
        commentNum = len(fileInfo['comment'])
        commentNumLabel = QtWidgets.QLabel(itemWidget)
        commentNumLabel.setText("评论:" + str(commentNum))
        # Artwork-id label.
        IDLabel = QtWidgets.QLabel(itemWidget)
        IDLabel.setText("序号:" + fileInfo['iid'])
        # Author label.
        accountLabel = QtWidgets.QLabel(itemWidget)
        accountLabel.setText("作者:" + fileInfo['account'])
        # Like-count label.
        starNumLabel = QtWidgets.QLabel(itemWidget)
        starNumLabel.setText("点赞:" + fileInfo['starNum'])
        # Stack the labels vertically.
        vLayout.addWidget(titleLabel)
        vLayout.addWidget(IDLabel)
        vLayout.addWidget(accountLabel)
        vLayout.addWidget(starNumLabel)
        vLayout.addWidget(commentNumLabel)
        # Margins: left/top/right/bottom.
        vLayout.setContentsMargins(10, 10, 0, 10)
        # Fixed height; width adapts to the list.
        itemWidget.setFixedHeight(180)
        return itemWidget
    # Rebuild the artwork list from self.fileInfoList.
    def showFileList(self):
        print("showFileList")  # leftover debug output
        # Drop the old items.
        self.listWidget.clear()
        # One composite widget per artwork.
        for fileInfo in self.fileInfoList:
            itemWidget = self.createFileListItem(fileInfo)
            item = QtWidgets.QListWidgetItem(self.listWidget)
            item.setSizeHint(QtCore.QSize(200, 180))
            self.listWidget.setItemWidget(item, itemWidget)
    # Hide/reset the right-hand detail page while switching views.
    def disablePage(self):
        self.pageWidget.setEnabled(False)
        # Hide all action buttons.
        self.starButton.setVisible(False)
        self.commentButton.setVisible(False)
        self.deleteButton.setVisible(False)
        self.saveAsButton.setVisible(False)
        # Replace the current movie with an empty one so the old GIF file
        # is released and can be deleted.
        img = QtGui.QMovie("")
        self.imgLabel.setMovie(img)
        # Clear the caption label.
        self.textLabel.clear()
        # Remove stale files from the local BrowseImage temp directory.
        self.fileSystem.dirDelete(self.fileSystem.browsePath)
    # Show the list of popular artworks.
    def downLoadWork_browse(self):
        print("downLoadWork_browse")  # leftover debug output
        # Reset the detail page.
        self.disablePage()
        # Fetch artwork files + metadata from the server.
        self.fileInfoList = self.clientSocket.receiveWork_browse(self.token)
        # Rebuild the list widget.
        self.showFileList()
    # Show the list of the logged-in user's own artworks.
    def downLoadWork_myWork(self):
        print("downLoadWork_myWork")  # leftover debug output
        # Reset the detail page.
        self.disablePage()
        # Fetch artwork files + metadata from the server.
        self.fileInfoList = self.clientSocket.receiveWork_myWork(self.token)
        # Rebuild the list widget.
        self.showFileList()
    # Look up a single artwork by its id.
    def downLoadWork_search(self):
        print("downLoadWork_search")  # leftover debug output
        # Ask the user for the artwork id.
        iid, okPressed = QtWidgets.QInputDialog.getText(self, "查找", "图片id:",
                                                        QtWidgets.QLineEdit.Normal)
        # Cancelled or empty input: nothing to do.
        if iid == '' or not okPressed:
            return
        # Reset the detail page.
        self.disablePage()
        # Fetch the matching artwork(s) from the server.
        self.fileInfoList = self.clientSocket.receiveWork_search(self.token, iid)
        # Rebuild the list widget.
        self.showFileList()
        # Found something: select and open the (single) result.
        if self.listWidget.count() >= 1:
            simpleMessageBox('提示', '查找成功')
            self.listWidget.setCurrentRow(0)
            self.fileOpen()
        # Nothing found.
        else:
            simpleMessageBox('提示', '查找失败')
    # Upload a local artwork to the server.
    def upLoadWork(self):
        # Ask for the local image to upload.
        filePath, fileType = QFileDialog.getOpenFileName(self.centralwidget, '选择需要的上传图片',
                                                         self.fileSystem.traImgDirPath, "png (*.png);;gif (*.gif)")
        # User cancelled the dialog.
        if filePath == '':
            return
        # Ask for a one-line caption to accompany the image.
        text, okPressed = QtWidgets.QInputDialog.getText(self, "配文", "输入一行文字与图片相配",
                                                         QtWidgets.QLineEdit.Normal)
        if not okPressed:
            return
        # Upload; the server replies with the new artwork's id.
        iid = self.clientSocket.sendWork(self.token, filePath, text)
        # Tell the user and refresh the "my works" list.
        simpleMessageBox('提示', '已将作品上传至服务器,id为' + iid)
        self.downLoadWork_myWork()
    # Called after login: store the credentials and populate the UI.
    def setUserInfo(self, token, account):
        self.token = token
        self.account = account
        # Show the account name in the header.
        self.accountLabel.setText(self.account)
        # Populate the list with the user's own artworks.
        self.downLoadWork_myWork()
def main():
    """Launch the platform window standalone with dummy credentials."""
    application = QtWidgets.QApplication(sys.argv)
    window = PlatformWin()
    window.setUserInfo("123", "123")
    window.show()
    application.exec_()
if __name__ == '__main__':
    main()
| 33.561404 | 115 | 0.59662 |
b31765e2371a1834a694abcf7999e8a43361d4b7 | 6,287 | py | Python | src/sage/schemes/hyperelliptic_curves/jacobian_homset.py | bopopescu/sage-5 | 9d85b34956ca2edd55af307f99c5d3859acd30bf | [
"BSL-1.0"
] | 2 | 2021-08-20T00:30:35.000Z | 2021-11-17T10:54:00.000Z | src/sage/schemes/hyperelliptic_curves/jacobian_homset.py | bopopescu/sage-5 | 9d85b34956ca2edd55af307f99c5d3859acd30bf | [
"BSL-1.0"
] | null | null | null | src/sage/schemes/hyperelliptic_curves/jacobian_homset.py | bopopescu/sage-5 | 9d85b34956ca2edd55af307f99c5d3859acd30bf | [
"BSL-1.0"
] | null | null | null | """
Rational point sets on a Jacobian
EXAMPLES::
sage: x = QQ['x'].0
sage: f = x^5 + x + 1
sage: C = HyperellipticCurve(f); C
Hyperelliptic Curve over Rational Field defined by y^2 = x^5 + x + 1
sage: C(QQ)
Set of rational points of Hyperelliptic Curve over Rational Field defined by y^2 = x^5 + x + 1
sage: P = C([0,1,1])
sage: J = C.jacobian(); J
Jacobian of Hyperelliptic Curve over Rational Field defined by y^2 = x^5 + x + 1
sage: Q = J(QQ)(P); Q
(x, y - 1)
sage: Q + Q
(x^2, y - 1/2*x - 1)
sage: Q*3
(x^2 - 1/64*x + 1/8, y + 255/512*x + 65/64)
::
sage: F.<a> = GF(3)
sage: R.<x> = F[]
sage: f = x^5-1
sage: C = HyperellipticCurve(f)
sage: J = C.jacobian()
sage: X = J(F)
sage: a = x^2-x+1
sage: b = -x +1
sage: c = x-1
sage: d = 0
sage: D1 = X([a,b])
sage: D1
(x^2 + 2*x + 1, y + x + 2)
sage: D2 = X([c,d])
sage: D2
(x + 2, y)
sage: D1+D2
(x^2 + 2*x + 2, y + 2*x + 1)
"""
#*****************************************************************************
# Copyright (C) 2006 David Kohel <kohel@maths.usyd.edu>
# Distributed under the terms of the GNU General Public License (GPL)
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.rings.all import is_Polynomial, PolynomialRing, Integer, is_Integer, ZZ
from sage.schemes.generic.homset import SchemeHomset_points
from sage.schemes.generic.morphism import is_SchemeMorphism
from sage.schemes.generic.spec import Spec, is_Spec
from jacobian_morphism import JacobianMorphism_divisor_class_field
class JacobianHomset_divisor_classes(SchemeHomset_points):
    """Set of rational points of a hyperelliptic Jacobian over a base ring.

    Points are represented by Mumford divisors, i.e. pairs of polynomials
    ``(a, b)`` wrapped in ``JacobianMorphism_divisor_class_field``.
    """
    def __init__(self, Y, X, **kwds):
        # R: base ring of the Jacobian; S: coordinate ring of the domain Y.
        R = X.base_ring()
        S = Y.coordinate_ring()
        SchemeHomset_points.__init__(self, Y, X, **kwds)
        P2 = X.curve()._printing_ring
        if S != R:
            # Rebuild the printing ring over S so points print with the
            # same variable names but coefficients in the extension ring.
            y = str(P2.gen())
            x = str(P2.base_ring().gen())
            P1 = PolynomialRing(S,name=x)
            P2 = PolynomialRing(P1,name=y)
        self._printing_ring = P2
    def __call__(self, P):
        r"""
        Returns a rational point P in the abstract Homset J(K), given:
        0. A point P in J = Jac(C), returning P; 1. A point P on the curve
        C such that J = Jac(C), where C is an odd degree model, returning
        [P - oo]; 2. A pair of points (P, Q) on the curve C such that J =
        Jac(C), returning [P-Q]; 2. A list of polynomials (a,b) such that
        `b^2 + h*b - f = 0 mod a`, returning [(a(x),y-b(x))].
        EXAMPLES::
            sage: P.<x> = PolynomialRing(QQ)
            sage: f = x^5 - x + 1; h = x
            sage: C = HyperellipticCurve(f,h,'u,v')
            sage: P = C(0,1,1)
            sage: J = C.jacobian()
            sage: Q = J(QQ)(P)
            sage: for i in range(6): i*Q
            (1)
            (u, v - 1)
            (u^2, v + u - 1)
            (u^2, v + 1)
            (u, v + 1)
            (1)
        ::
            sage: F.<a> = GF(3)
            sage: R.<x> = F[]
            sage: f = x^5-1
            sage: C = HyperellipticCurve(f)
            sage: J = C.jacobian()
            sage: X = J(F)
            sage: a = x^2-x+1
            sage: b = -x +1
            sage: c = x-1
            sage: d = 0
            sage: D1 = X([a,b])
            sage: D1
            (x^2 + 2*x + 1, y + x + 2)
            sage: D2 = X([c,d])
            sage: D2
            (x + 2, y)
            sage: D1+D2
            (x^2 + 2*x + 2, y + 2*x + 1)
        """
        # Case 0: the integer 0 denotes the identity element (1, 0).
        if isinstance(P,(int,long,Integer)) and P == 0:
            R = PolynomialRing(self.value_ring(), 'x')
            return JacobianMorphism_divisor_class_field(self, (R(1),R(0)))
        elif isinstance(P,(list,tuple)):
            # [0] also denotes the identity element.
            if len(P) == 1 and P[0] == 0:
                R = PolynomialRing(self.value_ring(), 'x')
                return JacobianMorphism_divisor_class_field(self, (R(1),R(0)))
            elif len(P) == 2:
                # Mumford pair (a, b): coerce integers to polynomials
                # over the value ring before wrapping.
                P1 = P[0]
                P2 = P[1]
                if is_Integer(P1) and is_Integer(P2):
                    R = PolynomialRing(self.value_ring(), 'x')
                    P1 = R(P1)
                    P2 = R(P2)
                    return JacobianMorphism_divisor_class_field(self, tuple([P1,P2]))
                if is_Integer(P1) and is_Polynomial(P2):
                    R = PolynomialRing(self.value_ring(), 'x')
                    P1 = R(P1)
                    return JacobianMorphism_divisor_class_field(self, tuple([P1,P2]))
                if is_Integer(P2) and is_Polynomial(P1):
                    R = PolynomialRing(self.value_ring(), 'x')
                    P2 = R(P2)
                    return JacobianMorphism_divisor_class_field(self, tuple([P1,P2]))
                if is_Polynomial(P1) and is_Polynomial(P2):
                    return JacobianMorphism_divisor_class_field(self, tuple(P))
                # A pair of curve points (P, Q) denotes the class [P - Q].
                if is_SchemeMorphism(P1) and is_SchemeMorphism(P2):
                    return self(P1) - self(P2)
            raise TypeError, "Argument P (= %s) must have length 2."%P
        # Already a divisor class in this homset: return it unchanged.
        elif isinstance(P,JacobianMorphism_divisor_class_field) and self == P.parent():
            return P
        # A single curve point (x0, y0) denotes the class [P - oo].
        elif is_SchemeMorphism(P):
            x0 = P[0]; y0 = P[1]
            R, x = PolynomialRing(self.value_ring(), 'x').objgen()
            return self((x-x0,R(y0)))
        raise TypeError, "Argument P (= %s) does not determine a divisor class"%P
    def _cmp_(self,other):
        # Homsets compare equal iff their underlying curves agree;
        # otherwise an arbitrary -1 is returned (old-style cmp protocol).
        if self.curve() == other.curve():
            return 0
        else:
            return -1
    def _morphism(self, *args, **kwds):
        # Element constructor hook used by the coercion framework.
        return JacobianMorphism_divisor_class_field(*args, **kwds)
    def curve(self):
        # The hyperelliptic curve whose Jacobian this homset belongs to.
        return self.codomain().curve()
    def value_ring(self):
        """
        Returns S for a homset X(T) where T = Spec(S).
        """
        T = self.domain()
        if is_Spec(T):
            return T.coordinate_ring()
        else:
            raise TypeError, "Domain of argument must be of the form Spec(S)."
    def base_extend(self, R):
        # Only the trivial base extension (over ZZ) is supported.
        if R != ZZ:
            raise NotImplementedError, "Jacobian point sets viewed as modules over rings other than ZZ not implemented"
        return self
| 35.122905 | 119 | 0.501352 |
47519ae30caa6577ef3a8cc3db957bd63730687e | 460 | py | Python | photographic/users/migrations/0006_alter_user_followers.py | adrianeriksen/photographic | 5418a6a79850fa887242f273a35ef9ab585d9d1a | [
"MIT"
] | null | null | null | photographic/users/migrations/0006_alter_user_followers.py | adrianeriksen/photographic | 5418a6a79850fa887242f273a35ef9ab585d9d1a | [
"MIT"
] | 6 | 2021-04-25T08:10:51.000Z | 2021-05-25T17:58:32.000Z | photographic/users/migrations/0006_alter_user_followers.py | adrianeriksen/photographic | 5418a6a79850fa887242f273a35ef9ab585d9d1a | [
"MIT"
] | null | null | null | # Generated by Django 3.2.8 on 2021-10-24 10:28
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.2.8) migration.

    Alters ``User.followers`` to a self-referential M2M with the reverse
    accessor renamed to ``following``.
    """
    dependencies = [
        ('users', '0005_user_followers'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='followers',
            field=models.ManyToManyField(related_name='following', to=settings.AUTH_USER_MODEL),
        ),
    ]
28bade56cfc09f456d5821ca6e41625094fa2596 | 4,992 | py | Python | users/views/permission.py | yileye/OpenSA | e935480da142d1d7d170db84360f72cc7327a77e | [
"Apache-2.0"
] | 280 | 2019-01-11T08:32:36.000Z | 2022-03-27T16:53:41.000Z | users/views/permission.py | leoiceo/opensa | 55d01427d8aa35492ea3492b042659b12959ddd6 | [
"Apache-2.0"
] | 19 | 2020-02-11T23:37:22.000Z | 2022-03-11T23:38:53.000Z | users/views/permission.py | leoiceo/opensa | 55d01427d8aa35492ea3492b042659b12959ddd6 | [
"Apache-2.0"
] | 104 | 2019-01-11T08:40:20.000Z | 2022-03-27T16:54:17.000Z | #!/usr/bin/env python
# ~*~ coding: utf-8 ~*~
# by leoiceo
from __future__ import unicode_literals
import json
from django.http import HttpResponseRedirect, HttpResponse
from django.views.generic import TemplateView, ListView, View, CreateView, UpdateView, DeleteView, DetailView
from django.utils.translation import ugettext as _
from django.conf import settings
from users.models import PermissionList
from django.utils.translation import ugettext_lazy as _
from django.db.models import Q
from pure_pagination import PageNotAnInteger, Paginator
from django.urls import reverse_lazy
from ..forms import PermissionListForm
from opensa.api import LoginPermissionRequired
class PermissionListAll(LoginPermissionRequired,ListView):
    """Paginated permission list, optionally filtered by ``?name=`` query."""
    model = PermissionList
    template_name = 'users/permission-list.html'
    queryset = PermissionList.objects.all()
    ordering = ('id',)
    def get_context_data(self, **kwargs):
        # Current page number; fall back to page 1 on bad input.
        try:
            page = self.request.GET.get('page', 1)
        except PageNotAnInteger as e:
            page = 1
        # Paginate the queryset produced by get_queryset() (Django calls
        # get_queryset() before get_context_data(), so self.queryset is
        # already filtered at this point). Page size comes from settings.
        p = Paginator(self.queryset, getattr(settings, 'DISPLAY_PER_PAGE'), request=self.request)
        permission_list = p.page(page)
        context = {
            "users_active": "active",
            "users_permission_list": "active",
            "permission_list": permission_list,
        }
        kwargs.update(context)
        return super().get_context_data(**kwargs)
    def get_queryset(self):
        # ``?name=`` matches either the permission name or its URL
        # (case-insensitive); results are newest-first. Without a filter
        # the full list is returned in id order.
        self.queryset = super().get_queryset()
        if self.request.GET.get('name'):
            query = self.request.GET.get('name', None)
            self.queryset = self.queryset.filter(Q(name__icontains=query)|
                                                 Q(url__icontains=query)
                                                 ).order_by('-id')
        else:
            self.queryset = self.queryset.all().order_by('id')
        return self.queryset
class PermissionAdd(LoginPermissionRequired,CreateView):
    """Create a new ``PermissionList`` entry."""
    model = PermissionList
    form_class = PermissionListForm
    template_name = 'users/permission-add-update.html'
    success_url = reverse_lazy('users:permission_list')
    def get_context_data(self, **kwargs):
        context = {
            "users_active": "active",
            "users_permission_list": "active",
        }
        # Remember where to return after saving: an explicit ``__next__``
        # form field wins, otherwise fall back to the HTTP referer.
        if '__next__' in self.request.POST:
            context['i__next__'] = self.request.POST['__next__']
        else:
            try:
                context['i__next__'] = self.request.META['HTTP_REFERER']
            except Exception as e:
                # No referer header present — template must cope without it.
                pass
        kwargs.update(context)
        return super().get_context_data(**kwargs)
    def form_invalid(self, form):
        # Default behavior; kept as an explicit hook point.
        return super(PermissionAdd, self).form_invalid(form)
    def form_valid(self, form):
        # Default behavior; kept as an explicit hook point.
        return super().form_valid(form)
class PermissionUpdate(LoginPermissionRequired,UpdateView):
    """Edit an existing ``PermissionList`` entry."""
    model = PermissionList
    form_class = PermissionListForm
    template_name = 'users/permission-add-update.html'
    success_url = reverse_lazy('users:permission_list')
    def get_context_data(self, **kwargs):
        context = {
            "users_active": "active",
            "users_permission_list": "active",
        }
        # Remember where to return after saving: an explicit ``__next__``
        # form field wins, otherwise fall back to the HTTP referer.
        if '__next__' in self.request.POST:
            context['i__next__'] = self.request.POST['__next__']
        else:
            try:
                context['i__next__'] = self.request.META['HTTP_REFERER']
            except Exception as e:
                # No referer header present — template must cope without it.
                pass
        kwargs.update(context)
        return super(PermissionUpdate, self).get_context_data(**kwargs)
    def get_form_kwargs(self):
        # Pass the object's current ``lable_name`` (sic) to the form so it
        # can preselect the label; None when the object cannot be found.
        kwargs = super(PermissionUpdate, self).get_form_kwargs()
        pk = self.kwargs.get(self.pk_url_kwarg, None)
        try:
            lable_name = PermissionList.objects.get(pk=pk).lable_name
        except Exception as e:
            lable_name = None
        kwargs['lable_name'] = lable_name
        return kwargs
    def form_invalid(self, form):
        # Default behavior; kept as an explicit hook point.
        return super(PermissionUpdate, self).form_invalid(form)
    def get_success_url(self):
        # Redirect back to the page recorded in the ``__next__`` field.
        self.url = self.request.POST['__next__']
        return self.url
class PermissionAllDel(LoginPermissionRequired, DeleteView):
    """Delete one or many ``PermissionList`` rows via POST.

    Accepts either ``nid`` (a single primary key) or a list of ``id``
    values and always answers with a JSON payload of the form
    ``{"status": bool, "error": str | None}``.
    """
    model = PermissionList
    def post(self, request, *args, **kwargs):
        ret = {'status': True, 'error': None, }
        try:
            if request.POST.get('nid'):
                # Single-row delete.
                pk = request.POST.get('nid', None)
                PermissionList.objects.get(id=pk).delete()
            else:
                # Bulk delete of all posted ids.
                ids = request.POST.getlist('id', None)
                PermissionList.objects.filter(id__in=ids).delete()
        except Exception as e:
            ret['status'] = False
            ret['error'] = _('Deletion error,{}'.format(e))
        # Return AFTER the try/except rather than from a ``finally`` block:
        # a ``return`` inside ``finally`` silently swallows any in-flight
        # exception (including SystemExit/KeyboardInterrupt).
        return HttpResponse(json.dumps(ret))
f59d4441e39cb12dc9e48dcd3a98d4f8f7bea6fe | 2,503 | py | Python | .ci/jenkins/conf.py | ytimenkov/conan | 89eb275b9696b308aaaa1fbfaa0f8cdab284a764 | [
"MIT"
] | null | null | null | .ci/jenkins/conf.py | ytimenkov/conan | 89eb275b9696b308aaaa1fbfaa0f8cdab284a764 | [
"MIT"
] | null | null | null | .ci/jenkins/conf.py | ytimenkov/conan | 89eb275b9696b308aaaa1fbfaa0f8cdab284a764 | [
"MIT"
] | null | null | null | import argparse
import os
import platform
from contextlib import contextmanager
# Absolute paths of the Python interpreters available on each CI platform,
# keyed by the version tag used throughout the build scripts.
winpylocation = {"py27": "C:\\Python27\\python.exe",
                 "py34": "C:\\Python34\\python.exe",
                 "py36": "C:\\Python36\\python.exe"}
macpylocation = {"py27": "/usr/bin/python", # /Users/jenkins_ci/.pyenv/versions/2.7.11/bin/python",
                 "py34": "/Users/jenkins_ci/.pyenv/versions/3.4.7/bin/python",
                 "py36": "/Users/jenkins_ci/.pyenv/versions/3.6.3/bin/python"}
linuxpylocation = {"py27": "/usr/bin/python2.7",
                   "py34": "/usr/bin/python3.4",
                   "py36": "/usr/bin/python3.6"}
def get_environ(tmp_path):
    """Return extra environment variables for the test run.

    On Windows this points Conan at the msys bash and a short user home
    under ``tmp_path``; on every other platform it is empty.
    """
    if platform.system() != "Windows":
        return {}
    return {
        "CONAN_BASH_PATH": "c:/tools/msys64/usr/bin/bash",
        "CONAN_USER_HOME_SHORT": os.path.join(tmp_path, ".conan"),
    }
class Extender(argparse.Action):
    """Allows to use the same flag several times in a command and creates a list with the values.
    For example:
        conan install MyPackage/1.2@user/channel -o qt:value -o mode:2 -s cucumber:true
    It creates:
        options = ['qt:value', 'mode:2']
        settings = ['cucumber:true']
    """
    def __call__(self, parser, namespace, values, option_strings=None): # @UnusedVariable
        # Need None here incase `argparse.SUPPRESS` was supplied for `dest`
        dest = getattr(namespace, self.dest, None)
        # First occurrence of the flag (dest is still the shared default or
        # not list-like): start a fresh list owned by this namespace.
        if not hasattr(dest, 'extend') or dest == self.default:
            dest = []
            setattr(namespace, self.dest, dest)
            # if default isn't set to None, this method might be called
            # with the default as `values` for other arguments which
            # share this destination.
            parser.set_defaults(**{self.dest: None})
        # Accumulate: extend with an iterable of values, or append a single
        # scalar value.
        # NOTE(review): list.extend on a non-iterable raises TypeError, not
        # ValueError — confirm which exception this fallback was meant for.
        try:
            dest.extend(values)
        except ValueError:
            dest.append(values)
@contextmanager
def environment_append(env_vars):
    """Temporarily extend ``os.environ`` with ``env_vars``.

    List values are joined with ``os.pathsep``; if such a variable already
    exists, its previous value is appended after the new entries so the
    new ones take precedence (PATH-like semantics).  Plain string values
    simply replace any existing value.  On exit the original environment
    is restored exactly.

    Unlike a naive implementation, the caller's ``env_vars`` dict is never
    mutated.

    :param env_vars: mapping of variable name -> str or list of str.
    """
    old_env = dict(os.environ)
    new_env = {}
    for name, value in env_vars.items():
        if isinstance(value, list):
            joined = os.pathsep.join(value)
            if name in old_env:
                joined += os.pathsep + old_env[name]
            new_env[name] = joined
        else:
            new_env[name] = value
    os.environ.update(new_env)
    try:
        yield
    finally:
        # Restore the pre-context environment verbatim.
        os.environ.clear()
        os.environ.update(old_env)
@contextmanager
def chdir(newdir):
    """Temporarily switch the process working directory to ``newdir``."""
    previous = os.getcwd()
    os.chdir(newdir)
    try:
        yield
    finally:
        # Always return to where we started, even on error.
        os.chdir(previous)
| 33.373333 | 100 | 0.596884 |
6649e91518aab9dc19b7ab454f6c00776f504eda | 99 | py | Python | enthought/tvtk/pyface/ui/wx/scene_editor.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | 3 | 2016-12-09T06:05:18.000Z | 2018-03-01T13:00:29.000Z | enthought/tvtk/pyface/ui/wx/scene_editor.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | 1 | 2020-12-02T00:51:32.000Z | 2020-12-02T08:48:55.000Z | enthought/tvtk/pyface/ui/wx/scene_editor.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | null | null | null | # proxy module
from __future__ import absolute_import
from tvtk.pyface.ui.wx.scene_editor import *
| 24.75 | 44 | 0.828283 |
50ec1be2ed89e801a113e86686a5d082d5560915 | 75,077 | py | Python | neutron/agent/linux/dhcp.py | tankertyp/openstack-learning | d729672663f170d0138ecf23b3c23df225c1b1b8 | [
"Apache-2.0"
] | null | null | null | neutron/agent/linux/dhcp.py | tankertyp/openstack-learning | d729672663f170d0138ecf23b3c23df225c1b1b8 | [
"Apache-2.0"
] | null | null | null | neutron/agent/linux/dhcp.py | tankertyp/openstack-learning | d729672663f170d0138ecf23b3c23df225c1b1b8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import collections
import copy
import io
import itertools
import os
import re
import shutil
import time
import netaddr
from neutron_lib.api.definitions import extra_dhcp_opt as edo_ext
from neutron_lib import constants
from neutron_lib import exceptions
from neutron_lib.utils import file as file_utils
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import fileutils
from oslo_utils import netutils
from oslo_utils import uuidutils
from neutron._i18n import _
from neutron.agent.common import utils as agent_common_utils
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import iptables_manager
from neutron.cmd import runtime_checks as checks
from neutron.common import utils as common_utils
from neutron.ipam import utils as ipam_utils
from neutron.privileged.agent.linux import dhcp as priv_dhcp
LOG = logging.getLogger(__name__)
DNS_PORT = 53
WIN2k3_STATIC_DNS = 249
NS_PREFIX = 'qdhcp-'
DNSMASQ_SERVICE_NAME = 'dnsmasq'
DHCP_RELEASE_TRIES = 3
DHCP_RELEASE_TRIES_SLEEP = 0.3
HOST_DHCPV6_TAG = 'tag:dhcpv6,'
# this variable will be removed when neutron-lib is updated with this value
DHCP_OPT_CLIENT_ID_NUM = 61
class DictModel(collections.abc.MutableMapping):
    """Convert dict into an object that provides attribute access to values."""
    # Single backing dict; no per-instance __dict__, so __setattr__ below
    # routes every other attribute into this mapping.
    __slots__ = ['_dictmodel_internal_storage']
    def __init__(self, *args, **kwargs):
        """Convert dict values to DictModel values."""
        temp_dict = dict(*args)
        self._dictmodel_internal_storage = {}
        def needs_upgrade(item):
            """Check if `item` is a dict and needs to be changed to DictModel.
            """
            return isinstance(item, dict) and not isinstance(item, DictModel)
        def upgrade(item):
            """Upgrade item if it needs to be upgraded."""
            if needs_upgrade(item):
                return DictModel(item)
            else:
                return item
        # Recursively wrap nested dicts (also inside lists/tuples) so that
        # attribute access works at every level.
        for key, value in itertools.chain(temp_dict.items(), kwargs.items()):
            if isinstance(value, (list, tuple)):
                # Keep the same type but convert dicts to DictModels
                self._dictmodel_internal_storage[key] = type(value)(
                    (upgrade(item) for item in value)
                )
            elif needs_upgrade(value):
                # Change dict instance values to DictModel instance values
                self._dictmodel_internal_storage[key] = DictModel(value)
            else:
                self._dictmodel_internal_storage[key] = value
    def __getattr__(self, name):
        # Called only when normal lookup fails: fall back to the mapping,
        # turning missing keys into AttributeError as attribute protocol
        # requires.
        try:
            if name == '_dictmodel_internal_storage':
                return super(DictModel, self).__getattr__(name)
            return self.__getitem__(name)
        except KeyError as e:
            raise AttributeError(e)
    def __setattr__(self, name, value):
        # Only the backing-store slot is a real attribute; everything else
        # is stored as a mapping entry.
        if name == '_dictmodel_internal_storage':
            super(DictModel, self).__setattr__(name, value)
        else:
            self._dictmodel_internal_storage[name] = value
    def __delattr__(self, name):
        del self._dictmodel_internal_storage[name]
    def __str__(self):
        # Deterministic, sorted "k=v" rendering.
        pairs = ['%s=%s' % (k, v) for k, v in
                 self._dictmodel_internal_storage.items()]
        return ', '.join(sorted(pairs))
    def __getitem__(self, name):
        return self._dictmodel_internal_storage[name]
    def __setitem__(self, name, value):
        self._dictmodel_internal_storage[name] = value
    def __delitem__(self, name):
        del self._dictmodel_internal_storage[name]
    def __iter__(self):
        return iter(self._dictmodel_internal_storage)
    def __len__(self):
        return len(self._dictmodel_internal_storage)
    def __copy__(self):
        # Shallow copy via the normal constructor (re-wraps nested dicts).
        return type(self)(self)
    def __deepcopy__(self, memo):
        # pylint: disable=no-value-for-parameter
        # Bypass __init__ and deep-copy only the backing store; register in
        # memo first so self-referential structures terminate.
        cls = self.__class__
        result = cls.__new__(cls)
        memo[id(self)] = result
        result._dictmodel_internal_storage = copy.deepcopy(
            self._dictmodel_internal_storage)
        return result
class NetModel(DictModel):
    """DictModel for a network that also knows its namespace name."""

    def __init__(self, *args, **kwargs):
        super(NetModel, self).__init__(*args, **kwargs)
        # Derived once from the network id; DictModel's __setattr__ stores
        # this in the internal mapping like any other key.
        self._ns_name = "{}{}".format(NS_PREFIX, self.id)

    @property
    def namespace(self):
        """Name of the network namespace used for this network."""
        return self._ns_name
class DhcpBase(object, metaclass=abc.ABCMeta):
    """Abstract base class for drivers managing DHCP for one network.

    Concrete drivers must implement enable/disable/active/
    reload_allocations; the classmethod hooks raise NotImplementedError
    by default and may be overridden.
    """

    def __init__(self, conf, network, process_monitor,
                 version=None, plugin=None):
        self.conf = conf
        self.network = network
        self.process_monitor = process_monitor
        self.device_manager = DeviceManager(self.conf, plugin)
        self.version = version

    @abc.abstractmethod
    def enable(self):
        """Enables DHCP for this network."""

    @abc.abstractmethod
    def disable(self, retain_port=False, block=False):
        """Disable dhcp for this network."""

    def restart(self):
        """Restart the dhcp service for the network."""
        # Keep the port so the interface/IP survive the bounce; block until
        # the old process is really gone before starting a new one.
        self.disable(retain_port=True, block=True)
        self.enable()

    # NOTE: abc.abstractproperty is deprecated since Python 3.3; the
    # stacked @property/@abc.abstractmethod form is equivalent.
    @property
    @abc.abstractmethod
    def active(self):
        """Boolean representing the running state of the DHCP server."""

    @abc.abstractmethod
    def reload_allocations(self):
        """Force the DHCP server to reload the assignment database."""

    @classmethod
    def existing_dhcp_networks(cls, conf):
        """Return a list of existing networks ids that we have configs for."""
        raise NotImplementedError()

    @classmethod
    def check_version(cls):
        """Execute version checks on DHCP server."""
        raise NotImplementedError()

    @classmethod
    def get_isolated_subnets(cls, network):
        """Returns a dict indicating whether or not a subnet is isolated"""
        raise NotImplementedError()

    @classmethod
    def should_enable_metadata(cls, conf, network):
        """True if the metadata-proxy should be enabled for the network."""
        raise NotImplementedError()
class DhcpLocalProcess(DhcpBase, metaclass=abc.ABCMeta):
    """DhcpBase driver that runs the DHCP server as a local process.

    Manages a per-network config directory, the interface state file and
    the lifecycle (spawn/monitor/kill) of the server process.
    """
    PORTS = []

    def __init__(self, conf, network, process_monitor, version=None,
                 plugin=None):
        super(DhcpLocalProcess, self).__init__(conf, network, process_monitor,
                                               version, plugin)
        # Per-network directory holding pid/host/opts/leases/interface files.
        self.confs_dir = self.get_confs_dir(conf)
        self.network_conf_dir = os.path.join(self.confs_dir, network.id)
        fileutils.ensure_tree(self.network_conf_dir, mode=0o755)

    @staticmethod
    def get_confs_dir(conf):
        # Normalized absolute path of the configured dhcp_confs root.
        return os.path.abspath(os.path.normpath(conf.dhcp_confs))

    def get_conf_file_name(self, kind):
        """Returns the file name for a given kind of config file."""
        return os.path.join(self.network_conf_dir, kind)

    def _remove_config_files(self):
        # Best-effort removal of the whole per-network config dir.
        shutil.rmtree(self.network_conf_dir, ignore_errors=True)

    @staticmethod
    def _get_all_subnets(network):
        # Local subnets plus any subnets not hosted by this agent's segment.
        non_local_subnets = getattr(network, 'non_local_subnets', [])
        return network.subnets + non_local_subnets

    def _enable_dhcp(self):
        """check if there is a subnet within the network with dhcp enabled."""
        # NOTE: intentionally checks only local subnets, not
        # _get_all_subnets().
        for subnet in self.network.subnets:
            if subnet.enable_dhcp:
                return True
        return False

    def enable(self):
        """Enables DHCP for this network by spawning a local process."""
        # Retry _enable until it succeeds or the 300s deadline passes.
        try:
            common_utils.wait_until_true(self._enable, timeout=300)
        except common_utils.WaitTimeout:
            LOG.error("Failed to start DHCP process for network %s",
                      self.network.id)

    def _enable(self):
        """One spawn attempt; returns True on success (for wait_until_true)."""
        try:
            # Stop a stale running instance first, keeping the port.
            if self.active:
                self.disable(retain_port=True, block=True)
            if self._enable_dhcp():
                fileutils.ensure_tree(self.network_conf_dir, mode=0o755)
                interface_name = self.device_manager.setup(self.network)
                self.interface_name = interface_name
                self.spawn_process()
            return True
        except exceptions.ProcessExecutionError as error:
            LOG.debug("Spawning DHCP process for network %s failed; "
                      "Error: %s", self.network.id, error)
            return False

    def _get_process_manager(self, cmd_callback=None):
        # ProcessManager handles pid-file tracking and process control.
        return external_process.ProcessManager(
            conf=self.conf,
            uuid=self.network.id,
            namespace=self.network.namespace,
            service=DNSMASQ_SERVICE_NAME,
            default_cmd_callback=cmd_callback,
            pid_file=self.get_conf_file_name('pid'),
            run_as_root=True)

    def disable(self, retain_port=False, block=False):
        """Disable DHCP for this network by killing the local process."""
        # Unregister from the monitor first so it does not respawn the
        # process we are about to kill.
        self.process_monitor.unregister(self.network.id, DNSMASQ_SERVICE_NAME)
        self._get_process_manager().disable()
        if block:
            common_utils.wait_until_true(lambda: not self.active)
        if not retain_port:
            self._destroy_namespace_and_port()
        self._remove_config_files()

    def _destroy_namespace_and_port(self):
        # Both teardown steps are best-effort and logged on failure.
        try:
            self.device_manager.destroy(self.network, self.interface_name)
        except RuntimeError:
            LOG.warning('Failed trying to delete interface: %s',
                        self.interface_name)

        try:
            ip_lib.delete_network_namespace(self.network.namespace)
        except RuntimeError:
            LOG.warning('Failed trying to delete namespace: %s',
                        self.network.namespace)

    def _get_value_from_conf_file(self, kind, converter=None):
        """A helper function to read a value from one of the state files."""
        file_name = self.get_conf_file_name(kind)
        msg = _('Error while reading %s')
        try:
            with open(file_name, 'r') as f:
                try:
                    # On success we return from inside the try; the debug
                    # log below only runs on failure.
                    return converter(f.read()) if converter else f.read()
                except ValueError:
                    msg = _('Unable to convert value in %s')
        except IOError:
            msg = _('Unable to access %s')
        LOG.debug(msg, file_name)
        return None

    @property
    def interface_name(self):
        # Persisted on disk so a restarted agent can find its interface.
        return self._get_value_from_conf_file('interface')

    @interface_name.setter
    def interface_name(self, value):
        interface_file_path = self.get_conf_file_name('interface')
        file_utils.replace_file(interface_file_path, value)

    def get_metadata_bind_interface(self, port):
        # Interface the metadata proxy should bind to for this port.
        return self.device_manager.get_interface_name(self.network, port)

    @property
    def active(self):
        return self._get_process_manager().active

    @abc.abstractmethod
    def spawn_process(self):
        pass
class Dnsmasq(DhcpLocalProcess):
    # The ports that need to be opened when security policies are active
    # on the Neutron port used for DHCP. These are provided as a convenience
    # for users of this class.
    PORTS = {constants.IP_VERSION_4:
             [(constants.PROTO_NAME_UDP, DNS_PORT),
              (constants.PROTO_NAME_TCP, DNS_PORT),
              (constants.PROTO_NAME_UDP, constants.DHCP_RESPONSE_PORT)],
             constants.IP_VERSION_6:
             [(constants.PROTO_NAME_UDP, DNS_PORT),
              (constants.PROTO_NAME_TCP, DNS_PORT),
              (constants.PROTO_NAME_UDP, constants.DHCPV6_RESPONSE_PORT)],
             }

    # Format strings for the dnsmasq tags attached to subnet/port stanzas
    # in the generated host and opts files.
    _SUBNET_TAG_PREFIX = 'subnet-%s'
    _PORT_TAG_PREFIX = 'port-%s'

    # Prefix marking a client-id field in a dnsmasq dhcp-host entry.
    _ID = 'id:'

    # Lazily probed capability flags; None means "not checked yet"
    # (see _is_dhcp_release6_supported / _is_dnsmasq_host_tag_supported).
    _IS_DHCP_RELEASE6_SUPPORTED = None
    _IS_HOST_TAG_SUPPORTED = None
    @classmethod
    def check_version(cls):
        # No up-front dnsmasq version check; individual capabilities are
        # probed lazily where needed (e.g. host-tag and dhcp_release6).
        pass
@classmethod
def existing_dhcp_networks(cls, conf):
"""Return a list of existing networks ids that we have configs for."""
confs_dir = cls.get_confs_dir(conf)
try:
return [
c for c in os.listdir(confs_dir)
if uuidutils.is_uuid_like(c)
]
except OSError:
return []
    def _build_cmdline_callback(self, pid_file):
        """Build the dnsmasq command line for this network.

        Passed to the process manager as the default command callback;
        returns the full argv list.
        """
        # We ignore local resolv.conf if dns servers are specified
        # or if local resolution is explicitly disabled.
        _no_resolv = (
            '--no-resolv' if self.conf.dnsmasq_dns_servers or
            not self.conf.dnsmasq_local_resolv else '')
        cmd = [
            'dnsmasq',
            '--no-hosts',
            _no_resolv,
            '--pid-file=%s' % pid_file,
            '--dhcp-hostsfile=%s' % self.get_conf_file_name('host'),
            '--addn-hosts=%s' % self.get_conf_file_name('addn_hosts'),
            '--dhcp-optsfile=%s' % self.get_conf_file_name('opts'),
            '--dhcp-leasefile=%s' % self.get_conf_file_name('leases'),
            '--dhcp-match=set:ipxe,175',
            '--dhcp-userclass=set:ipxe6,iPXE',
            '--local-service',
            '--bind-dynamic',
        ]
        if not self.device_manager.driver.bridged:
            cmd += [
                '--bridge-interface=%s,tap*' % self.interface_name,
            ]

        possible_leases = 0
        for subnet in self._get_all_subnets(self.network):
            mode = None
            # if a subnet is specified to have dhcp disabled
            if not subnet.enable_dhcp:
                continue
            if subnet.ip_version == 4:
                mode = 'static'
            else:
                # Note(scollins) If the IPv6 attributes are not set, set it as
                # static to preserve previous behavior
                addr_mode = getattr(subnet, 'ipv6_address_mode', None)
                ra_mode = getattr(subnet, 'ipv6_ra_mode', None)
                if (addr_mode in [constants.DHCPV6_STATEFUL,
                                  constants.DHCPV6_STATELESS] or
                        not addr_mode and not ra_mode):
                    mode = 'static'

            cidr = netaddr.IPNetwork(subnet.cidr)

            if self.conf.dhcp_lease_duration == -1:
                lease = 'infinite'
            else:
                lease = '%ss' % self.conf.dhcp_lease_duration

            # mode is optional and is not set - skip it
            if mode:
                if subnet.ip_version == 4:
                    cmd.append('--dhcp-range=%s%s,%s,%s,%s,%s' %
                               ('set:', self._SUBNET_TAG_PREFIX % subnet.id,
                                cidr.network, mode, cidr.netmask, lease))
                else:
                    if cidr.prefixlen < 64:
                        LOG.debug('Ignoring subnet %(subnet)s, CIDR has '
                                  'prefix length < 64: %(cidr)s',
                                  {'subnet': subnet.id, 'cidr': cidr})
                        continue
                    cmd.append('--dhcp-range=%s%s,%s,%s,%d,%s' %
                               ('set:', self._SUBNET_TAG_PREFIX % subnet.id,
                                cidr.network, mode,
                                cidr.prefixlen, lease))
                possible_leases += cidr.size

        mtu = getattr(self.network, 'mtu', 0)
        # Do not advertise unknown mtu
        if mtu > 0:
            cmd.append('--dhcp-option-force=option:mtu,%d' % mtu)

        # Cap the limit because creating lots of subnets can inflate
        # this possible lease cap.
        cmd.append('--dhcp-lease-max=%d' %
                   min(possible_leases, self.conf.dnsmasq_lease_max))

        # T1/T2 renewal/rebinding timers are only emitted when configured.
        if self.conf.dhcp_renewal_time > 0:
            cmd.append('--dhcp-option-force=option:T1,%ds' %
                       self.conf.dhcp_renewal_time)

        if self.conf.dhcp_rebinding_time > 0:
            cmd.append('--dhcp-option-force=option:T2,%ds' %
                       self.conf.dhcp_rebinding_time)

        cmd.append('--conf-file=%s' %
                   (self.conf.dnsmasq_config_file.strip() or '/dev/null'))
        for server in self.conf.dnsmasq_dns_servers:
            cmd.append('--server=%s' % server)

        if self.conf.dns_domain:
            cmd.append('--domain=%s' % self.conf.dns_domain)

        if self.conf.dhcp_broadcast_reply:
            cmd.append('--dhcp-broadcast')

        if self.conf.dnsmasq_base_log_dir:
            log_dir = os.path.join(
                self.conf.dnsmasq_base_log_dir,
                self.network.id)
            try:
                if not os.path.exists(log_dir):
                    os.makedirs(log_dir)
            except OSError:
                LOG.error('Error while create dnsmasq log dir: %s', log_dir)
            else:
                # Query/DHCP logging only when the dir could be set up.
                log_filename = os.path.join(log_dir, 'dhcp_dns_log')
                cmd.append('--log-queries')
                cmd.append('--log-dhcp')
                cmd.append('--log-facility=%s' % log_filename)

        return cmd
def spawn_process(self):
"""Spawn the process, if it's not spawned already."""
# we only need to generate the lease file the first time dnsmasq starts
# rather than on every reload since dnsmasq will keep the file current
self._output_init_lease_file()
self._spawn_or_reload_process(reload_with_HUP=False)
def _spawn_or_reload_process(self, reload_with_HUP):
"""Spawns or reloads a Dnsmasq process for the network.
When reload_with_HUP is True, dnsmasq receives a HUP signal,
or it's reloaded if the process is not running.
"""
self._output_config_files()
pm = self._get_process_manager(
cmd_callback=self._build_cmdline_callback)
pm.enable(reload_cfg=reload_with_HUP, ensure_active=True)
self.process_monitor.register(uuid=self.network.id,
service_name=DNSMASQ_SERVICE_NAME,
monitored_process=pm)
def _is_dhcp_release6_supported(self):
if self._IS_DHCP_RELEASE6_SUPPORTED is None:
self._IS_DHCP_RELEASE6_SUPPORTED = (
priv_dhcp.dhcp_release6_supported())
if not self._IS_DHCP_RELEASE6_SUPPORTED:
LOG.warning("dhcp_release6 is not present on this system, "
"will not call it again.")
return self._IS_DHCP_RELEASE6_SUPPORTED
    def _is_dnsmasq_host_tag_supported(self):
        # Lazily probe (and cache) whether the installed dnsmasq supports
        # tags in dhcp-host entries.
        if self._IS_HOST_TAG_SUPPORTED is None:
            self._IS_HOST_TAG_SUPPORTED = checks.dnsmasq_host_tag_support()
        return self._IS_HOST_TAG_SUPPORTED
    def _release_lease(self, mac_address, ip, ip_version, client_id=None,
                       server_id=None, iaid=None):
        """Release a DHCP lease.

        Invokes dhcp_release (v4) or dhcp_release6 (v6) via the privileged
        helper.  Failures are logged, not propagated, since a single failed
        release should not abort the caller's cleanup loop.
        """
        try:
            if ip_version == constants.IP_VERSION_6:
                # Skip silently when the dhcp_release6 utility is missing.
                if not self._is_dhcp_release6_supported():
                    return

                params = {'interface_name': self.interface_name,
                          'ip_address': ip, 'client_id': client_id,
                          'server_id': server_id, 'iaid': iaid,
                          'namespace': self.network.namespace}
                priv_dhcp.dhcp_release6(**params)
            else:
                params = {'interface_name': self.interface_name,
                          'ip_address': ip, 'mac_address': mac_address,
                          'client_id': client_id,
                          'namespace': self.network.namespace}
                priv_dhcp.dhcp_release(**params)
        except (processutils.ProcessExecutionError, OSError) as e:
            # when failed to release single lease there's
            # no need to propagate error further
            LOG.warning('DHCP release failed for params %(params)s. '
                        'Reason: %(e)s', {'params': params, 'e': e})
def _output_config_files(self):
self._output_hosts_file()
self._output_addn_hosts_file()
self._output_opts_file()
    def reload_allocations(self):
        """Rebuild the dnsmasq config and signal the dnsmasq to reload."""

        # If all subnets turn off dhcp, kill the process.
        if not self._enable_dhcp():
            self.disable()
            LOG.debug('Killing dnsmasq for network since all subnets have '
                      'turned off DHCP: %s', self.network.id)
            return
        if not self.interface_name:
            # we land here if above has been called and we receive port
            # delete notifications for the network
            LOG.debug('Agent does not have an interface on this network '
                      'anymore, skipping reload: %s', self.network.id)
            return

        # Release stale leases before HUPing dnsmasq with the new config.
        self._release_unused_leases()
        self._spawn_or_reload_process(reload_with_HUP=True)
        LOG.debug('Reloading allocations for network: %s', self.network.id)
        self.device_manager.update(self.network, self.interface_name)
def _sort_fixed_ips_for_dnsmasq(self, fixed_ips, v6_nets):
"""Sort fixed_ips so that stateless IPv6 subnets appear first.
For example, If a port with v6 extra_dhcp_opts is on a network with
IPv4 and IPv6 stateless subnets. Then dhcp host file will have
below 2 entries for same MAC,
fa:16:3e:8f:9d:65,30.0.0.5,set:aabc7d33-4874-429e-9637-436e4232d2cd
(entry for IPv4 dhcp)
fa:16:3e:8f:9d:65,set:aabc7d33-4874-429e-9637-436e4232d2cd
(entry for stateless IPv6 for v6 options)
dnsmasq internal details for processing host file entries
1) dnsmasq reads the host file from EOF.
2) So it first picks up stateless IPv6 entry,
fa:16:3e:8f:9d:65,set:aabc7d33-4874-429e-9637-436e4232d2cd
3) But dnsmasq doesn't have sufficient checks to skip this entry and
pick next entry, to process dhcp IPv4 request.
4) So dnsmasq uses this entry to process dhcp IPv4 request.
5) As there is no ip in this entry, dnsmasq logs "no address available"
and fails to send DHCPOFFER message.
As we rely on internal details of dnsmasq to understand and fix the
issue, Ihar sent a mail to dnsmasq-discuss mailing list
http://lists.thekelleys.org.uk/pipermail/dnsmasq-discuss/2015q2/
009650.html
So if we reverse the order of writing entries in host file,
so that entry for stateless IPv6 comes first,
then dnsmasq can correctly fetch the IPv4 address.
"""
return sorted(
fixed_ips,
key=lambda fip: ((fip.subnet_id in v6_nets) and (
v6_nets[fip.subnet_id].ipv6_address_mode == (
constants.DHCPV6_STATELESS))),
reverse=True)
def _merge_alloc_addr6_list(self, fixed_ips, v6_nets):
"""Merge fixed_ips to ipv6 addr lists
If a port have multiple IPv6 addresses in the same subnet, merge the
into one entry listing all the addresess, creating a single dhcp-host
entry with the list of addresses defined allow dnsmasq to make all
addresses available as requests for leases arrive.
See dnsmasq-discuss mailing list: http://lists.thekelleys.org.uk/
pipermail/dnsmasq-discuss/2020q1/013743.html
"""
by_subnet = {}
NewFip = collections.namedtuple('NewFip', 'subnet_id ip_address')
merged = []
for fip in fixed_ips:
if (fip.subnet_id in v6_nets and
v6_nets[fip.subnet_id].ipv6_address_mode == (
constants.DHCPV6_STATEFUL)):
if fip.subnet_id not in by_subnet:
by_subnet.update({fip.subnet_id: []})
by_subnet[fip.subnet_id].append(fip.ip_address)
else:
merged.append(fip)
for subnet_id in by_subnet:
addr6_list = ','.join([self._format_address_for_dnsmasq(ip)
for ip in by_subnet[subnet_id]])
merged.append(NewFip(subnet_id=subnet_id,
ip_address=addr6_list))
return merged
    def _get_dns_assignment(self, ip_address, dns_assignment):
        """Get DNS assignment hostname and fqdn

        In dnsmasq it is not possible to configure two dhcp-host
        entries mapped to a single client mac address with IP
        addresses in the same subnet. When receiving a request
        dnsmasq will match on the first entry in its config,
        and lease that address. The second entry will never be
        used.

        For IPv6 it is possible to add multiple IPv6 addresses
        to a single dhcp-host entry by placing a list of addresses
        in brackets, i.e [addr1][addr2][...]. See dnsmasq mailing
        list: http://lists.thekelleys.org.uk/pipermail/
        dnsmasq-discuss/2020q1/013671.html. Since we cannot have
        two hostnames in the dhcp-host entry this method picks the
        first hostname and fqdn it finds matching one of the IPs
        in the fixed-ips in dns_assignment, or the hostname is
        generated based on the first fixed-ip.

        :param ip_address: IP address or a list of IPv6 addresses
        :param dns_assignment: DNS assignments
        :return: hostname, fqdn
        """
        hostname, fqdn = None, None
        # Undo the '[addr1][addr2]' bracket wrapping; may leave a trailing
        # empty string which simply never matches below.
        ip_addresses = ip_address.replace('[', '').split(']')
        if dns_assignment:
            dns_ip_map = {d.ip_address: d for d in dns_assignment}
            for addr in ip_addresses:
                # If dns_name attribute is supported by ports API, return the
                # dns_assignment generated by the Neutron server. Otherwise,
                # generate hostname and fqdn locally (previous behaviour)
                if addr in dns_ip_map:
                    hostname = dns_ip_map[addr].hostname
                    fqdn = dns_ip_map[addr].fqdn
                    break
        if hostname is None:
            # Fall back to a hostname derived from the first IP.
            hostname = ('host-%s' %
                        ip_addresses[0].replace('.', '-').replace(':', '-'))
            fqdn = hostname
            if self.conf.dns_domain:
                fqdn = '%s.%s' % (fqdn, self.conf.dns_domain)
        return hostname, fqdn
    def _iter_hosts(self, merge_addr6_list=False):
        """Iterate over hosts.

        For each host on the network we yield a tuple containing:

        (
            port,  # a DictModel instance representing the port.
            alloc,  # a DictModel instance of the allocated ip and subnet.
                    # if alloc is None, it means there is no need to allocate
                    # an IPv6 address because of stateless DHCPv6 network.
            host_name,  # Host name.
            name,  # Canonical hostname in the format 'hostname[.domain]'.
            no_dhcp,  # A flag indicating that the address doesn't need a DHCP
                      # IP address.
            no_opts,  # A flag indication that options shouldn't be written
            tag,  # A dhcp-host tag to add to the configuration if supported
        )
        """
        v6_nets = dict((subnet.id, subnet) for subnet in
                       self._get_all_subnets(self.network)
                       if subnet.ip_version == 6)

        for port in self.network.ports:
            # Stateless-v6 entries must come first; see
            # _sort_fixed_ips_for_dnsmasq for the dnsmasq quirk involved.
            fixed_ips = self._sort_fixed_ips_for_dnsmasq(port.fixed_ips,
                                                         v6_nets)
            # TODO(hjensas): Drop this conditional and option once distros
            # generally have dnsmasq supporting addr6 list and range.
            if self.conf.dnsmasq_enable_addr6_list and merge_addr6_list:
                fixed_ips = self._merge_alloc_addr6_list(fixed_ips, v6_nets)
            # Confirm whether Neutron server supports dns_name attribute in
            # the ports API
            dns_assignment = getattr(port, 'dns_assignment', None)
            for alloc in fixed_ips:
                no_dhcp = False
                no_opts = False
                tag = ''
                if alloc.subnet_id in v6_nets:
                    addr_mode = v6_nets[alloc.subnet_id].ipv6_address_mode
                    no_dhcp = addr_mode in (constants.IPV6_SLAAC,
                                            constants.DHCPV6_STATELESS)
                    if self._is_dnsmasq_host_tag_supported():
                        tag = HOST_DHCPV6_TAG
                    # we don't setup anything for SLAAC. It doesn't make sense
                    # to provide options for a client that won't use DHCP
                    no_opts = addr_mode == constants.IPV6_SLAAC

                hostname, fqdn = self._get_dns_assignment(alloc.ip_address,
                                                          dns_assignment)

                yield (port, alloc, hostname, fqdn, no_dhcp, no_opts, tag)
    def _get_port_extra_dhcp_opts(self, port):
        # Returns the port's extra_dhcp_opts, or False when the extension
        # attribute is not present on the port.
        return getattr(port, edo_ext.EXTRADHCPOPTS, False)
    def _output_init_lease_file(self):
        """Write a fake lease file to bootstrap dnsmasq.

        The generated file is passed to the --dhcp-leasefile option of
        dnsmasq. This is used as a bootstrapping mechanism to avoid NAKing
        active leases when a dhcp server is scheduled to another agent.
        Using a leasefile will also prevent dnsmasq from NAKing or ignoring
        renewals after a restart.

        Format is as follows:
        epoch-timestamp mac_addr ip_addr hostname client-ID
        """
        filename = self.get_conf_file_name('leases')
        buf = io.StringIO()

        LOG.debug('Building initial lease file: %s', filename)
        # we make up a lease time for the database entry
        if self.conf.dhcp_lease_duration == -1:
            # Even with an infinite lease, a client may choose to renew a
            # previous lease on reboot or interface bounce so we should have
            # an entry for it.
            # Dnsmasq timestamp format for an infinite lease is 0.
            timestamp = 0
        else:
            timestamp = int(time.time()) + self.conf.dhcp_lease_duration
        # Only IPv4 subnets with DHCP enabled get bootstrap entries.
        dhcpv4_enabled_subnet_ids = [
            s.id for s in self._get_all_subnets(self.network)
            if s.enable_dhcp and s.ip_version == constants.IP_VERSION_4]
        for host_tuple in self._iter_hosts():
            port, alloc, hostname, name, no_dhcp, no_opts, tag = host_tuple
            # don't write ip address which belongs to a dhcp disabled subnet
            # or an IPv6 subnet.
            if no_dhcp or alloc.subnet_id not in dhcpv4_enabled_subnet_ids:
                continue
            # all that matters is the mac address and IP. the hostname and
            # client ID will be overwritten on the next renewal.
            buf.write('%s %s %s * *\n' %
                      (timestamp, port.mac_address, alloc.ip_address))
        contents = buf.getvalue()
        file_utils.replace_file(filename, contents)
        LOG.debug('Done building initial lease file %s with contents:\n%s',
                  filename, contents)
        return filename
@staticmethod
def _format_address_for_dnsmasq(address):
# (dzyu) Check if it is legal ipv6 address, if so, need wrap
# it with '[]' to let dnsmasq to distinguish MAC address from
# IPv6 address.
if netaddr.valid_ipv6(address):
return '[%s]' % address
return address
    def _output_hosts_file(self):
        """Writes a dnsmasq compatible dhcp hosts file.

        The generated file is sent to the --dhcp-hostsfile option of dnsmasq,
        and lists the hosts on the network which should receive a dhcp lease.
        Each line in this file is in the form::

            'mac_address,FQDN,ip_address'

        IMPORTANT NOTE: a dnsmasq instance does not resolve hosts defined in
        this file if it did not give a lease to a host listed in it (e.g.:
        multiple dnsmasq instances on the same network if this network is on
        multiple network nodes). This file is only defining hosts which
        should receive a dhcp lease, the hosts resolution in itself is
        defined by the `_output_addn_hosts_file` method.
        """
        buf = io.StringIO()
        filename = self.get_conf_file_name('host')

        LOG.debug('Building host file: %s', filename)
        dhcp_enabled_subnet_ids = [s.id for s in
                                   self._get_all_subnets(self.network)
                                   if s.enable_dhcp]
        # NOTE(ihrachyshka): the loop should not log anything inside it, to
        # avoid potential performance drop when lots of hosts are dumped
        for host_tuple in self._iter_hosts(merge_addr6_list=True):
            port, alloc, hostname, name, no_dhcp, no_opts, tag = host_tuple
            if no_dhcp:
                # Option-only entry: no address, just a tag so per-port
                # options can still be matched (stateless DHCPv6).
                if not no_opts and self._get_port_extra_dhcp_opts(port):
                    buf.write('%s,%s%s%s\n' % (
                        port.mac_address, tag,
                        'set:', self._PORT_TAG_PREFIX % port.id))
                continue

            # don't write ip address which belongs to a dhcp disabled subnet.
            if alloc.subnet_id not in dhcp_enabled_subnet_ids:
                continue

            ip_address = self._format_address_for_dnsmasq(alloc.ip_address)

            if self._get_port_extra_dhcp_opts(port):
                client_id = self._get_client_id(port)
                if client_id and len(port.extra_dhcp_opts) > 1:
                    # Client id plus additional options to tag.
                    buf.write('%s,%s%s%s,%s,%s,%s%s\n' %
                              (port.mac_address, tag, self._ID, client_id,
                               name, ip_address, 'set:',
                               self._PORT_TAG_PREFIX % port.id))
                elif client_id and len(port.extra_dhcp_opts) == 1:
                    # Client id is the only extra option; no tag needed.
                    buf.write('%s,%s%s%s,%s,%s\n' %
                              (port.mac_address, tag, self._ID, client_id,
                               name, ip_address))
                else:
                    buf.write('%s,%s%s,%s,%s%s\n' %
                              (port.mac_address, tag, name, ip_address,
                               'set:', self._PORT_TAG_PREFIX % port.id))
            else:
                buf.write('%s,%s%s,%s\n' %
                          (port.mac_address, tag, name, ip_address))

        file_utils.replace_file(filename, buf.getvalue())
        LOG.debug('Done building host file %s', filename)
        return filename
def _get_client_id(self, port):
if self._get_port_extra_dhcp_opts(port):
for opt in port.extra_dhcp_opts:
if opt.opt_name in (edo_ext.DHCP_OPT_CLIENT_ID,
DHCP_OPT_CLIENT_ID_NUM,
str(DHCP_OPT_CLIENT_ID_NUM)):
return opt.opt_value
@staticmethod
def _parse_ip_addresses(ip_list):
ip_list = [ip.strip('[]') for ip in ip_list]
return [ip for ip in ip_list if netutils.is_valid_ip(ip)]
def _read_hosts_file_leases(self, filename):
leases = set()
try:
with open(filename) as f:
for line in f.readlines():
host = line.strip().split(',')
mac = host[0]
client_id = None
if host[1].startswith('set:'):
continue
if host[1].startswith(self._ID):
ips = self._parse_ip_addresses(host[3:])
client_id = host[1][len(self._ID):]
elif host[1].startswith('tag:'):
ips = self._parse_ip_addresses(host[3:])
else:
ips = self._parse_ip_addresses(host[2:])
for ip in ips:
leases.add((ip, mac, client_id))
except (OSError, IOError):
LOG.debug('Error while reading hosts file %s', filename)
return leases
    def _read_leases_file_leases(self, filename):
        """Read dnsmasq dhcp leases file.

        Read information from leases file, which is needed to pass to
        dhcp_release6 command line utility if some of these leases are not
        needed anymore

        each line in dnsmasq leases file is one of the following
          * duid entry: duid server_duid
            There MUST be single duid entry per file
          * ipv4 entry: space separated list
            - The expiration time (seconds since unix epoch) or duration
              (if dnsmasq is compiled with HAVE_BROKEN_RTC) of the lease.
              0 means infinite.
            - The link address, in format XX-YY:YY:YY[...], where XX is the
              ARP hardware type.  "XX-" may be omitted for Ethernet.
            - The IPv4 address
            - The hostname (sent by the client or assigned by dnsmasq)
              or '*' for none.
            - The client identifier (colon-separated hex bytes)
              or '*' for none.
          * ipv6 entry: space separated list
            - The expiration time or duration
            - The IAID as a Big Endian decimal number, prefixed by T for
              IA_TAs (temporary addresses).
            - The IPv6 address
            - The hostname or '*'
            - The client DUID (colon-separated hex bytes) or '*' if unknown

        original discussion is in dnsmasq mailing list
        http://lists.thekelleys.org.uk/pipermail/\
        dnsmasq-discuss/2016q2/010595.html

        :param filename: leases file
        :return: dict, keys are IP(v6) addresses, values are dicts containing
                 iaid, client_id and server_id
        """
        leases = {}
        server_id = None
        if os.path.exists(filename):
            with open(filename) as f:
                for line in f.readlines():
                    if line.startswith('duid'):
                        if not server_id:
                            server_id = line.strip().split()[1]
                        else:
                            LOG.warning('Multiple DUID entries in %s '
                                        'lease file, dnsmasq is possibly '
                                        'not functioning properly',
                                        filename)
                        continue
                    parts = line.strip().split()
                    if len(parts) != 5:
                        LOG.warning('Invalid lease entry %s found in %s '
                                    'lease file, ignoring', parts, filename)
                        continue
                    (iaid, ip, client_id) = parts[1], parts[2], parts[4]
                    # Strip IPv6 brackets so keys match plain address form.
                    ip = ip.strip('[]')
                    leases[ip] = {'iaid': iaid,
                                  'client_id': client_id,
                                  'server_id': server_id
                                  }
        return leases
    def _release_unused_leases(self):
        """Release leases with no matching fixed IP on a neutron port.

        Compares the host file and current leases file against the ports
        known for the network and calls _release_lease for stale entries,
        retrying up to DHCP_RELEASE_TRIES times.
        """
        filename = self.get_conf_file_name('host')
        old_leases = self._read_hosts_file_leases(filename)
        leases_filename = self.get_conf_file_name('leases')
        cur_leases = self._read_leases_file_leases(leases_filename)
        if not cur_leases:
            return

        v4_leases = set()
        for (k, v) in cur_leases.items():
            # IPv4 leases have a MAC, IPv6 ones do not, so we must ignore
            # the v6 entries here (they are matched via the host file).
            if netaddr.IPAddress(k).version == constants.IP_VERSION_4:
                # treat '*' as None, see note in _read_leases_file_leases()
                client_id = v['client_id']
                if client_id == '*':
                    client_id = None
                v4_leases.add((k, v['iaid'], client_id))

        new_leases = set()
        for port in self.network.ports:
            client_id = self._get_client_id(port)
            for alloc in port.fixed_ips:
                new_leases.add((alloc.ip_address, port.mac_address, client_id))

        # If an entry is in the leases or host file(s), but doesn't have
        # a fixed IP on a corresponding neutron port, consider it stale.
        entries_to_release = (v4_leases | old_leases) - new_leases
        if not entries_to_release:
            return

        # If the VM advertises a client ID in its lease, but it's not set in
        # the port's Extra DHCP Opts, the lease will not be filtered above.
        # Release the lease only if client ID is set in port DB and there is
        # a mismatch; otherwise the lease is released when other ports are
        # deleted/updated.
        entries_with_no_client_id = set()
        for ip, mac, client_id in entries_to_release:
            if client_id:
                entry_no_client_id = (ip, mac, None)
                if (entry_no_client_id in old_leases and
                        entry_no_client_id in new_leases):
                    entries_with_no_client_id.add((ip, mac, client_id))
        entries_to_release -= entries_with_no_client_id

        # Try DHCP_RELEASE_TRIES times to release a lease, re-reading the
        # file each time to see if it's still there.  We loop +1 times to
        # check the lease file one last time before logging any remaining
        # entries.
        for i in range(DHCP_RELEASE_TRIES + 1):
            entries_not_present = set()
            for ip, mac, client_id in entries_to_release:
                try:
                    entry = cur_leases[ip]
                except KeyError:
                    entries_not_present.add((ip, mac, client_id))
                    continue
                # if not the final loop, try and release
                if i < DHCP_RELEASE_TRIES:
                    ip_version = netaddr.IPAddress(ip).version
                    if ip_version == constants.IP_VERSION_6:
                        # v6 releases need the DUID recorded in the lease.
                        client_id = entry['client_id']
                    self._release_lease(mac, ip, ip_version, client_id,
                                        entry['server_id'], entry['iaid'])
            # Remove elements that were not in the current leases file,
            # no need to look for them again, and see if we're done.
            entries_to_release -= entries_not_present
            if not entries_to_release:
                break
            if i < DHCP_RELEASE_TRIES:
                time.sleep(DHCP_RELEASE_TRIES_SLEEP)
                cur_leases = self._read_leases_file_leases(leases_filename)
                if not cur_leases:
                    break
        else:
            # for/else: only reached when every retry left entries behind.
            LOG.warning("Could not release DHCP leases for these IP "
                        "addresses after %d tries: %s",
                        DHCP_RELEASE_TRIES,
                        ', '.join(ip for ip, m, c in entries_to_release))
def _output_addn_hosts_file(self):
"""Writes a dnsmasq compatible additional hosts file.
The generated file is sent to the --addn-hosts option of dnsmasq,
and lists the hosts on the network which should be resolved even if
the dnsmasq instance did not give a lease to the host (see the
`_output_hosts_file` method).
Each line in this file is in the same form as a standard /etc/hosts
file.
"""
buf = io.StringIO()
for host_tuple in self._iter_hosts():
port, alloc, hostname, fqdn, no_dhcp, no_opts, tag = host_tuple
# It is compulsory to write the `fqdn` before the `hostname` in
# order to obtain it in PTR responses.
if alloc:
buf.write('%s\t%s %s\n' % (alloc.ip_address, fqdn, hostname))
addn_hosts = self.get_conf_file_name('addn_hosts')
file_utils.replace_file(addn_hosts, buf.getvalue())
return addn_hosts
def _output_opts_file(self):
"""Write a dnsmasq compatible options file."""
options, subnet_index_map = self._generate_opts_per_subnet()
options += self._generate_opts_per_port(subnet_index_map)
name = self.get_conf_file_name('opts')
file_utils.replace_file(name, '\n'.join(options))
return name
    def _generate_opts_per_subnet(self):
        """Build per-subnet dnsmasq options.

        :return: (options, subnets_without_nameservers) where the second
                 element is the set of subnet ids with no configured DNS
                 servers (consumed by _generate_opts_per_port).
        """
        options = []
        subnets_without_nameservers = set()
        if self.conf.enable_isolated_metadata or self.conf.force_metadata:
            # Only needed when metadata host routes may be injected below.
            subnet_to_interface_ip = self._make_subnet_interface_ip_map()
        isolated_subnets = self.get_isolated_subnets(self.network)
        for subnet in self._get_all_subnets(self.network):
            addr_mode = getattr(subnet, 'ipv6_address_mode', None)
            segment_id = getattr(subnet, 'segment_id', None)
            # SLAAC-only v6 subnets and dhcp-disabled subnets get no options.
            if (not subnet.enable_dhcp or
                    (subnet.ip_version == 6 and
                     addr_mode == constants.IPV6_SLAAC)):
                continue
            if subnet.dns_nameservers:
                if ((subnet.ip_version == 4 and
                     subnet.dns_nameservers == ['0.0.0.0']) or
                        (subnet.ip_version == 6 and
                         subnet.dns_nameservers == ['::'])):
                    # Special case: Do not announce DNS servers
                    options.append(
                        self._format_option(
                            subnet.ip_version,
                            self._SUBNET_TAG_PREFIX % subnet.id,
                            'dns-server'))
                else:
                    options.append(
                        self._format_option(
                            subnet.ip_version,
                            self._SUBNET_TAG_PREFIX % subnet.id,
                            'dns-server', ','.join(
                                Dnsmasq._convert_to_literal_addrs(
                                    subnet.ip_version,
                                    subnet.dns_nameservers))))
            else:
                # use the dnsmasq ip as nameservers only if there is no
                # dns-server submitted by the server; the actual option is
                # emitted per-port in _generate_opts_per_port.
                subnets_without_nameservers.add(subnet.id)

            if self.conf.dns_domain and subnet.ip_version == 6:
                # Advertise the configured dns_domain to v6 clients via the
                # DHCPv6 domain-search option.
                options.append(
                    self._format_option(
                        subnet.ip_version, self._SUBNET_TAG_PREFIX % subnet.id,
                        "domain-search", ''.join(self.conf.dns_domain)))

            gateway = subnet.gateway_ip
            host_routes = []
            for hr in subnet.host_routes:
                if hr.destination == constants.IPv4_ANY:
                    # A default-route host route overrides a missing gateway.
                    if not gateway:
                        gateway = hr.nexthop
                else:
                    host_routes.append("%s,%s" % (hr.destination, hr.nexthop))

            # Add host routes for isolated network segments
            if ((self.conf.force_metadata or
                 (isolated_subnets[subnet.id] and
                  self.conf.enable_isolated_metadata)) and
                    subnet.ip_version == 4):
                subnet_dhcp_ip = subnet_to_interface_ip.get(subnet.id)
                if subnet_dhcp_ip:
                    host_routes.append(
                        '%s,%s' % (constants.METADATA_CIDR, subnet_dhcp_ip)
                    )
            elif not isolated_subnets[subnet.id] and gateway:
                # Non-isolated subnets reach metadata through the router.
                host_routes.append(
                    '%s,%s' % (constants.METADATA_CIDR, gateway)
                )

            if subnet.ip_version == 4:
                # On-link routes to sibling v4 subnets on the same segment.
                for s in self._get_all_subnets(self.network):
                    sub_segment_id = getattr(s, 'segment_id', None)
                    if (s.ip_version == 4 and
                            s.cidr != subnet.cidr and
                            sub_segment_id == segment_id):
                        host_routes.insert(0, "%s,0.0.0.0" % s.cidr)

                if host_routes:
                    if gateway:
                        host_routes.append("%s,%s" % (constants.IPv4_ANY,
                                                      gateway))
                    options.append(
                        self._format_option(
                            subnet.ip_version,
                            self._SUBNET_TAG_PREFIX % subnet.id,
                            'classless-static-route',
                            ','.join(host_routes)))
                    # Same routes again under the MS-specific option code.
                    options.append(
                        self._format_option(
                            subnet.ip_version,
                            self._SUBNET_TAG_PREFIX % subnet.id,
                            WIN2k3_STATIC_DNS,
                            ','.join(host_routes)))

                if gateway:
                    options.append(self._format_option(
                        subnet.ip_version, self._SUBNET_TAG_PREFIX % subnet.id,
                        'router', gateway))
                else:
                    # Explicitly advertise "no router" when none is set.
                    options.append(self._format_option(
                        subnet.ip_version, self._SUBNET_TAG_PREFIX % subnet.id,
                        'router'))
        return options, subnets_without_nameservers
    def _generate_opts_per_port(self, subnets_without_nameservers):
        """Build per-port dnsmasq options from each port's extra DHCP opts.

        :param subnets_without_nameservers: set of subnet ids that got no
            dns-server option at the subnet level; for those, every DHCP
            agent port IP is advertised as a nameserver (when >1 exists).
        :returns: list of formatted dnsmasq option strings.
        """
        options = []
        # subnet_id -> list of DHCP-agent port IPs on that subnet
        dhcp_ips = collections.defaultdict(list)
        for port in self.network.ports:
            if self._get_port_extra_dhcp_opts(port):
                # IP versions actually present on this port; an extra opt
                # for an absent version cannot be applied.
                port_ip_versions = set(
                    [netaddr.IPAddress(ip.ip_address).version
                     for ip in port.fixed_ips])
                for opt in port.extra_dhcp_opts:
                    # Client-id opts are handled elsewhere (host entries),
                    # so skip them here in all their spellings.
                    if opt.opt_name in (edo_ext.DHCP_OPT_CLIENT_ID,
                                        DHCP_OPT_CLIENT_ID_NUM,
                                        str(DHCP_OPT_CLIENT_ID_NUM)):
                        continue
                    opt_ip_version = opt.ip_version
                    if opt_ip_version in port_ip_versions:
                        options.append(
                            self._format_option(
                                opt_ip_version,
                                self._PORT_TAG_PREFIX % port.id,
                                opt.opt_name, opt.opt_value))
                    else:
                        LOG.info("Cannot apply dhcp option %(opt)s "
                                 "because it's ip_version %(version)d "
                                 "is not in port's address IP versions",
                                 {'opt': opt.opt_name,
                                  'version': opt_ip_version})
            # provides all dnsmasq ip as dns-server if there is more than
            # one dnsmasq for a subnet and there is no dns-server submitted
            # by the server
            if port.device_owner == constants.DEVICE_OWNER_DHCP:
                for ip in port.fixed_ips:
                    if ip.subnet_id not in subnets_without_nameservers:
                        continue
                    dhcp_ips[ip.subnet_id].append(ip.ip_address)
        for subnet_id, ips in dhcp_ips.items():
            for ip_version in (4, 6):
                vx_ips = [ip for ip in ips
                          if netaddr.IPAddress(ip).version == ip_version]
                # Only advertise agent IPs when more than one dnsmasq
                # serves the subnet; a single instance is the default.
                if len(vx_ips) > 1:
                    options.append(
                        self._format_option(
                            ip_version, self._SUBNET_TAG_PREFIX % subnet_id,
                            'dns-server',
                            ','.join(
                                Dnsmasq._convert_to_literal_addrs(ip_version,
                                                                  vx_ips))))
        return options
def _make_subnet_interface_ip_map(self):
subnet_lookup = dict(
(netaddr.IPNetwork(subnet.cidr), subnet.id)
for subnet in self.network.subnets
)
retval = {}
for addr in ip_lib.get_devices_with_ip(self.network.namespace,
name=self.interface_name):
ip_net = netaddr.IPNetwork(addr['cidr'])
if ip_net in subnet_lookup:
retval[subnet_lookup[ip_net]] = addr['cidr'].split('/')[0]
return retval
def _format_option(self, ip_version, tag, option, *args):
"""Format DHCP option by option name or code."""
option = str(option)
pattern = "(tag:(.*),)?(.*)$"
matches = re.match(pattern, option)
extra_tag = matches.groups()[0]
option = matches.groups()[2]
# NOTE(TheJulia): prepending option6 to any DHCPv6 option is
# indicated as required in the dnsmasq man page for version 2.79.
# Testing reveals that the man page is correct, option is not
# honored if not in the format "option6:$NUM". For IPv4 we
# only apply if the option is non-numeric.
if ip_version == constants.IP_VERSION_6:
option = 'option6:%s' % option
elif not option.isdigit():
option = 'option:%s' % option
if extra_tag:
tags = ('tag:' + tag, extra_tag[:-1], '%s' % option)
else:
tags = ('tag:' + tag, '%s' % option)
return ','.join(tags + args)
@staticmethod
def _convert_to_literal_addrs(ip_version, ips):
if ip_version == 4:
return ips
return ['[' + ip + ']' for ip in ips]
@classmethod
def get_isolated_subnets(cls, network):
"""Returns a dict indicating whether or not a subnet is isolated
A subnet is considered non-isolated if there is a port connected to
the subnet, and the port's ip address matches that of the subnet's
gateway. The port must be owned by a neutron router.
"""
isolated_subnets = collections.defaultdict(lambda: True)
all_subnets = cls._get_all_subnets(network)
subnets = dict((subnet.id, subnet) for subnet in all_subnets)
for port in network.ports:
if port.device_owner not in constants.ROUTER_INTERFACE_OWNERS:
continue
for alloc in port.fixed_ips:
if (alloc.subnet_id in subnets and
subnets[alloc.subnet_id].gateway_ip ==
alloc.ip_address):
isolated_subnets[alloc.subnet_id] = False
return isolated_subnets
@staticmethod
def has_metadata_subnet(subnets):
"""Check if the subnets has a metadata subnet."""
meta_cidr = netaddr.IPNetwork(constants.METADATA_V4_SUBNET)
if any(netaddr.IPNetwork(s.cidr) in meta_cidr
for s in subnets):
return True
return False
    @classmethod
    def should_enable_metadata(cls, conf, network):
        """Determine whether the metadata proxy is needed for a network
        This method returns True for truly isolated networks (ie: not attached
        to a router) when enable_isolated_metadata is True, or for all the
        networks when the force_metadata flags is True.
        This method also returns True when enable_metadata_network is True,
        and the network passed as a parameter has a subnet in the link-local
        CIDR, thus characterizing it as a "metadata" network. The metadata
        network is used by solutions which do not leverage the l3 agent for
        providing access to the metadata service via logical routers built
        with 3rd party backends.
        """
        all_subnets = cls._get_all_subnets(network)
        dhcp_subnets = [s for s in all_subnets if s.enable_dhcp]
        # Without any DHCP-enabled subnet there is nothing to proxy for.
        if not dhcp_subnets:
            return False
        # force_metadata wins unconditionally over isolation checks.
        if conf.force_metadata:
            return True
        if not conf.enable_isolated_metadata:
            return False
        # "Metadata network" case: a subnet inside the link-local CIDR.
        if (conf.enable_metadata_network and
                cls.has_metadata_subnet(all_subnets)):
            return True
        # Finally: enable only if at least one DHCP subnet is isolated.
        isolated_subnets = cls.get_isolated_subnets(network)
        return any(isolated_subnets[s.id] for s in dhcp_subnets)
class DeviceManager(object):
    """Manages the lifecycle of the DHCP agent's network device/port."""
    def __init__(self, conf, plugin):
        """Store config and plugin RPC proxy; load the interface driver.

        :param conf: agent configuration object
        :param plugin: plugin-side RPC API used for port CRUD operations
        """
        self.conf = conf
        self.plugin = plugin
        self.driver = agent_common_utils.load_interface_driver(
            conf,
            get_networks_callback=self.plugin.get_networks)
    def get_interface_name(self, network, port):
        """Return interface(device) name for use by the DHCP process."""
        # NOTE(review): `network` is unused here; the driver derives the
        # name from the port alone — kept for interface compatibility.
        return self.driver.get_device_name(port)
    def get_device_id(self, network):
        """Return a unique DHCP device ID for this host on the network."""
        # There could be more than one dhcp server per network, so create
        # a device id that combines host and network ids
        return common_utils.get_dhcp_agent_device_id(network.id,
                                                     self.conf.host)
    def _set_default_route_ip_version(self, network, device_name, ip_version):
        """Sync the namespace default gateway for one IP version.

        Uses the gateway of the first suitable DHCP-enabled subnet; if no
        subnet provides a valid gateway, any existing default route for
        this IP version is removed.
        """
        device = ip_lib.IPDevice(device_name, namespace=network.namespace)
        gateway = device.route.get_gateway(ip_version=ip_version)
        if gateway:
            gateway = gateway.get('gateway')
        for subnet in network.subnets:
            skip_subnet = (
                subnet.ip_version != ip_version or
                not subnet.enable_dhcp or
                subnet.gateway_ip is None)
            if skip_subnet:
                continue
            if subnet.ip_version == constants.IP_VERSION_6:
                # This is duplicating some of the API checks already done,
                # but some of the functional tests call directly
                prefixlen = netaddr.IPNetwork(subnet.cidr).prefixlen
                if prefixlen == 0 or prefixlen > 126:
                    continue
                modes = [constants.IPV6_SLAAC, constants.DHCPV6_STATELESS]
                addr_mode = getattr(subnet, 'ipv6_address_mode', None)
                ra_mode = getattr(subnet, 'ipv6_ra_mode', None)
                if (prefixlen != 64 and
                        (addr_mode in modes or ra_mode in modes)):
                    continue
            if gateway != subnet.gateway_ip:
                LOG.debug('Setting IPv%(version)s gateway for dhcp netns '
                          'on net %(n)s to %(ip)s',
                          {'n': network.id, 'ip': subnet.gateway_ip,
                           'version': ip_version})
                # Check for and remove the on-link route for the old
                # gateway being replaced, if it is outside the subnet
                is_old_gateway_not_in_subnet = (gateway and
                                                not ipam_utils.check_subnet_ip(
                                                    subnet.cidr, gateway))
                if is_old_gateway_not_in_subnet:
                    onlink = device.route.list_onlink_routes(ip_version)
                    existing_onlink_routes = set(r['cidr'] for r in onlink)
                    if gateway in existing_onlink_routes:
                        device.route.delete_route(gateway, scope='link')
                is_new_gateway_not_in_subnet = (subnet.gateway_ip and
                                                not ipam_utils.check_subnet_ip(
                                                    subnet.cidr,
                                                    subnet.gateway_ip))
                if is_new_gateway_not_in_subnet:
                    device.route.add_route(subnet.gateway_ip, scope='link')
                device.route.add_gateway(subnet.gateway_ip)
            # First suitable subnet wins; nothing more to do.
            return
        # No subnets on the network have a valid gateway. Clean it up to avoid
        # confusion from seeing an invalid gateway here.
        if gateway is not None:
            LOG.debug('Removing IPv%(version)s gateway for dhcp netns on '
                      'net %(n)s',
                      {'n': network.id, 'version': ip_version})
            device.route.delete_gateway(gateway)
    def _set_default_route(self, network, device_name):
        """Sets the default gateway for this dhcp namespace.
        This method is idempotent and will only adjust the route if adjusting
        it would change it from what it already is. This makes it safe to call
        and avoids unnecessary perturbation of the system.
        """
        # Handle v4 and v6 default routes independently.
        for ip_version in (constants.IP_VERSION_4, constants.IP_VERSION_6):
            self._set_default_route_ip_version(network, device_name,
                                               ip_version)
    def _setup_existing_dhcp_port(self, network, device_id, dhcp_subnets):
        """Set up the existing DHCP port, if there is one.

        Returns the (possibly updated) port, or None when no port with
        this device_id exists on the network.
        """
        # To avoid pylint thinking that port might be undefined after
        # the following loop...
        port = None
        # Look for an existing DHCP port for this network.
        for port in network.ports:
            port_device_id = getattr(port, 'device_id', None)
            if port_device_id == device_id:
                # If using gateway IPs on this port, we can skip the
                # following code, whose purpose is just to review and
                # update the Neutron-allocated IP addresses for the
                # port.
                if self.driver.use_gateway_ips:
                    return port
                # Otherwise break out, as we now have the DHCP port
                # whose subnets and addresses we need to review.
                break
        else:
            return None
        # Compare what the subnets should be against what is already
        # on the port.
        dhcp_enabled_subnet_ids = set(dhcp_subnets)
        port_subnet_ids = set(ip.subnet_id for ip in port.fixed_ips)
        # If those differ, we need to call update.
        if dhcp_enabled_subnet_ids != port_subnet_ids:
            # Collect the subnets and fixed IPs that the port already
            # has, for subnets that are still in the DHCP-enabled set.
            wanted_fixed_ips = []
            for fixed_ip in port.fixed_ips:
                if fixed_ip.subnet_id in dhcp_enabled_subnet_ids:
                    wanted_fixed_ips.append(
                        {'subnet_id': fixed_ip.subnet_id,
                         'ip_address': fixed_ip.ip_address})
            # Add subnet IDs for new DHCP-enabled subnets.
            wanted_fixed_ips.extend(
                dict(subnet_id=s)
                for s in dhcp_enabled_subnet_ids - port_subnet_ids)
            # Update the port to have the calculated subnets and fixed
            # IPs. The Neutron server will allocate a fresh IP for
            # each subnet that doesn't already have one.
            port = self.plugin.update_dhcp_port(
                port.id,
                {'port': {'network_id': network.id,
                          'fixed_ips': wanted_fixed_ips}})
            if not port:
                raise exceptions.Conflict()
        return port
    def _setup_reserved_dhcp_port(self, network, device_id, dhcp_subnets):
        """Setup the reserved DHCP port, if there is one.

        Adopts the first port whose device_id is the reserved sentinel by
        rewriting its device_id; implicitly returns None when no reserved
        port exists or the update fails.
        """
        LOG.debug('DHCP port %(device_id)s on network %(network_id)s'
                  ' does not yet exist. Checking for a reserved port.',
                  {'device_id': device_id, 'network_id': network.id})
        for port in network.ports:
            port_device_id = getattr(port, 'device_id', None)
            if port_device_id == constants.DEVICE_ID_RESERVED_DHCP_PORT:
                port = self.plugin.update_dhcp_port(
                    port.id, {'port': {'network_id': network.id,
                                       'device_id': device_id}})
                if port:
                    return port
def _setup_new_dhcp_port(self, network, device_id, dhcp_subnets):
"""Create and set up new DHCP port for the specified network."""
LOG.debug('DHCP port %(device_id)s on network %(network_id)s'
' does not yet exist. Creating new one.',
{'device_id': device_id, 'network_id': network.id})
# Make a list of the subnets that need a unique IP address for
# this DHCP port.
if self.driver.use_gateway_ips:
unique_ip_subnets = []
else:
unique_ip_subnets = [dict(subnet_id=s) for s in dhcp_subnets]
port_dict = dict(
name='',
admin_state_up=True,
device_id=device_id,
network_id=network.id,
tenant_id=network.tenant_id,
fixed_ips=unique_ip_subnets)
return self.plugin.create_dhcp_port({'port': port_dict})
    def _check_dhcp_port_subnet(self, dhcp_port, dhcp_subnets, network):
        """Check if DHCP port IPs are in the range of the DHCP subnets
        FIXME(kevinbenton): ensure we have the IPs we actually need.
        can be removed once bug/1627480 is fixed
        """
        if self.driver.use_gateway_ips:
            return
        expected = set(dhcp_subnets)
        actual = {fip.subnet_id for fip in dhcp_port.fixed_ips}
        missing = expected - actual
        if not missing:
            return
        LOG.debug('Requested DHCP port with IPs on subnets %(expected)s '
                  'but only got IPs on subnets %(actual)s.',
                  {'expected': expected, 'actual': actual})
        # Re-fetch once: the server may have allocated the missing IPs
        # after our stale copy of the port was taken.
        updated_dhcp_port = self.plugin.get_dhcp_port(dhcp_port.id)
        actual = {fip.subnet_id for fip in updated_dhcp_port.fixed_ips}
        missing = expected - actual
        if missing:
            raise exceptions.SubnetMismatchForPort(
                port_id=updated_dhcp_port.id, subnet_id=list(missing)[0])
        self._update_dhcp_port(network, updated_dhcp_port)
        LOG.debug('Previous DHCP port information: %(dhcp_port)s. Updated '
                  'DHCP port information: %(updated_dhcp_port)s.',
                  {'dhcp_port': dhcp_port,
                   'updated_dhcp_port': updated_dhcp_port})
    def setup_dhcp_port(self, network):
        """Create/update DHCP port for the host if needed and return port."""
        # The ID that the DHCP port will have (or already has).
        device_id = self.get_device_id(network)
        # Get the set of DHCP-enabled local subnets on this network.
        dhcp_subnets = {subnet.id: subnet for subnet in network.subnets
                        if subnet.enable_dhcp}
        # There are 3 cases: either the DHCP port already exists (but
        # might need to be updated for a changed set of subnets); or
        # some other code has already prepared a 'reserved' DHCP port,
        # and we just need to adopt that; or we need to create a new
        # DHCP port. Try each of those in turn until we have a DHCP
        # port.
        for setup_method in (self._setup_existing_dhcp_port,
                             self._setup_reserved_dhcp_port,
                             self._setup_new_dhcp_port):
            dhcp_port = setup_method(network, device_id, dhcp_subnets)
            if dhcp_port:
                break
        else:
            # All three strategies failed to produce a port.
            raise exceptions.Conflict()
        self._check_dhcp_port_subnet(dhcp_port, dhcp_subnets, network)
        # Convert subnet_id to subnet dict
        fixed_ips = [dict(subnet_id=fixed_ip.subnet_id,
                          ip_address=fixed_ip.ip_address,
                          subnet=dhcp_subnets[fixed_ip.subnet_id])
                     for fixed_ip in dhcp_port.fixed_ips
                     # we don't care about any ips on subnets irrelevant
                     # to us (e.g. auto ipv6 addresses)
                     if fixed_ip.subnet_id in dhcp_subnets]
        ips = [DictModel(item) if isinstance(item, dict) else item
               for item in fixed_ips]
        dhcp_port.fixed_ips = ips
        return dhcp_port
def _update_dhcp_port(self, network, port):
for index in range(len(network.ports)):
if network.ports[index].id == port.id:
network.ports[index] = port
break
else:
network.ports.append(port)
    def _cleanup_stale_devices(self, network, dhcp_port):
        """Unplug any devices found in the namespace except for dhcp_port."""
        LOG.debug("Cleaning stale devices for network %s", network.id)
        # With dhcp_port=None everything in the namespace is removed.
        skip_dev_name = (self.driver.get_device_name(dhcp_port)
                         if dhcp_port else None)
        ns_ip = ip_lib.IPWrapper(namespace=network.namespace)
        if not ns_ip.netns.exists(network.namespace):
            return
        for d in ns_ip.get_devices():
            # delete all devices except current active DHCP port device
            if d.name != skip_dev_name:
                LOG.debug("Found stale device %s, deleting", d.name)
                try:
                    self.unplug(d.name, network)
                except Exception:
                    # Best-effort cleanup: log and keep unplugging others.
                    LOG.exception("Exception during stale "
                                  "dhcp device cleanup")
    def plug(self, network, port, interface_name):
        """Plug device settings for the network's DHCP on this host."""
        self.driver.plug(network.id,
                         port.id,
                         interface_name,
                         port.mac_address,
                         namespace=network.namespace,
                         mtu=network.get('mtu'))
    def setup(self, network):
        """Create and initialize a device for network's DHCP on this host.

        Creates/adopts the DHCP port, prepares the namespace, plugs (or
        reuses) the interface, assigns addresses and default routes, and
        removes stale devices. Returns the interface name.
        """
        try:
            port = self.setup_dhcp_port(network)
        except Exception:
            with excutils.save_and_reraise_exception():
                # clear everything out so we don't leave dangling interfaces
                # if setup never succeeds in the future.
                self._cleanup_stale_devices(network, dhcp_port=None)
        self._update_dhcp_port(network, port)
        interface_name = self.get_interface_name(network, port)
        # Disable acceptance of RAs in the namespace so we don't
        # auto-configure an IPv6 address since we explicitly configure
        # them on the device. This must be done before any interfaces
        # are plugged since it could receive an RA by the time
        # plug() returns, so we have to create the namespace first.
        # It must also be done in the case there is an existing IPv6
        # address here created via SLAAC, since it will be deleted
        # and added back statically in the call to init_l3() below.
        if network.namespace:
            ip_lib.IPWrapper().ensure_namespace(network.namespace)
            ip_lib.set_ip_nonlocal_bind_for_namespace(network.namespace, 1,
                                                      root_namespace=True)
            if netutils.is_ipv6_enabled():
                self.driver.configure_ipv6_ra(network.namespace, 'default',
                                              constants.ACCEPT_RA_DISABLED)
        if ip_lib.ensure_device_is_ready(interface_name,
                                         namespace=network.namespace):
            LOG.debug('Reusing existing device: %s.', interface_name)
            # force mtu on the port for in case it was changed for the network
            mtu = getattr(network, 'mtu', 0)
            if mtu:
                self.driver.set_mtu(interface_name, mtu,
                                    namespace=network.namespace)
        else:
            try:
                self.plug(network, port, interface_name)
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.exception('Unable to plug DHCP port for '
                                  'network %s. Releasing port.',
                                  network.id)
                    # We should unplug the interface in bridge side.
                    self.unplug(interface_name, network)
                    self.plugin.release_dhcp_port(network.id, port.device_id)
            self.fill_dhcp_udp_checksums(namespace=network.namespace)
        # Build the list of CIDRs to configure on the interface.
        ip_cidrs = []
        for fixed_ip in port.fixed_ips:
            subnet = fixed_ip.subnet
            net = netaddr.IPNetwork(subnet.cidr)
            ip_cidr = '%s/%s' % (fixed_ip.ip_address, net.prefixlen)
            ip_cidrs.append(ip_cidr)
        if self.driver.use_gateway_ips:
            # For each DHCP-enabled subnet, add that subnet's gateway
            # IP address to the Linux device for the DHCP port.
            for subnet in network.subnets:
                if not subnet.enable_dhcp:
                    continue
                gateway = subnet.gateway_ip
                if gateway:
                    net = netaddr.IPNetwork(subnet.cidr)
                    ip_cidrs.append('%s/%s' % (gateway, net.prefixlen))
        if self.conf.force_metadata or self.conf.enable_isolated_metadata:
            ip_cidrs.append(constants.METADATA_CIDR)
            if netutils.is_ipv6_enabled():
                ip_cidrs.append(constants.METADATA_V6_CIDR)
        self.driver.init_l3(interface_name, ip_cidrs,
                            namespace=network.namespace)
        self._set_default_route(network, interface_name)
        self._cleanup_stale_devices(network, port)
        return interface_name
    def update(self, network, device_name):
        """Update device settings for the network's DHCP on this host."""
        self._set_default_route(network, device_name)
    def unplug(self, device_name, network):
        """Unplug device settings for the network's DHCP on this host."""
        self.driver.unplug(device_name, namespace=network.namespace)
    def destroy(self, network, device_name):
        """Destroy the device used for the network's DHCP on this host."""
        if device_name:
            self.unplug(device_name, network)
        else:
            LOG.debug('No interface exists for network %s', network.id)
        # Release the Neutron port regardless of interface presence.
        self.plugin.release_dhcp_port(network.id,
                                      self.get_device_id(network))
    def fill_dhcp_udp_checksums(self, namespace):
        """Ensure DHCP reply packets always have correct UDP checksums."""
        # Install CHECKSUM --checksum-fill mangle rules for both DHCPv4
        # and DHCPv6 client ports in the given namespace.
        iptables_mgr = iptables_manager.IptablesManager(use_ipv6=True,
                                                        nat=False,
                                                        namespace=namespace)
        ipv4_rule = ('-p udp -m udp --dport %d -j CHECKSUM --checksum-fill'
                     % constants.DHCP_CLIENT_PORT)
        ipv6_rule = ('-p udp -m udp --dport %d -j CHECKSUM --checksum-fill'
                     % constants.DHCPV6_CLIENT_PORT)
        iptables_mgr.ipv4['mangle'].add_rule('POSTROUTING', ipv4_rule)
        iptables_mgr.ipv6['mangle'].add_rule('POSTROUTING', ipv6_rule)
        iptables_mgr.apply()
| 42.705916 | 79 | 0.578939 |
44666844371ff8fad62e6bbdb102cad761f74a80 | 1,826 | py | Python | oo_trees/dataset.py | asross/decision_trees | 8cbdc7a90474abbeddcfccdaadea0e67d67aec86 | [
"Apache-2.0"
] | 1 | 2021-09-18T08:42:05.000Z | 2021-09-18T08:42:05.000Z | oo_trees/dataset.py | asross/decision_trees | 8cbdc7a90474abbeddcfccdaadea0e67d67aec86 | [
"Apache-2.0"
] | null | null | null | oo_trees/dataset.py | asross/decision_trees | 8cbdc7a90474abbeddcfccdaadea0e67d67aec86 | [
"Apache-2.0"
] | null | null | null | from collections import defaultdict
from collections import Counter
from .outcome_counter import *
from .attribute import *
from .single_attribute_splitter_finder import *
import random
import numpy
class Dataset():
def __init__(self, X, y, attributes=None):
self.X = X
self.y = y
self.outcome_counter = OutcomeCounter(y)
self.attributes = attributes or [CategoricalAttribute(i) for i in range(X.shape[1])]
assert self.X.shape[0] == len(y), "len(y) must match len(X)"
assert self.X.shape[1] == len(self.attributes), "len(attributes) must match len(X[i])"
def __len__(self):
return self.X.shape[0]
def best_single_attribute_splitter(self):
finder = SingleAttributeSplitterFinder(self, n=len(self.attributes))
return finder.best_splitter()
def splitter_entropy(self, splitter):
splits = defaultdict(OutcomeCounter)
for i in range(len(self)):
splits[splitter.split(self.X[i])].record(self.y[i])
return sum(y.total * y.entropy() for y in splits.values()) / float(len(self))
def split_on(self, splitter):
splits = defaultdict(list)
for i in range(len(self)):
splits[splitter.split(self.X[i])].append(i)
return { value: self.take(indices) for value, indices in splits.items() }
def take(self, indices):
return self.__class__(self.X.take(indices, 0), self.y.take(indices), self.attributes)
def random_split(self, fraction):
n_examples = int(len(self) * fraction)
indices = list(range(len(self)))
random.shuffle(indices)
return self.take(indices[:n_examples]), self.take(indices[n_examples:])
def bootstrap(self, n=None):
return self.take([random.randrange(len(self)) for _i in range(n or len(self))])
| 38.041667 | 94 | 0.661008 |
ebb3c6612d909e8f635165731f3b91dbe29120c3 | 183 | py | Python | Python101/ep3-Datatypes.py | hemaalathank/incredible-dev-videos | ff1cb7285427d3f851fee292ece3b8838728a7f8 | [
"MIT"
] | 2 | 2021-02-23T15:33:34.000Z | 2021-03-09T06:59:02.000Z | Python101/ep3-Datatypes.py | hemaalathank/incredible-dev-videos | ff1cb7285427d3f851fee292ece3b8838728a7f8 | [
"MIT"
] | null | null | null | Python101/ep3-Datatypes.py | hemaalathank/incredible-dev-videos | ff1cb7285427d3f851fee292ece3b8838728a7f8 | [
"MIT"
] | 2 | 2021-03-09T06:20:28.000Z | 2021-03-18T10:47:11.000Z | # Basic Data types in Python
print(type(1)) # <type 'int'>
print(type(1.432)) # <type 'float'>
print(type("abcd")) # <type 'str'>
print(type(True))
print(type(False)) # <type 'bool'> | 30.5 | 35 | 0.63388 |
9a16956e5a4621656f3fd22c110ffd78b04f82ca | 3,546 | py | Python | messente_api/models/error_code_phonebook.py | messente/messente-api-python | 154abca9e6a226a5c97d8052c3f2631765503426 | [
"Apache-2.0"
] | null | null | null | messente_api/models/error_code_phonebook.py | messente/messente-api-python | 154abca9e6a226a5c97d8052c3f2631765503426 | [
"Apache-2.0"
] | null | null | null | messente_api/models/error_code_phonebook.py | messente/messente-api-python | 154abca9e6a226a5c97d8052c3f2631765503426 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Messente API
[Messente](https://messente.com) is a global provider of messaging and user verification services. * Send and receive SMS, Viber, WhatsApp and Telegram messages. * Manage contacts and groups. * Fetch detailed info about phone numbers. * Blacklist phone numbers to make sure you're not sending any unwanted messages. Messente builds [tools](https://messente.com/documentation) to help organizations connect their services to people anywhere in the world. # noqa: E501
The version of the OpenAPI document: 1.4.0
Contact: messente@messente.com
Generated by: https://openapi-generator.tech
"""
import inspect
import pprint
import re # noqa: F401
import six
from messente_api.configuration import Configuration
class ErrorCodePhonebook(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
allowed enum values
"""
_201 = "201"
_202 = "202"
_203 = "203"
_204 = "204"
_244 = "244"
_205 = "205"
allowable_values = [_201, _202, _203, _204, _244, _205] # noqa: E501
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
}
attribute_map = {
}
def __init__(self, local_vars_configuration=None): # noqa: E501
"""ErrorCodePhonebook - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self.discriminator = None
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = inspect.getargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ErrorCodePhonebook):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ErrorCodePhonebook):
return True
return self.to_dict() != other.to_dict()
| 31.105263 | 473 | 0.589961 |
0d1f1b447103793456d278e29a85b2da74d38e0d | 2,388 | py | Python | backend/server/backend/channels/utils.py | FlickerSoul/Graphery | 8b1390e1ba96fd2867f0cd8e5fc1d4ad6108121e | [
"MIT"
] | 5 | 2020-08-26T00:15:01.000Z | 2021-01-11T17:24:51.000Z | backend/server/backend/channels/utils.py | FlickerSoul/Graphery | 8b1390e1ba96fd2867f0cd8e5fc1d4ad6108121e | [
"MIT"
] | 69 | 2020-08-02T23:45:44.000Z | 2021-04-17T03:04:32.000Z | backend/server/backend/channels/utils.py | FlickerSoul/Graphery | 8b1390e1ba96fd2867f0cd8e5fc1d4ad6108121e | [
"MIT"
] | 4 | 2020-09-10T05:40:49.000Z | 2020-12-20T11:44:16.000Z | import json
from urllib import request
from os import getenv
from queue import Queue
from typing import Mapping
from channels.consumer import SyncConsumer
from bundle.server_utils.utils import create_error_response
from bundle.server_utils.params import VERSION
_REMOTE_URL = getenv('GRAPHERY_REMOTE_EXECUTE_URL', 'http://localhost')
def post_request(url: str, data: Mapping[str, str],
                 timeout: float = 30.0) -> Mapping:
    """POST `data` as JSON to `url` and return the decoded JSON response.

    Fixes: the original never closed the HTTP response object (resource
    leak until GC) and had no timeout, so a hung server blocked forever.
    `timeout` defaults to 30s and is backward-compatible for callers.
    """
    encoded_data = json.dumps(data).encode('UTF-8')
    req = request.Request(url, data=encoded_data,
                          headers={'content-type': 'application/json'})
    with request.urlopen(req, timeout=timeout) as response:
        return json.loads(response.read().decode('UTF-8'))
class ProcessHandler:
    """Serializes code-execution requests from websocket consumers.

    Consumers are queued and executed one at a time against the remote
    executor service.
    """
    def __init__(self):
        # FIFO of SyncConsumer instances awaiting execution.
        self.processing_queue = Queue()
    def enqueue(self, consumer: SyncConsumer) -> None:
        """Queue a consumer and immediately trigger processing."""
        self.processing_queue.put(consumer)
        self.coordinate()
    def dequeue(self) -> SyncConsumer:
        # NOTE(review): Queue.get() blocks when empty — presumably enqueue()
        # guarantees at least one item is present; verify against callers.
        return self.processing_queue.get()
    @staticmethod
    def get_code(consumer: SyncConsumer) -> str:
        return consumer.get_code()
    @staticmethod
    def get_graph_json_obj(consumer: SyncConsumer) -> Mapping:
        return consumer.get_graph_json_obj()
    @staticmethod
    def should_execute(consumer: SyncConsumer) -> bool:
        # Skip consumers whose websocket has already closed.
        return not consumer.is_closed
    @staticmethod
    def execute(code: str, graph_json_obj: Mapping) -> Mapping:
        """POST code+graph to the remote executor; error response if absent."""
        if code and graph_json_obj:
            response = post_request(f'{_REMOTE_URL}:7590/run',
                                    data={'code': code,
                                          'graph': graph_json_obj,
                                          'version': VERSION})
            return response
        return create_error_response('Cannot Read Code Or Graph Object')
    @staticmethod
    def executed(consumer: SyncConsumer, result_mapping: Mapping) -> None:
        """Deliver the execution result back to the consumer."""
        consumer.executed(result_mapping)
    def start_executing(self) -> None:
        """Pop one consumer and run it if its connection is still open."""
        first_consumer = self.dequeue()
        if self.should_execute(consumer=first_consumer):
            code = self.get_code(first_consumer)
            graph_json_obj = self.get_graph_json_obj(first_consumer)
            result_mapping: Mapping = self.execute(code, graph_json_obj)
            self.executed(first_consumer, result_mapping)
# Module-level singleton shared by all consumers.
process_handler = ProcessHandler()
5aff5c7d7ca94959076309b328f5a91d369ad414 | 9,123 | py | Python | app.py | shameem16/Online-Employee-Payroll-System | 32bb2375338dbcdcc22864b729b46cabb7422893 | [
"MIT"
] | null | null | null | app.py | shameem16/Online-Employee-Payroll-System | 32bb2375338dbcdcc22864b729b46cabb7422893 | [
"MIT"
] | null | null | null | app.py | shameem16/Online-Employee-Payroll-System | 32bb2375338dbcdcc22864b729b46cabb7422893 | [
"MIT"
] | null | null | null | from flask import Flask, flash, redirect, render_template, request, session, abort
import os
import json
import openpyxl
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
app = Flask(__name__, static_url_path='/static')
db=json.load(open("database/db.json"))
@app.route('/')
def hrlogin():
    """Show the HR login page and reset any existing session."""
    session['logged_in'] = False
    return render_template('HRlogin.html')
@app.route('/hrlogin', methods=["POST"])
def hlogin():
    """Authenticate an HR user from the login form (POST /hrlogin).

    NOTE(review): passwords are stored and compared in plaintext in the
    JSON db, and the distinct "Wrong Username"/"Wrong password" messages
    allow username enumeration — consider hashing and a generic message.
    """
    username = request.form['Username']
    paswrd = request.form['Password']
    if (username in db['hrlogin']):
        if db['hrlogin'][username]['password']!=paswrd:
            return render_template('HRlogin.html', msg="Wrong password")
        else:
            session['logged_in'] = True
            return render_template('hrhhome.html')
    else:
        return render_template('HRlogin.html', msg="Wrong Username")
@app.route('/add_emp',methods=["POST"])
def addemp():
    """Register a new employee from the POST form and persist the db.

    Renders the HR home page with a status message; duplicate ids are
    rejected without touching the db.
    """
    # All fields are read up-front so a missing field still fails with a
    # 400 before any state is modified (same as the original flow).
    emp_id = request.form['empid']  # renamed from `id`, which shadowed the builtin
    name = request.form['name']
    age = request.form['age']
    designation = request.form['designation']
    mn = request.form['mn']
    email = request.form['email']
    salary = request.form['salary']
    if emp_id in db['employees_list']:
        return render_template('hrhhome.html', msg="Employee already exists")
    db['employees_list'].append(emp_id)
    db['employees'][emp_id] = {
        "id": emp_id,
        "name": name,
        "age": age,
        "designation": designation,
        "mobile": mn,
        "email": email,
        "salary": salary
    }
    # BUG FIX: the original `json.dump(db, open(...))` never closed the
    # file handle, risking unflushed/truncated writes; `with` guarantees
    # flush and close.
    with open("database/db.json", "w") as db_file:
        json.dump(db, db_file)
    return render_template('hrhhome.html', msg="Employee added")
@app.route('/add_atn',methods=['POST'])
def add_attendance():
    """Record days-present for an employee/month in the attendance workbook.

    Sheet layout: row 1 holds month headers, column 1 holds employee ids;
    the intersection cell stores the days-present value.
    """
    emp_id = request.form['empid']
    year = request.form['year']  # read for form completeness; not stored
    month = request.form['month']
    wd = request.form['present']
    if emp_id not in db['employees_list']:
        return render_template('hrhhome.html', msg="Employee doesn't exists add employee first")
    wb = openpyxl.load_workbook('excel/attendance_employee.xlsx')
    ws = wb.active
    # Locate the column whose header matches `month`, or append a new one.
    month_col = None
    for i in range(1, ws.max_column + 1):
        if ws.cell(row=1, column=i).value == month:
            month_col = i
            break
    if month_col is None:
        month_col = ws.max_column + 1
        ws.cell(row=1, column=month_col).value = month
    # BUG FIX: the original incremented the column index unconditionally
    # (`i=i+1`) even when the month header already existed, so repeat
    # submissions for an existing month wrote attendance one column to the
    # right of the correct month. The row logic only advanced when a new
    # row was appended; the column logic now matches it.
    emp_row = None
    for j in range(1, ws.max_row + 1):
        if ws.cell(row=j, column=1).value == emp_id:
            emp_row = j
            break
    if emp_row is None:
        emp_row = ws.max_row + 1
        ws.cell(row=emp_row, column=1).value = emp_id
    ws.cell(row=emp_row, column=month_col).value = wd
    wb.save("excel/attendance_employee.xlsx")
    return render_template('hrhhome.html', msg="Attendence added")
@app.route('/cal_sal',methods=['POST'])
def emp_verify():
    """Validate that a payslip can be generated for (employee, month) and
    show the basic-pay calculation page (POST /cal_sal).
    """
    emp_id = request.form['empid']
    month = request.form['month']
    if emp_id not in db['employees_list']:
        return render_template('hrhhome.html', msg="Employee doesn't exists add employee first")
    if emp_id in db['payslipgenerated'] and month in db['payslipgenerated'][emp_id]['month']:
        return render_template('hrhhome.html', msg="Salary already generated")
    wb = openpyxl.load_workbook('excel/attendance_employee.xlsx')
    ws = wb.active
    # Find the column for the requested month.
    month_col = None
    for i in range(1, ws.max_column + 1):
        if ws.cell(row=1, column=i).value == month:
            month_col = i
            break
    if month_col is None:
        return render_template('hrhhome.html', msg="Employee attendance is not entered add attendance for employee")
    # BUG FIX: the original reused its `flag` variable without resetting it
    # after the month search succeeded, so a missing employee row was never
    # detected and the attendance value of the last scanned row was used
    # silently. Searching with a None sentinel makes the miss explicit.
    emp_row = None
    for j in range(1, ws.max_row + 1):
        if ws.cell(row=j, column=1).value == emp_id:
            emp_row = j
            break
    if emp_row is None:
        return render_template('hrhhome.html', msg="Employee attendance for the month given is not entered")
    pd = ws.cell(row=emp_row, column=month_col).value
    if pd is None:
        return render_template('hrhhome.html', msg="Employee attendance for the month given is not entered")
    payperday = db['employees'][emp_id]['salary']
    basicpay = int(pd) * int(payperday)
    print(basicpay)  # retained: original traced the computed pay to stdout
    return render_template('cal_sal.html', basicpay=basicpay, id=emp_id, month=month)
@app.route('/cal_sal_emp',methods=['POST'])
def cal_Sal():
    """Compute final salary, write a payslip file, and email it (POST /cal_sal_emp).

    SECURITY NOTE(review): SMTP credentials are hard-coded below — move
    them to environment variables/secrets storage and rotate the exposed
    password. The shared payslip.txt path is also a race between requests.
    """
    id=request.form['empid']
    month = request.form['month']
    basicpay = request.form['basicpay']
    da=request.form['da']
    hra=request.form['hra']
    ta = request.form['ta']
    ca = request.form['ca']
    it = request.form['it']
    pt = request.form['pt']
    emi = request.form['emi']
    # earnings = basic + allowances; deductions = taxes + EMI
    total_earnings=int(basicpay)+int(da)+int(hra)+int(ta)+int(ca)
    total_deductions=int(it)+int(pt)+int(emi)
    total_salary=total_earnings-total_deductions
    print(total_salary)
    # NOTE(review): os.linesep in text mode produces \r\r\n on Windows;
    # plain "\n" would be safer here.
    f=open('payslip/payslip.txt',"w")
    f.write("ID:"+id+ os.linesep)
    f.write("Month:"+month+ os.linesep)
    f.write("Basicpay:"+ basicpay+ os.linesep)
    f.write("DA:"+ da+ os.linesep)
    f.write("HRA:"+hra+ os.linesep)
    f.write("CCA:"+ ca+ os.linesep)
    f.write("Transport allowance:"+ta+ os.linesep)
    f.write("Income tax:"+ it+ os.linesep)
    f.write("Proffesional tax:"+ pt+ os.linesep)
    f.write("EMI:"+ emi+ os.linesep)
    f.write("Total Earnings:"+str(total_earnings)+ os.linesep)
    f.write("Total deductuions:"+str(total_deductions)+ os.linesep)
    f.write("Total salary:"+str(total_salary)+ os.linesep)
    f.close()
    # Mark this (employee, month) as generated and persist the db.
    if id not in db['payslipgenerated']:
        db['payslipgenerated'][id]={
            "month":[]
        }
    db['payslipgenerated'][id]['month'].append(month)
    json.dump(db, open("database/db.json", "w"))
    # Re-read the payslip file as the email body.
    f=open("payslip/payslip.txt","r")
    message=f.read()
    f.close()
    # SECURITY: hard-coded credentials — see docstring note above.
    sender = "cb.en.u4cse17541@cb.students.amrita.edu"
    password="Asdfgf;lkjhj"
    receivers = db['employees'][id]['email']
    s = smtplib.SMTP(host='smtp-mail.outlook.com', port=587)
    s.starttls()
    s.login(sender, password)
    msg = MIMEMultipart()
    print(message)
    msg['From'] = sender
    msg['To'] = receivers
    msg['Subject'] = "Payslip for "+month
    msg.attach(MIMEText(message, 'plain'))
    s.send_message(msg)
    s.quit()
    return render_template('hrhhome.html', msg="Pay slip sent to the employee")
@app.route('/mod_emp',methods=['POST'])
def mod_emp():
    """Modify or delete an employee record depending on the pressed button."""
    emp_id = request.form['empid']
    # NOTE(review): the record is read unconditionally, so an unknown id
    # raises KeyError here even for the Delete action (as in the original).
    record = db['employees'][emp_id]
    name = record['name']
    age = record['age']
    designation = record['designation']
    mobile = record['mobile']
    email = record['email']
    salary = record['salary']
    action = request.form['btn1']
    if action == 'Modify':
        # Pre-populate the edit form with the current field values.
        return render_template(
            'edit_employee.html',
            id=emp_id,
            name=name,
            age=age,
            designation=designation,
            mobile=mobile,
            email=email,
            salary=salary,
        )
    elif action == 'Delete':
        if emp_id not in db['employees_list']:
            return render_template('hrhhome.html', msg="There is no such employee to delete")
        db['employees_list'].remove(emp_id)
        del db['employees'][emp_id]
        json.dump(db, open("database/db.json", "w"))
        return render_template('hrhhome.html', msg="Employee "+emp_id +" is deleted")
@app.route('/edt_emp',methods=['POST'])
def edt_emp():
    """Persist edited employee details posted from the edit form."""
    form = request.form
    emp_id = form['empid']
    # Map form field names onto the stored record schema ('mn' -> 'mobile').
    db['employees'][emp_id] = {
        "id": emp_id,
        "name": form['name'],
        "age": form['age'],
        "designation": form['designation'],
        "mobile": form['mn'],
        "email": form['email'],
        "salary": form['salary'],
    }
    json.dump(db, open("database/db.json", "w"))
    return render_template('hrhhome.html', msg="Employee " + emp_id +" details updated")
@app.route('/back',methods=['GET', 'POST'])
def back():
    # Navigation helper: always returns to the HR dashboard page.
    return render_template('hrhhome.html')
@app.route('/logout',methods=['GET', 'POST'])
def logout():
    # Clear the session flag and send the user back to the login page.
    session['logged-in']=False
    return render_template('HRlogin.html',msg="Successfully logged out")
@app.route('/hrhome',methods=['POST'])
def hr_home():
    """Dispatch the HR dashboard buttons to their respective pages."""
    action = request.form['btn1']
    if action == 'Add an Employee':
        return render_template('add_employee.html')
    elif action == 'Add Attendance':
        return render_template('add_attendance.html')
    elif action == 'Calculate Salary':
        return render_template('calculate_salary.html')
    elif action == 'Manage Employee Details':
        return render_template('manage_employee.html')
    # ROBUSTNESS: the original fell off the end (returning None -> HTTP 500)
    # for an unexpected button value; fall back to the dashboard instead.
    return render_template('hrhhome.html')
if __name__ == "__main__":
    # A fresh random secret each run invalidates existing sessions on restart.
    app.secret_key = os.urandom(12)
    print("hello")
    app.run(debug=True, port=5000)  # NOTE(review): debug=True is unsafe in production
74eb84466320120efc4a2544d5ceadf4608e3e5e | 3,496 | py | Python | google/ads/google_ads/v1/proto/services/campaign_budget_service_pb2_grpc.py | jiulongw/google-ads-python | 6f5256eb1eeb5a9a95c8cdb9b97988d3a676282e | [
"Apache-2.0"
] | 1 | 2019-11-30T23:42:39.000Z | 2019-11-30T23:42:39.000Z | google/ads/google_ads/v1/proto/services/campaign_budget_service_pb2_grpc.py | jiulongw/google-ads-python | 6f5256eb1eeb5a9a95c8cdb9b97988d3a676282e | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v1/proto/services/campaign_budget_service_pb2_grpc.py | jiulongw/google-ads-python | 6f5256eb1eeb5a9a95c8cdb9b97988d3a676282e | [
"Apache-2.0"
] | 1 | 2020-03-13T00:14:31.000Z | 2020-03-13T00:14:31.000Z | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.ads.google_ads.v1.proto.resources import campaign_budget_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_campaign__budget__pb2
from google.ads.google_ads.v1.proto.services import campaign_budget_service_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_campaign__budget__service__pb2
class CampaignBudgetServiceStub(object):
  """Proto file describing the Campaign Budget service.

  Service to manage campaign budgets.
  """

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    # Each attribute is a callable for one unary-unary RPC; the serializer /
    # deserializer pair converts between protobuf messages and wire bytes.
    self.GetCampaignBudget = channel.unary_unary(
        '/google.ads.googleads.v1.services.CampaignBudgetService/GetCampaignBudget',
        request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_campaign__budget__service__pb2.GetCampaignBudgetRequest.SerializeToString,
        response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_campaign__budget__pb2.CampaignBudget.FromString,
        )
    self.MutateCampaignBudgets = channel.unary_unary(
        '/google.ads.googleads.v1.services.CampaignBudgetService/MutateCampaignBudgets',
        request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_campaign__budget__service__pb2.MutateCampaignBudgetsRequest.SerializeToString,
        response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_campaign__budget__service__pb2.MutateCampaignBudgetsResponse.FromString,
        )
class CampaignBudgetServiceServicer(object):
  """Proto file describing the Campaign Budget service.

  Service to manage campaign budgets.
  """

  def GetCampaignBudget(self, request, context):
    """Returns the requested Campaign Budget in full detail.
    """
    # Generated default: report UNIMPLEMENTED until a subclass overrides this.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def MutateCampaignBudgets(self, request, context):
    """Creates, updates, or removes campaign budgets. Operation statuses are
    returned.
    """
    # Generated default: report UNIMPLEMENTED until a subclass overrides this.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_CampaignBudgetServiceServicer_to_server(servicer, server):
  # Pair each servicer method with its protobuf (de)serializers, then register
  # the handlers under the fully qualified service name.
  rpc_method_handlers = {
      'GetCampaignBudget': grpc.unary_unary_rpc_method_handler(
          servicer.GetCampaignBudget,
          request_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_campaign__budget__service__pb2.GetCampaignBudgetRequest.FromString,
          response_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_campaign__budget__pb2.CampaignBudget.SerializeToString,
      ),
      'MutateCampaignBudgets': grpc.unary_unary_rpc_method_handler(
          servicer.MutateCampaignBudgets,
          request_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_campaign__budget__service__pb2.MutateCampaignBudgetsRequest.FromString,
          response_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_campaign__budget__service__pb2.MutateCampaignBudgetsResponse.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'google.ads.googleads.v1.services.CampaignBudgetService', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
| 49.942857 | 173 | 0.821224 |
cd971b815fe824e14e979b1bece0cd8a41cf0d18 | 2,632 | py | Python | examples/intermediate/build.py | rhattersley/curly-octo-journey | ba802f2545f37968b35b5f790a6ad91999caa450 | [
"MIT"
] | null | null | null | examples/intermediate/build.py | rhattersley/curly-octo-journey | ba802f2545f37968b35b5f790a6ad91999caa450 | [
"MIT"
] | null | null | null | examples/intermediate/build.py | rhattersley/curly-octo-journey | ba802f2545f37968b35b5f790a6ad91999caa450 | [
"MIT"
] | null | null | null | import os
import jinja2
import yaml
class _StackOutput:
def __init__(self, stack, output):
self.stack = stack
self.output = output
def __repr__(self):
return '!Ref {}.{}'.format(self.stack, self.output)
@staticmethod
def constructor(loader, node):
value = loader.construct_scalar(node)
stack, output = value.split('.')
return _StackOutput(stack, output)
# Recognise the custom !StackOutput tag when parsing config files.
yaml.add_constructor('!StackOutput', _StackOutput.constructor)
class _Unary:
def __init__(self, tag, arg):
self.tag = tag
self.arg = arg
@staticmethod
def constructor(loader, node):
value = loader.construct_scalar(node)
return _Unary(node.tag, value)
@staticmethod
def representer(dumper, data):
return dumper.represent_scalar(data.tag, data.arg)
# Round-trip the remaining single-argument tags unchanged through load/dump.
for name in ('!ImportValue', '!Ref'):
    yaml.add_constructor(name, _Unary.constructor)
yaml.add_representer(_Unary, _Unary.representer)
def _fiddle(stack_name, t):
    """Ensure every output in template text *t* has an Export name.

    Missing exports are named ``<stack_name>-<output_name>`` so other stacks
    can cross-reference them; returns the re-serialised YAML text.
    """
    # NOTE(review): yaml.load without an explicit Loader relies on the custom
    # constructors registered at module level; do not switch to safe_load.
    w = yaml.load(t)
    for name, detail in w.get('Outputs', {}).items():
        if 'Export' not in detail:
            detail['Export'] = {'Name': '{}-{}'.format(stack_name, name)}
    return yaml.dump(w, default_flow_style=False)
def go():
    """Render every stack config under ``config/`` into ``output/<stack>.yaml``.

    For each config file: resolve !StackOutput references into
    ``!ImportValue stack-output`` strings (recording inter-stack dependencies),
    render the referenced Jinja2 template with the config values, add missing
    Export names, and write the result.
    """
    dependencies = {}
    for path in os.listdir('config'):
        stack_name, _ = path.split('.')
        #print('Loading', path)
        with open('config/' + path) as f:
            config = yaml.load(f)
        # Collect keys whose value is a cross-stack reference.
        import_keys = []
        for key, value in config.items():
            if isinstance(value, _StackOutput):
                #print(stack_name, 'depends on', value.stack)
                d = dependencies.setdefault(stack_name, [])
                d.append(value.stack)
                import_keys.append(key)
        #print('Import keys:', import_keys)
        # Rewrite references to the exported-name form used by CloudFormation.
        for key in import_keys:
            ref = config[key]
            config[key] = '!ImportValue {}-{}'.format(ref.stack, ref.output)
        #print('New config:', config)
        p = 'templates/' + config['template'] + '.yaml'
        #print(p)
        with open(p) as tf:
            stuff = tf.read()
        #print('Raw:', stuff)
        jinja_template = jinja2.Template(stuff)
        template = jinja_template.render(config)
        template = _fiddle(stack_name, template)
        #print(template)
        with open('output/' + stack_name + '.yaml', 'w') as out:
            out.write(template)
            out.write('\n')
        #break
    #print(dependencies)
if __name__ == '__main__':
    # Script entry point: render all stack templates.
    go()
| 29.244444 | 80 | 0.565729 |
350e0c8b8328c6070d453302b793c1ecd8fe083f | 13,406 | py | Python | main_al.py | osimeoni/RethinkingDeepActiveLearning | 8a0056f47605276b1c9d8338d423433dae73f5e9 | [
"MIT"
] | 4 | 2021-03-01T17:43:48.000Z | 2022-03-24T11:28:48.000Z | main_al.py | osimeoni/RethinkingDeepActiveLearning | 8a0056f47605276b1c9d8338d423433dae73f5e9 | [
"MIT"
] | null | null | null | main_al.py | osimeoni/RethinkingDeepActiveLearning | 8a0056f47605276b1c9d8338d423433dae73f5e9 | [
"MIT"
] | null | null | null | # Authors: O. Simeoni, M. Budnik, Y. Avrithis, G. Gravier, 2019
import os
import time
import random
import pickle
import numpy as np
import torch
import pdb
from tqdm import tqdm
from time import gmtime, strftime
from lib import checkpoints, parse_args, training, label_propagation, pretraining
from lib import models, datasets, cli, data
import lib.selection_methods as selection_methods
# Fix all RNG sources (torch CPU/GPU, numpy, python) for reproducibility and
# force deterministic cuDNN kernels (benchmark auto-tuning disabled).
torch.manual_seed(7)
torch.cuda.manual_seed(7)
np.random.seed(7)
random.seed(7)
torch.backends.cudnn.deterministic=True
torch.backends.cudnn.benchmark = False

# Populated from the command line in __main__; module-level for easy access.
args = None
def apply_random_seed(seed, log_file):
    """Re-seed every RNG when a non-default seed was requested.

    The module already seeds everything with 7 at import time, so only a
    different seed needs to be applied (and logged) here.
    """
    if seed == 7:
        return
    print_log('SEED: changing seed to %d'%seed, log_file)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
def print_log(msg, log):
    """Print *msg* to stdout and append it (newline-terminated) to file *log*.

    Parameters
    ----------
    msg : str
        Message to record.
    log : str
        Path of the log file; opened in append mode so earlier entries are kept.
    """
    print(msg)
    # Use a distinct name for the handle: the original rebound the `log`
    # parameter inside the `with` statement, shadowing the path argument.
    with open(log, 'a') as fh:
        fh.write('%s\n' % msg)
def write_log_dist_class(dataset, labeled_idxs, log):
    """Write the class distribution of the labeled subset to the *log* handle."""
    selected_targets = []
    for idx, target in enumerate(dataset.targets):
        if idx in labeled_idxs:
            selected_targets.append(target)
    classes, class_counts = np.unique(selected_targets, return_counts=True)
    distribution = dict(zip(classes, class_counts))
    log.write('targets %s \n' % str(distribution))
def create_folder(folder):
    """Create *folder* (including parents) if needed and return its path.

    ``exist_ok=True`` makes the call race-free: the original
    ``exists()``-then-``makedirs()`` pattern could raise if another process
    created the directory between the two calls.
    """
    os.makedirs(folder, exist_ok=True)
    return folder
def load_unsupervised_pretrained_model(args):
    """Load unsupervised-pretraining weights matching ``args.dataset``.

    Raises
    ------
    ValueError
        If the dataset name matches none of the shipped checkpoints.
    """
    root_dir = './models/pretrained/'
    # Ordered substring -> checkpoint mapping; first match wins, mirroring the
    # original if/elif chain ('mnist' before 'svhn' before 'cifar').
    checkpoints = (
        ('mnist', 'pretrained_mnist_cifar_cnn_lr0.02_batch_128_final.pickle'),
        ('svhn', 'pretrained_svhn_cifar_cnn_lr0.02_batch_128_final.pickle'),
        ('cifar', 'pretrained_cifar_cifar_cnn_lr0.02_batch_128_final.pickle'),
    )
    for key, filename in checkpoints:
        if key in args.dataset:
            pretraining_path = os.path.join(root_dir, filename)
            break
    else:
        raise ValueError('No pretraining for this dataset')

    with open(pretraining_path, 'rb') as f:
        return pickle.load(f)
def main():
    """Run the full active-learning experiment.

    Builds the experiment directory tree, loads the dataset split, optionally
    restores a previous run, then alternates training cycles with acquisition
    (AL selection), optional label propagation and optional CEAL pseudo-labels.
    """
    # -------------------------------------------------------------------------------
    # Directories
    dir_path = 'exps/'
    method_str = args.al_method
    if args.add_lp:
        method_str += '_withLP'
    if args.add_ceal:
        method_str += '_withCEAL'
    dir_path = create_folder(os.path.join(dir_path, args.dataset,
                                          'budget%d'% args.al_budget,
                                          args.arch, method_str,
                                          parse_args.from_args_to_string(args), 'split%d' % args.split))
    ckpt_dir = create_folder(os.path.join(dir_path, 'ckpt'))
    label_dir = create_folder(os.path.join(dir_path, 'used_labels'))
    log_dir = create_folder(os.path.join(dir_path, 'logs'))
    # NOTE(review): weight_dir is created but never used below.
    weight_dir = create_folder(os.path.join(log_dir, 'weights'))

    # -------------------------------------------------------------------------------
    # Check dataset exists
    data_root = 'data/'
    labels_file = os.path.join(data_root,
                               'labels/%s/%d_balanced_labels/0%d.txt' % (args.dataset,
                                                                         args.al_budget,
                                                                         args.split))
    args.labels = labels_file

    if not os.path.exists(labels_file):
        raise ValueError('Non existing label file %s' % labels_file)

    # Dataset
    dataset_config = datasets.__dict__[args.dataset]()
    num_classes = dataset_config.pop('num_classes')
    train_loader, train_loader_noshuff, eval_loader,\
        labeled_idxs, unlabeled_idxs, dataset, \
        test_loader, batch_sampler = training.create_data_loaders(args=args, \
                                                                  **dataset_config)

    if args.add_ceal:
        ceal_pseudo_labels_idxs = list()

    # Logging
    results_file = os.path.join(log_dir, 'results.csv')
    test_results_file = os.path.join(log_dir, 'test_results.csv')
    log_file = os.path.join(log_dir, '%s_%s.txt' % (strftime("%Y-%m-%d_%H-%M-%S", gmtime()),
                                                    args.exp_name))

    print_log('Log will be saved to %s' % log_file, log_file)
    print_log('Results will be saved to %s' % results_file, log_file)
    print_log('\nArgs: ', log_file)
    print_log(str(args) + '\n', log_file)

    with open(log_file, 'a') as log:
        write_log_dist_class(dataset, labeled_idxs, log)

    # Select random seed
    apply_random_seed(args.seed, log_file)

    # AL method
    al_method = parse_args.get_method(args)

    # ------------------------------------------------------------------------
    # Pretrained model in an unsupervised fashion
    if args.add_unsupervised_pretraining:
        # Pretrain a model from scratch
        if args.learn_unsupervised_pretraining:
            pretrain_w = pretraining.pretrain(args, dataset, num_classes, train_loader_noshuff)
        # Select one of the pretrained models used in the paper.
        else:
            pretrain_w = load_unsupervised_pretrained_model(args)

    # ------------------------------------------------------------------------
    # RESUMING
    r_cycle = -1
    if args.resume:
        r_cycle, r_epoch = checkpoints.get_checkpoint_cycle_epoch(ckpt_dir)
        r_labels = checkpoints.load_labels(r_cycle, label_dir)

        continue_training = True
        if r_cycle == -1 or r_epoch == -1:
            continue_training = False
        else:
            print_log('----------------------------------', log_file)
            print_log('RESUMING: from cycle %d and epoch %d'%(r_cycle, r_epoch), log_file)

            # In case training of a cycle was finished, check if labels were generated properly
            if r_epoch == (args.epochs - 1):
                print_log('Going to next cycle', log_file)
                r_labels_next_cycle = checkpoints.load_labels(r_cycle+1, label_dir)

                # If resuming labels exist go to next cycle
                if r_labels_next_cycle:
                    r_cycle += 1
                    r_labels = r_labels_next_cycle
                    continue_training = False
                    print_log('RESUMING: Going to next cycle %d'%(r_cycle), log_file)

        if r_labels:
            print_log('RESUMING: Updating the dataset and generating new train_loader', log_file)
            labeled_idxs = r_labels
            unlabeled_idxs = data.update_dataset_resuming(dataset, labeled_idxs)

            if args.add_lp:
                print_log('Create dummy pseudo_label_idx', log_file)
                dataset.pseudo_label_idx = dataset.unlabeled_idxs

            train_loader = training.get_train_loader(args, dataset)

            if args.add_lp and args.lp_mode != 'full' and \
               (r_epoch > 0 and continue_training):
                raise ValueError('Not implemented.')
        elif r_cycle > 0:
            raise ValueError('Should have new set of labels')

    # ------------------------------------------------------------------------
    # CYCLE
    for cycle in range(max(r_cycle, 0), args.al_nb_cycles):
        print_log('AL cycle %d' % cycle, log_file)

        model = models.create_model(args, num_classes)
        optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                    momentum=args.momentum,
                                    weight_decay=args.weight_decay,
                                    nesterov=args.nesterov)

        # Pretraining
        if args.add_unsupervised_pretraining:
            print_log('Use model pretrained in an unsupervised fashion', log_file)
            model_dict = model.state_dict()
            model_dict.update(pretrain_w)
            model.load_state_dict(model_dict)

        # Resuming from previous checkpoint if exists
        start_epoch = 0
        if args.resume:
            if continue_training:
                start_epoch = r_epoch
            if r_epoch > 0 and r_cycle == cycle and continue_training:
                r_cycle, r_epoch, model, optimizer = checkpoints.load_checkpoint(model, optimizer,
                                                                                ckpt_dir, cycle)
                print_log('RESUMING: weights model from cycle %d and epoch %d'% (r_cycle,
                                                                                r_epoch),
                          log_file)
                epoch = r_epoch
                start_epoch = r_epoch + 1

        if args.finetuning and cycle != 0:
            model = checkpoints.load_checkpoint_finetuning(model, ckpt_dir, cycle-1)
            print('Using pretrained model')

        # Apply first label propagation
        if args.add_lp:
            print_log('Label propagation: Starting diffusion', log_file)

            # Extract features
            feats, labels, preds = models.extract_features(train_loader_noshuff, model)

            # Apply label propagation
            lp = label_propagation.LP()
            sel_acc, sel_n = lp.update_lp(feats, preds, dataset, thresh=args.tau,
                                          args=args, w_mode=args.is_cW)

            if args.weighted_unlabeled_batch:
                batch_sampler.update_weights(np.array(dataset.p_weights)[unlabeled_idxs])
            elif args.lp_mode != 'full':
                print_log('Updating %d pseudo labels in the batch' % len(dataset.pseudo_label_idx), log_file)
                batch_sampler.update_pseudo_indices(dataset.pseudo_label_idx, same_length=False)

        # ------------------------------------------------------------------------
        # CYCLE TRAINING
        for epoch in tqdm(range(start_epoch, args.epochs)):
            # Train the model
            models.train(train_loader, model, optimizer, epoch, args)

            # Apply label propagation if needed
            if args.add_lp and epoch > args.start_epoch and epoch % args.lp_step == 0:
                print_log('Label propagation applied epoch {}'.format(epoch), log_file)

                # Extract features
                feats, labels, preds = models.extract_features(train_loader_noshuff, model)

                # Apply label propagation
                sel_acc, sel_n = lp.update_lp(feats, preds, dataset, thresh=args.tau,
                                              args=args, w_mode=args.is_cW)

                if args.weighted_unlabeled_batch:
                    batch_sampler.update_weights(np.array(dataset.p_weights)[unlabeled_idxs])
                elif args.lp_mode != 'full':
                    print_log('Updating %d pseudo labels in the batch'%len(dataset.pseudo_label_idx), log_file)
                    batch_sampler.update_pseudo_indices(dataset.pseudo_label_idx)

            # Save models
            if epoch % args.checkpoint_epochs == 0 or epoch == args.epochs-1:
                checkpoints.save_checkpoint(model, optimizer, ckpt_dir, cycle, epoch)

        #---------------------------
        #------- Evaluation -------
        #---------------------------
        with open(log_file, 'a') as log:
            write_log_dist_class(dataset, labeled_idxs, log)

        # TODO Change validation
        # NOTE(review): `log` is used below after its `with` block closed the
        # handle — confirm models.validate only needs the object, not writes.
        if args.use_val_set:
            with open(results_file, 'a') as log_results:
                models.validate(eval_loader, model, epoch, log, cycle, log_results, 'Val set')

        # Model Evaluation
        with open(test_results_file, 'a') as log_test_results:
            models.validate(test_loader, model, epoch, log, cycle, log_test_results, 'Test set')

        # Removing labels used for ceal before the selection
        if args.add_ceal:
            data.remove_semi_labels(dataset, ceal_pseudo_labels_idxs)

        # Perform the selection using the selected AL method
        selection_method = al_method(model)
        selected = selection_method.select(train_loader_noshuff, dataset,
                                           args.al_budget, args=args)

        # Check
        if set(selected).intersection(set(labeled_idxs)) or len(set(selected)) != args.al_budget:
            raise ValueError("Selection is not correct")

        # Update the dataset with the newly selected images
        labeled_idxs, unlabeled_idxs = data.update_dataset(dataset, selected, labeled_idxs)

        # Save the selected images - used for next cycle
        print_log('%d selected images saved'%(len(selected)), log_file)
        checkpoints.write_labels(labeled_idxs, cycle+1, label_dir)
        continue_training = False

        # Apply CEAL - used for next cycle
        if args.add_ceal:
            print_log('Applying CEAL', log_file)
            ceal_method = selection_methods.CEAL(model)
            ceal_pseudo_labels_idxs, ceal_pseudo_labels = ceal_method.select(train_loader_noshuff, dataset,
                                                                             args.ceal_th, 0.00033, cycle)

            # Update the dataset
            data.update_dataset_semi(dataset, ceal_pseudo_labels_idxs, ceal_pseudo_labels)
if __name__ == '__main__':
    args = cli.parse_commandline_args()
    # Evaluation batches reuse the training batch size.
    args.test_batch_size = args.batch_size
    main()
| 41.76324 | 111 | 0.575638 |
24dc9a02fef34dc7ea018096854223f61d41a4f8 | 198 | py | Python | virtual/lib/python3.6/site-packages/pylint/test/data/suppliermodule_test.py | drewheathens/The-Moringa-Tribune | 98ee4d63c9df6f1f7497fc6876960a822d914500 | [
"MIT"
] | 463 | 2015-01-15T08:17:42.000Z | 2022-03-28T15:10:20.000Z | virtual/lib/python3.6/site-packages/pylint/test/data/suppliermodule_test.py | drewheathens/The-Moringa-Tribune | 98ee4d63c9df6f1f7497fc6876960a822d914500 | [
"MIT"
] | 52 | 2015-01-06T02:43:59.000Z | 2022-03-14T11:15:21.000Z | virtual/lib/python3.6/site-packages/pylint/test/data/suppliermodule_test.py | drewheathens/The-Moringa-Tribune | 98ee4d63c9df6f1f7497fc6876960a822d914500 | [
"MIT"
] | 249 | 2015-01-07T22:49:49.000Z | 2022-03-18T02:32:06.000Z | """ file suppliermodule.py """
class Interface:
    # Minimal interface-style class shipped as pylint test data; concrete
    # suppliers are expected to override both accessors.
    def get_value(self):
        raise NotImplementedError

    def set_value(self, value):
        raise NotImplementedError
class DoNothing: pass  # deliberately empty class — presumably a test fixture
| 18 | 33 | 0.691919 |
965443b2d0dcec9cef3ec596434e9f92aea243b0 | 18,916 | py | Python | pandas/core/indexes/period.py | ChuliangXiao/pandas | 29094b6aaeae74794d1745c2f73ea04195acaa16 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/core/indexes/period.py | ChuliangXiao/pandas | 29094b6aaeae74794d1745c2f73ea04195acaa16 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/core/indexes/period.py | ChuliangXiao/pandas | 29094b6aaeae74794d1745c2f73ea04195acaa16 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | from __future__ import annotations
from datetime import (
datetime,
timedelta,
)
from typing import Hashable
import warnings
import numpy as np
from pandas._libs import (
index as libindex,
lib,
)
from pandas._libs.tslibs import (
BaseOffset,
NaT,
Period,
Resolution,
Tick,
)
from pandas._typing import (
Dtype,
DtypeObj,
)
from pandas.util._decorators import doc
from pandas.core.dtypes.common import (
is_datetime64_any_dtype,
is_integer,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import PeriodDtype
from pandas.core.dtypes.missing import is_valid_na_for_dtype
from pandas.core.arrays.period import (
PeriodArray,
period_array,
raise_on_incompatible,
validate_dtype_freq,
)
import pandas.core.common as com
import pandas.core.indexes.base as ibase
from pandas.core.indexes.base import maybe_extract_name
from pandas.core.indexes.datetimelike import DatetimeIndexOpsMixin
from pandas.core.indexes.datetimes import (
DatetimeIndex,
Index,
)
from pandas.core.indexes.extension import inherit_names
from pandas.core.indexes.numeric import Int64Index
_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update({"target_klass": "PeriodIndex or list of Periods"})
_shared_doc_kwargs = {
"klass": "PeriodArray",
}
# --- Period index sketch
def _new_PeriodIndex(cls, **d):
    # GH13277 for unpickling
    values = d.pop("data")
    if values.dtype == "int64":
        # Legacy pickles stored raw int64 ordinals; rebuild the PeriodArray
        # from them before constructing the index.
        freq = d.pop("freq", None)
        values = PeriodArray(values, freq=freq)
        return cls._simple_new(values, **d)
    else:
        return cls(values, **d)
@inherit_names(
["strftime", "start_time", "end_time"] + PeriodArray._field_ops,
PeriodArray,
wrap=True,
)
@inherit_names(["is_leap_year", "_format_native_types"], PeriodArray)
class PeriodIndex(DatetimeIndexOpsMixin):
"""
Immutable ndarray holding ordinal values indicating regular periods in time.
Index keys are boxed to Period objects which carries the metadata (eg,
frequency information).
Parameters
----------
data : array-like (1d int np.ndarray or PeriodArray), optional
Optional period-like data to construct index with.
copy : bool
Make a copy of input ndarray.
freq : str or period object, optional
One of pandas period strings or corresponding objects.
year : int, array, or Series, default None
month : int, array, or Series, default None
quarter : int, array, or Series, default None
day : int, array, or Series, default None
hour : int, array, or Series, default None
minute : int, array, or Series, default None
second : int, array, or Series, default None
dtype : str or PeriodDtype, default None
Attributes
----------
day
dayofweek
day_of_week
dayofyear
day_of_year
days_in_month
daysinmonth
end_time
freq
freqstr
hour
is_leap_year
minute
month
quarter
qyear
second
start_time
week
weekday
weekofyear
year
Methods
-------
asfreq
strftime
to_timestamp
See Also
--------
Index : The base pandas Index type.
Period : Represents a period of time.
DatetimeIndex : Index with datetime64 data.
TimedeltaIndex : Index of timedelta64 data.
period_range : Create a fixed-frequency PeriodIndex.
Examples
--------
>>> idx = pd.PeriodIndex(year=[2000, 2002], quarter=[1, 3])
>>> idx
PeriodIndex(['2000Q1', '2002Q3'], dtype='period[Q-DEC]')
"""
_typ = "periodindex"
_attributes = ["name"]
_data: PeriodArray
freq: BaseOffset
_data_cls = PeriodArray
_engine_type = libindex.PeriodEngine
_supports_partial_string_indexing = True
# --------------------------------------------------------------------
# methods that dispatch to array and wrap result in Index
# These are defined here instead of via inherit_names for mypy
    @doc(
        PeriodArray.asfreq,
        other="pandas.arrays.PeriodArray",
        other_name="PeriodArray",
        **_shared_doc_kwargs,
    )
    def asfreq(self, freq=None, how: str = "E") -> PeriodIndex:
        # Delegate to the underlying PeriodArray and re-wrap, keeping the name.
        arr = self._data.asfreq(freq, how)
        return type(self)._simple_new(arr, name=self.name)
    @doc(PeriodArray.to_timestamp)
    def to_timestamp(self, freq=None, how="start") -> DatetimeIndex:
        # The conversion happens in the array; wrap the result as a DatetimeIndex.
        arr = self._data.to_timestamp(freq, how)
        return DatetimeIndex._simple_new(arr, name=self.name)
    # https://github.com/python/mypy/issues/1362
    # error: Decorated property not supported
    @property  # type:ignore[misc]
    @doc(PeriodArray.hour.fget)
    def hour(self) -> Int64Index:
        # Wrap the array-level field accessor in an Int64Index with the same name.
        return Int64Index(self._data.hour, name=self.name)
    # https://github.com/python/mypy/issues/1362
    # error: Decorated property not supported
    @property  # type:ignore[misc]
    @doc(PeriodArray.minute.fget)
    def minute(self) -> Int64Index:
        # Wrap the array-level field accessor in an Int64Index with the same name.
        return Int64Index(self._data.minute, name=self.name)
    # https://github.com/python/mypy/issues/1362
    # error: Decorated property not supported
    @property  # type:ignore[misc]
    @doc(PeriodArray.second.fget)
    def second(self) -> Int64Index:
        # Wrap the array-level field accessor in an Int64Index with the same name.
        return Int64Index(self._data.second, name=self.name)
# ------------------------------------------------------------------------
# Index Constructors
    def __new__(
        cls,
        data=None,
        ordinal=None,
        freq=None,
        dtype: Dtype | None = None,
        copy: bool = False,
        name: Hashable = None,
        **fields,
    ) -> PeriodIndex:
        # Field kwargs support the legacy year/month/... range constructor.
        valid_field_set = {
            "year",
            "month",
            "day",
            "quarter",
            "hour",
            "minute",
            "second",
        }

        if not set(fields).issubset(valid_field_set):
            argument = list(set(fields) - valid_field_set)[0]
            raise TypeError(f"__new__() got an unexpected keyword argument {argument}")

        name = maybe_extract_name(name, data, cls)

        if data is None and ordinal is None:
            # range-based.
            data, freq2 = PeriodArray._generate_range(None, None, None, freq, fields)
            # PeriodArray._generate range does validation that fields is
            # empty when really using the range-based constructor.
            freq = freq2

            data = PeriodArray(data, freq=freq)
        else:
            freq = validate_dtype_freq(dtype, freq)

            # PeriodIndex allow PeriodIndex(period_index, freq=different)
            # Let's not encourage that kind of behavior in PeriodArray.
            if freq and isinstance(data, cls) and data.freq != freq:
                # TODO: We can do some of these with no-copy / coercion?
                # e.g. D -> 2D seems to be OK
                data = data.asfreq(freq)

            if data is None and ordinal is not None:
                # we strangely ignore `ordinal` if data is passed.
                ordinal = np.asarray(ordinal, dtype=np.int64)
                data = PeriodArray(ordinal, freq=freq)
            else:
                # don't pass copy here, since we copy later.
                data = period_array(data=data, freq=freq)

        if copy:
            data = data.copy()

        return cls._simple_new(data, name=name)
# ------------------------------------------------------------------------
# Data
    @property
    def values(self) -> np.ndarray:
        # Periods are boxed to Python objects, so the materialized array is
        # object dtype.
        return np.asarray(self, dtype=object)
    def _maybe_convert_timedelta(self, other):
        """
        Convert timedelta-like input to an integer multiple of self.freq

        Parameters
        ----------
        other : timedelta, np.timedelta64, DateOffset, int, np.ndarray

        Returns
        -------
        converted : int, np.ndarray[int64]

        Raises
        ------
        IncompatibleFrequency : if the input cannot be written as a multiple
            of self.freq.  Note IncompatibleFrequency subclasses ValueError.
        """
        if isinstance(other, (timedelta, np.timedelta64, Tick, np.ndarray)):
            if isinstance(self.freq, Tick):
                # _check_timedeltalike_freq_compat will raise if incompatible
                delta = self._data._check_timedeltalike_freq_compat(other)
                return delta
            # Non-Tick freq with a timedelta-like input falls through to the
            # final raise below.
        elif isinstance(other, BaseOffset):
            if other.base == self.freq.base:
                return other.n

            raise raise_on_incompatible(self, other)
        elif is_integer(other):
            # integer is passed to .shift via
            # _add_datetimelike_methods basically
            # but ufunc may pass integer to _add_delta
            return other

        # raise when input doesn't have freq
        raise raise_on_incompatible(self, None)
def _is_comparable_dtype(self, dtype: DtypeObj) -> bool:
"""
Can we compare values of the given dtype to our own?
"""
if not isinstance(dtype, PeriodDtype):
return False
return dtype.freq == self.freq
# ------------------------------------------------------------------------
# Index Methods
    def asof_locs(self, where: Index, mask: np.ndarray) -> np.ndarray:
        """
        where : array of timestamps
        mask : np.ndarray[bool]
            Array of booleans where data is not NA.
        """
        if isinstance(where, DatetimeIndex):
            # Cast timestamps to periods at our own frequency before matching.
            where = PeriodIndex(where._values, freq=self.freq)
        elif not isinstance(where, PeriodIndex):
            raise TypeError("asof_locs `where` must be DatetimeIndex or PeriodIndex")

        return super().asof_locs(where, mask)
    @doc(Index.astype)
    def astype(self, dtype, copy: bool = True, how=lib.no_default):
        dtype = pandas_dtype(dtype)

        if how is not lib.no_default:
            # GH#37982
            warnings.warn(
                "The 'how' keyword in PeriodIndex.astype is deprecated and "
                "will be removed in a future version. "
                "Use index.to_timestamp(how=how) instead",
                FutureWarning,
                stacklevel=2,
            )
        else:
            how = "start"

        if is_datetime64_any_dtype(dtype):
            # 'how' is index-specific, isn't part of the EA interface.
            # Conversion goes through to_timestamp so a tz can be attached.
            tz = getattr(dtype, "tz", None)
            return self.to_timestamp(how=how).tz_localize(tz)

        return super().astype(dtype, copy=copy)
    @property
    def is_full(self) -> bool:
        """
        Returns True if this PeriodIndex is range-like in that all Periods
        between start and end are present, in order.
        """
        if len(self) == 0:
            return True
        if not self.is_monotonic_increasing:
            raise ValueError("Index is not monotonic")
        values = self.asi8
        # Consecutive ordinals differ by exactly 1; a gap shows up as a
        # difference of 2 or more.
        return ((values[1:] - values[:-1]) < 2).all()
    @property
    def inferred_type(self) -> str:
        # b/c data is represented as ints make sure we can't have ambiguous
        # indexing
        # Always "period" regardless of the underlying int64 storage.
        return "period"
# ------------------------------------------------------------------------
# Indexing Methods
    def _convert_tolerance(self, tolerance, target):
        # Returned tolerance must be in dtype/units so that
        # `|self._get_engine_target() - target._engine_target()| <= tolerance`
        # is meaningful. Since PeriodIndex returns int64 for engine_target,
        # we may need to convert timedelta64 tolerance to int64.
        tolerance = super()._convert_tolerance(tolerance, target)
        if self.dtype == target.dtype:
            # convert tolerance to i8
            tolerance = self._maybe_convert_timedelta(tolerance)
        # NOTE(review): when dtypes differ the tolerance is returned
        # unconverted — presumably the caller raises before it is used; confirm.
        return tolerance
    def get_loc(self, key, method=None, tolerance=None):
        """
        Get integer location for requested label.

        Parameters
        ----------
        key : Period, NaT, str, or datetime
            String or datetime key must be parsable as Period.

        Returns
        -------
        loc : int or ndarray[int64]

        Raises
        ------
        KeyError
            Key is not present in the index.
        TypeError
            If key is listlike or otherwise not hashable.
        """
        # Keep the caller's original key so error messages refer to it even
        # after `key` has been re-bound to a normalized Period below.
        orig_key = key
        self._check_indexing_error(key)
        if is_valid_na_for_dtype(key, self.dtype):
            # Any NA-like key is normalized to NaT for the lookup.
            key = NaT
        elif isinstance(key, str):
            try:
                parsed, reso = self._parse_with_reso(key)
            except ValueError as err:
                # A string with invalid format
                raise KeyError(f"Cannot interpret '{key}' as period") from err
            if self._can_partial_date_slice(reso):
                # String is coarser than our freq -> may select a range.
                try:
                    return self._partial_date_slice(reso, parsed)
                except KeyError as err:
                    # TODO: pass if method is not None, like DTI does?
                    raise KeyError(key) from err
            if reso == self.dtype.resolution:
                # the reso < self.dtype.resolution case goes through _get_string_slice
                key = Period(parsed, freq=self.freq)
                loc = self.get_loc(key, method=method, tolerance=tolerance)
                # Recursing instead of falling through matters for the exception
                # message in test_get_loc3 (though not clear if that really matters)
                return loc
            elif method is None:
                raise KeyError(key)
            else:
                key = Period(parsed, freq=self.freq)
        elif isinstance(key, Period):
            sfreq = self.freq
            kfreq = key.freq
            if not (
                sfreq.n == kfreq.n
                and sfreq._period_dtype_code == kfreq._period_dtype_code
            ):
                # GH#42247 For the subset of DateOffsets that can be Period freqs,
                # checking these two attributes is sufficient to check equality,
                # and much more performant than `self.freq == key.freq`
                raise KeyError(key)
        elif isinstance(key, datetime):
            try:
                key = Period(key, freq=self.freq)
            except ValueError as err:
                # we cannot construct the Period
                raise KeyError(orig_key) from err
        else:
            # in particular integer, which Period constructor would cast to string
            raise KeyError(key)
        try:
            # Delegate the actual positional lookup to the base Index.
            return Index.get_loc(self, key, method, tolerance)
        except KeyError as err:
            raise KeyError(orig_key) from err
    def _maybe_cast_slice_bound(self, label, side: str, kind=lib.no_default):
        """
        If label is a string or a datetime, cast it to Period.ordinal according
        to resolution.

        Parameters
        ----------
        label : object
        side : {'left', 'right'}
        kind : {'loc', 'getitem'}, or None

        Returns
        -------
        bound : Period or object

        Notes
        -----
        Value of `side` parameter should be validated in caller.
        """
        assert kind in ["loc", "getitem", None, lib.no_default]
        self._deprecated_arg(kind, "kind", "_maybe_cast_slice_bound")
        if isinstance(label, datetime):
            return Period(label, freq=self.freq)
        elif isinstance(label, str):
            try:
                parsed, reso = self._parse_with_reso(label)
            except ValueError as err:
                # string cannot be parsed as datetime-like
                raise self._invalid_indexer("slice", label) from err
            # A parsed string spans a range of periods; pick the matching edge.
            lower, upper = self._parsed_string_to_bounds(reso, parsed)
            return lower if side == "left" else upper
        elif not isinstance(label, self._data._recognized_scalars):
            raise self._invalid_indexer("slice", label)
        # Already a recognized scalar: pass through unchanged.
        return label
def _parsed_string_to_bounds(self, reso: Resolution, parsed: datetime):
grp = reso.freq_group
iv = Period(parsed, freq=grp.value)
return (iv.asfreq(self.freq, how="start"), iv.asfreq(self.freq, how="end"))
def _can_partial_date_slice(self, reso: Resolution) -> bool:
assert isinstance(reso, Resolution), (type(reso), reso)
grp = reso.freq_group
freqn = self.dtype.freq_group_code
if not grp.value < freqn:
# TODO: we used to also check for
# reso in ["day", "hour", "minute", "second"]
# why is that check not needed?
return False
return True
def period_range(
    start=None, end=None, periods: int | None = None, freq=None, name=None
) -> PeriodIndex:
    """
    Return a fixed frequency PeriodIndex.
    The day (calendar) is the default frequency.
    Parameters
    ----------
    start : str or period-like, default None
        Left bound for generating periods.
    end : str or period-like, default None
        Right bound for generating periods.
    periods : int, default None
        Number of periods to generate.
    freq : str or DateOffset, optional
        Frequency alias. By default the freq is taken from `start` or `end`
        if those are Period objects. Otherwise, the default is ``"D"`` for
        daily frequency.
    name : str, default None
        Name of the resulting PeriodIndex.
    Returns
    -------
    PeriodIndex
    Notes
    -----
    Of the three parameters: ``start``, ``end``, and ``periods``, exactly two
    must be specified.
    To learn more about the frequency strings, please see `this link
    <https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`__.
    Examples
    --------
    >>> pd.period_range(start='2017-01-01', end='2018-01-01', freq='M')
    PeriodIndex(['2017-01', '2017-02', '2017-03', '2017-04', '2017-05', '2017-06',
             '2017-07', '2017-08', '2017-09', '2017-10', '2017-11', '2017-12',
             '2018-01'],
            dtype='period[M]')
    If ``start`` or ``end`` are ``Period`` objects, they will be used as anchor
    endpoints for a ``PeriodIndex`` with frequency matching that of the
    ``period_range`` constructor.
    >>> pd.period_range(start=pd.Period('2017Q1', freq='Q'),
    ...                 end=pd.Period('2017Q2', freq='Q'), freq='M')
    PeriodIndex(['2017-03', '2017-04', '2017-05', '2017-06'],
                dtype='period[M]')
    """
    # Exactly two of (start, end, periods) are required to pin down a range.
    if com.count_not_none(start, end, periods) != 2:
        raise ValueError(
            "Of the three parameters: start, end, and periods, "
            "exactly two must be specified"
        )
    # Default to daily frequency only when neither endpoint carries its own freq.
    if freq is None and (not isinstance(start, Period) and not isinstance(end, Period)):
        freq = "D"
    data, freq = PeriodArray._generate_range(start, end, periods, freq, fields={})
    data = PeriodArray(data, freq=freq)
    return PeriodIndex(data, name=name)
| 32.170068 | 96 | 0.592779 |
a8a5269abad3ae1adb7dbcf4ca223178370310e9 | 9,823 | py | Python | src/ashpy/models/convolutional/decoders.py | zurutech/ashpy | 16d53100c943abf4b051b27cfd025df0a6e7c8c9 | [
"Apache-2.0"
] | 89 | 2019-07-05T11:57:24.000Z | 2021-11-22T04:25:11.000Z | src/ashpy/models/convolutional/decoders.py | zurutech/ashpy | 16d53100c943abf4b051b27cfd025df0a6e7c8c9 | [
"Apache-2.0"
] | 58 | 2019-07-09T09:59:19.000Z | 2021-05-21T15:29:53.000Z | src/ashpy/models/convolutional/decoders.py | zurutech/ashpy | 16d53100c943abf4b051b27cfd025df0a6e7c8c9 | [
"Apache-2.0"
] | 11 | 2019-07-08T08:39:06.000Z | 2021-08-23T12:15:16.000Z | # Copyright 2019 Zuru Tech HK Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Collection of Decoders (i.e., GANs' Generators) models."""
import tensorflow as tf
from ashpy.models.convolutional.interfaces import Conv2DInterface
from tensorflow import keras # pylint: disable=no-name-in-module
# Use the conventional dunder so ``from ... import *`` is actually restricted
# (``__ALL__`` has no special meaning to Python's import machinery).
__all__ = ["Decoder", "FCNNDecoder"]
# Kept for backward compatibility with any code referencing the old name.
__ALL__ = __all__
class Decoder(Conv2DInterface):
    """
    Primitive Model for all decoder (i.e., transpose convolution) based architecture.
    Notes:
        Default to DCGAN Generator architecture.
    Examples:
        * Direct Usage:
            .. testcode::
                dummy_generator = Decoder(
                    layer_spec_input_res=(8, 8),
                    layer_spec_target_res=(64, 64),
                    kernel_size=(5, 5),
                    initial_filters=1024,
                    filters_cap=16,
                    channels=3,
                )
        * Subclassing
            .. testcode::
                class DummyGenerator(Decoder):
                    def call(self, input, training=True):
                        print("Dummy Generator!")
                        return input
                dummy_generator = DummyGenerator(
                    layer_spec_input_res=(8, 8),
                    layer_spec_target_res=(32, 32),
                    kernel_size=(5, 5),
                    initial_filters=1024,
                    filters_cap=16,
                    channels=3,
                )
                dummy_generator(tf.random.normal((1, 100)))
            .. testoutput::
                Dummy Generator!
    """
    def __init__(
        self,
        layer_spec_input_res,
        layer_spec_target_res,
        kernel_size,
        initial_filters,
        filters_cap,
        channels,
        use_dropout=True,
        dropout_prob=0.3,
        non_linearity=keras.layers.LeakyReLU,
    ):
        r"""
        Instantiate the :class:`Decoder`.
        Model Assembly:
            1. :func:`_add_initial_block`: Ingest the :py:obj:`tf.keras.Model`
            inputs and prepare them for :func:`_add_building_block`.
            2. :func:`_add_building_block`: Core of the model, the layers specified
            here get added to the :py:obj:`tf.keras.Model` multiple times consuming the
            hyperparameters generated in the :func:`_get_layer_spec`.
            3. :func:`_add_final_block`: Final block of our :py:obj:`tf.keras.Model`,
            take the model after :func:`_add_building_block` and prepare them for the
            for the final output.
        Args:
            layer_spec_input_res (:obj:`tuple` of (:obj:`int`, :obj:`int`)): Shape of
                the :func:`_get_layer_spec` input tensors.
            layer_spec_target_res: (:obj:`tuple` of (:obj:`int`, :obj:`int`)): Shape
                of tensor desired as output of :func:`_get_layer_spec`.
            kernel_size (:obj:`tuple` of (:obj:`int`, :obj:`int`)): Kernel used by the
                convolution layers.
            initial_filters (int): Numbers of filters at the end of the first block.
            filters_cap (int): Cap filters to a set amount, in the case of Decoder is a
                floor value AKA the minimum amount of filters.
            channels (int): Channels of the output images (1 for Grayscale, 3 for RGB).
            use_dropout (bool): Stored on the instance. NOTE(review): no Dropout
                layer is added by this class itself — confirm intended use.
            dropout_prob (float): Stored on the instance alongside ``use_dropout``;
                likewise unused by this class's layer assembly.
            non_linearity: Activation layer class instantiated after each
                batch-normalization in the initial and building blocks.
        Returns:
            :py:obj:`None`
        Raises:
            ValueError: If `filters_cap` > `initial_filters`.
        """
        super().__init__()
        if filters_cap > initial_filters:
            raise ValueError(
                "`filters_cap` > `initial_filters`. "
                "When decoding ``filters_cap`` is a floor value AKA the minimum "
                "amount of filters."
            )
        # Allow plain ints as shorthand for square resolutions.
        if isinstance(layer_spec_input_res, int):
            layer_spec_input_res = (layer_spec_input_res, layer_spec_input_res)
        if isinstance(layer_spec_target_res, int):
            layer_spec_target_res = (layer_spec_target_res, layer_spec_target_res)
        filters = self._get_layer_spec(
            initial_filters, filters_cap, layer_spec_input_res, layer_spec_target_res
        )
        # layer specification
        self.use_dropout = use_dropout
        self.dropout_prob = dropout_prob
        self.non_linearity = non_linearity
        self.kernel_size = kernel_size
        # Assembling Model
        self._add_initial_block(initial_filters, layer_spec_input_res)
        for layer_filters in filters:
            self._add_building_block(layer_filters)
        self._add_final_block(channels)
    def _add_initial_block(self, initial_filters, input_res):
        """
        Ingest the :py:obj:`tf.keras.Model` inputs and prepare them for :func:`_add_building_block`.
        Args:
            initial_filters (int): Numbers of filters to used as a base value.
            input_res (:obj:`tuple` of (:obj:`int`, :obj:`int`)): Shape of the
                :func:`_get_layer_spec` input tensors.
        """
        # Dense + Reshape turn the flat latent vector into the first feature map.
        self.model_layers.extend(
            [
                keras.layers.Dense(initial_filters * input_res[0] * input_res[1]),
                keras.layers.BatchNormalization(),
                self.non_linearity(),
                keras.layers.Reshape((input_res[0], input_res[1], initial_filters)),
            ]
        )
    def _add_building_block(self, filters):
        """
        Construct the core of the :py:obj:`tf.keras.Model`.
        The layers specified here get added to the :py:obj:`tf.keras.Model` multiple times
        consuming the hyperparameters generated in the :func:`_get_layer_spec`.
        Args:
            filters (int): Number of filters to use for this iteration of the Building Block.
        """
        # Each building block is a stride-2 transpose convolution, i.e. it
        # doubles the spatial resolution.
        self.model_layers.extend(
            [
                keras.layers.Conv2DTranspose(
                    filters,
                    self.kernel_size,
                    strides=(2, 2),
                    padding="same",
                    use_bias=False,
                ),
                keras.layers.BatchNormalization(),
                self.non_linearity(),
            ]
        )
    def _add_final_block(self, channels):
        """
        Prepare results of :func:`_add_building_block` for the for the final output.
        Args:
            channels (int): Channels of the output images (1 for Grayscale, 3 for RGB).
        """
        # Stride-1 conv maps to the requested channel count; tanh bounds the
        # output values to [-1, 1].
        self.model_layers.append(
            keras.layers.Conv2DTranspose(
                channels,
                self.kernel_size,
                strides=(1, 1),
                padding="same",
                use_bias=False,
                activation=tf.math.tanh,
            )
        )
class FCNNDecoder(Decoder):
    """Fully Convolutional Decoder. Expected input is a feature map.
    Examples:
        * Direct Usage:
            .. testcode::
                dummy_generator = FCNNDecoder(
                    layer_spec_input_res=(8, 8),
                    layer_spec_target_res=(64, 64),
                    kernel_size=(5, 5),
                    initial_filters=1024,
                    filters_cap=16,
                    channels=3,
                )
                print(dummy_generator(tf.zeros((1, 1, 1, 100))).shape)
            .. testoutput::
                (1, 64, 64, 3)
    """
    def __init__(
        self,
        layer_spec_input_res,
        layer_spec_target_res,
        kernel_size,
        initial_filters,
        filters_cap,
        channels,
        use_dropout=True,
        dropout_prob=0.3,
        non_linearity=keras.layers.LeakyReLU,
    ):
        """Build a Fully Convolutional Decoder.

        Arguments have the same meaning as in :class:`Decoder`; the only
        difference is the initial block, which is fully convolutional and
        therefore ingests a (1, 1, D) feature map instead of a flat vector.
        """
        # Saved before super().__init__ because the overridden
        # _add_initial_block (invoked from the parent constructor) reads it.
        self._kernel_size = kernel_size
        super().__init__(
            layer_spec_input_res,
            layer_spec_target_res,
            kernel_size,
            initial_filters,
            filters_cap,
            channels,
            use_dropout=use_dropout,
            dropout_prob=dropout_prob,
            non_linearity=non_linearity,
        )
    def _add_initial_block(self, initial_filters, input_res):
        """
        Ingest the :py:obj:`tf.keras.Model` inputs and prepare them for :func:`_add_building_block`.
        Args:
            initial_filters (int): Numbers of filters to used as a base value.
            input_res (:obj:`tuple` of (:obj:`int`, :obj:`int`)): Shape of the
                :func:`_get_layer_spec` input tensors.
        """
        # Always suppose to have a 1x1xD input vector.
        # GOAL: upsample in order to make it input_res[0], input_res[1], initial_filters
        # Since conv2dtrasponse output is: input size * stride if padding == same
        # and (input size -1) * stride + Kernel size if padding == valid
        # Since input resolution is 1, computing the stride value is
        # not possible (division by zero (input_size-1))
        # hence we have to use padding = same.
        stride = max(*input_res)
        self.model_layers.extend(
            [
                keras.layers.Conv2DTranspose(
                    initial_filters,
                    self._kernel_size,
                    strides=(stride, stride),
                    padding="same",
                    use_bias=False,
                ),
                keras.layers.LeakyReLU(),
            ]
        )
| 33.989619 | 100 | 0.56612 |
b42b9db56ae8cdb927ef96a0f57e6e2d406488e2 | 335,215 | py | Python | run_unittests.py | smxia/mason_minigui | d79c20065e85fcb625fc01e00fe107c311c10703 | [
"Apache-2.0"
] | null | null | null | run_unittests.py | smxia/mason_minigui | d79c20065e85fcb625fc01e00fe107c311c10703 | [
"Apache-2.0"
] | null | null | null | run_unittests.py | smxia/mason_minigui | d79c20065e85fcb625fc01e00fe107c311c10703 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2016-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import stat
import subprocess
import re
import json
import tempfile
import textwrap
import os
import shutil
import sys
import unittest
import platform
import pickle
import functools
import io
import operator
import threading
import urllib.error
import urllib.request
import zipfile
import hashlib
from itertools import chain
from unittest import mock
from configparser import ConfigParser
from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
from distutils.dir_util import copy_tree
import mesonbuild.mlog
import mesonbuild.depfile
import mesonbuild.compilers
import mesonbuild.envconfig
import mesonbuild.environment
import mesonbuild.mesonlib
import mesonbuild.coredata
import mesonbuild.modules.gnome
from mesonbuild.interpreter import Interpreter, ObjectHolder
from mesonbuild.ast import AstInterpreter
from mesonbuild.mesonlib import (
BuildDirLock, LibType, MachineChoice, PerMachine, Version, is_windows,
is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, is_sunos,
windows_proof_rmtree, python_command, version_compare, split_args,
quote_arg
)
from mesonbuild.environment import detect_ninja
from mesonbuild.mesonlib import MesonException, EnvironmentException
from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
import mesonbuild.dependencies.base
from mesonbuild.build import Target
import mesonbuild.modules.pkgconfig
from mesonbuild.mtest import TAPParser, TestResult
from run_tests import (
Backend, FakeBuild, FakeCompilerOptions,
ensure_backend_detects_changes, exe_suffix, get_backend_commands,
get_builddir_target_args, get_fake_env, get_fake_options, get_meson_script,
run_configure_inprocess, run_mtest_inprocess
)
# Seconds to wait before giving up on network requests made by the tests.
URLOPEN_TIMEOUT = 5
def get_dynamic_section_entry(fname, entry):
    """Return the value of an ELF dynamic-section entry of fname, or None."""
    if is_cygwin() or is_osx():
        raise unittest.SkipTest('Test only applicable to ELF platforms')
    try:
        readelf_output = subprocess.check_output(['readelf', '-d', fname],
                                                 universal_newlines=True)
    except FileNotFoundError:
        # FIXME: Try using depfixer.py:Elf() as a fallback
        raise unittest.SkipTest('readelf not found')
    # readelf prints entries like "... soname: [libfoo.so.1]".
    pattern = re.compile(entry + r': \[(.*?)\]')
    for line in readelf_output.split('\n'):
        match = pattern.search(line)
        if match is not None:
            return match.group(1)
    # The file did not contain the specified entry.
    return None
def get_soname(fname):
    # Convenience wrapper: the SONAME entry of an ELF file's dynamic section.
    return get_dynamic_section_entry(fname, 'soname')
def get_rpath(fname):
    # Convenience wrapper: the RPATH or RUNPATH entry of an ELF dynamic section.
    return get_dynamic_section_entry(fname, r'(?:rpath|runpath)')
def is_tarball():
    """Return True when running from an unpacked tarball rather than a checkout.

    A git checkout ships the docs/ directory; release tarballs do not.
    """
    return not os.path.isdir('docs')
def is_ci():
    """Return True when running under CI (the CI environment variable is set)."""
    return 'CI' in os.environ
def is_pull():
    """Return True when the current CI run is for a pull request."""
    # Travis
    if os.environ.get('TRAVIS_PULL_REQUEST', 'false') != 'false':
        return True
    # Azure
    return 'SYSTEM_PULLREQUEST_ISFORK' in os.environ
def _git_init(project_dir):
    # Turn project_dir into a git repository with a deterministic identity and
    # a single commit containing every file, for tests needing git metadata.
    subprocess.check_call(['git', 'init'], cwd=project_dir, stdout=subprocess.DEVNULL)
    subprocess.check_call(['git', 'config',
                           'user.name', 'Author Person'], cwd=project_dir)
    subprocess.check_call(['git', 'config',
                           'user.email', 'teh_coderz@example.com'], cwd=project_dir)
    # shell=True so the shell expands the '*' glob.
    subprocess.check_call('git add *', cwd=project_dir, shell=True,
                          stdout=subprocess.DEVNULL)
    subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir,
                          stdout=subprocess.DEVNULL)
@functools.lru_cache()
def is_real_gnu_compiler(path):
    '''
    Check if the gcc we have is a real gcc and not a macOS wrapper around clang
    '''
    if not path:
        return False
    version_text = subprocess.check_output([path, '--version'],
                                           universal_newlines=True,
                                           stderr=subprocess.STDOUT)
    # Genuine GCC prints an FSF copyright notice in its version banner.
    return 'Free Software Foundation' in version_text
def skipIfNoExecutable(exename):
    '''
    Skip this test if the given executable is not found.
    '''
    def decorator(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            # Resolve lazily so the lookup happens at test time, not at
            # decoration time.
            if shutil.which(exename) is None:
                raise unittest.SkipTest(exename + ' not found')
            return func(*args, **kwargs)
        return inner
    return decorator
def skipIfNoPkgconfig(f):
    '''
    Skip this test if no pkg-config is found, unless we're on CI.
    This allows users to run our test suite without having
    pkg-config installed on, f.ex., macOS, while ensuring that our CI does not
    silently skip the test because of misconfiguration.
    Note: Yes, we provide pkg-config even while running Windows CI
    '''
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        # On CI a missing pkg-config is a setup error, so let the test fail there.
        if not is_ci() and shutil.which('pkg-config') is None:
            raise unittest.SkipTest('pkg-config not found')
        return f(*args, **kwargs)
    return wrapped
def skipIfNoPkgconfigDep(depname):
    '''
    Skip this test if the given pkg-config dep is not found, unless we're on CI.
    '''
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            # Same CI policy as skipIfNoPkgconfig: only skip on local runs.
            if not is_ci() and shutil.which('pkg-config') is None:
                raise unittest.SkipTest('pkg-config not found')
            # `pkg-config --exists` returns non-zero when the dep is missing.
            if not is_ci() and subprocess.call(['pkg-config', '--exists', depname]) != 0:
                raise unittest.SkipTest('pkg-config dependency {} not found.'.format(depname))
            return func(*args, **kwargs)
        return wrapped
    return wrapper
def skip_if_no_cmake(f):
    '''
    Skip this test if no cmake is found, unless we're on CI.
    This allows users to run our test suite without having
    cmake installed on, f.ex., macOS, while ensuring that our CI does not
    silently skip the test because of misconfiguration.
    '''
    @functools.wraps(f)
    def inner(*args, **kwargs):
        # On CI, cmake is expected to be present; a missing binary there
        # should surface as a failure rather than a silent skip.
        if shutil.which('cmake') is None and not is_ci():
            raise unittest.SkipTest('cmake not found')
        return f(*args, **kwargs)
    return inner
def skip_if_not_language(lang):
    '''
    Skip this test unless a working compiler for the given language is detected.
    '''
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            try:
                env = get_fake_env()
                # Look up e.g. detect_c_compiler from the language name.
                f = getattr(env, 'detect_{}_compiler'.format(lang))
                f(MachineChoice.HOST)
            except EnvironmentException:
                raise unittest.SkipTest('No {} compiler found.'.format(lang))
            return func(*args, **kwargs)
        return wrapped
    return wrapper
def skip_if_env_set(key):
    '''
    Skip a test if a particular env is set, except when running under CI
    '''
    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            old = None
            if key in os.environ:
                if not is_ci():
                    raise unittest.SkipTest('Env var {!r} set, skipping'.format(key))
                # On CI: temporarily unset the variable for this test's duration.
                old = os.environ.pop(key)
            try:
                return func(*args, **kwargs)
            finally:
                # Restore so later tests see the original environment.
                if old is not None:
                    os.environ[key] = old
        return wrapped
    return wrapper
def skip_if_not_base_option(feature):
    """Skip tests if The compiler does not support a given base option.
    for example, ICC doesn't currently support b_sanitize.
    """
    def actual(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            env = get_fake_env()
            cc = env.detect_c_compiler(MachineChoice.HOST)
            # base_options lists the b_* options this compiler understands.
            if feature not in cc.base_options:
                raise unittest.SkipTest(
                    '{} not available with {}'.format(feature, cc.id))
            return f(*args, **kwargs)
        return wrapped
    return actual
@contextmanager
def temp_filename():
    '''A context manager which provides a filename to an empty temporary file.
    On exit the file will be deleted.
    '''
    handle, path = tempfile.mkstemp()
    # Only the name is needed; close the descriptor immediately so the
    # caller (and Windows) can reopen the file freely.
    os.close(handle)
    try:
        yield path
    finally:
        try:
            os.remove(path)
        except OSError:
            # Already gone (e.g. removed by the caller) - nothing to do.
            pass
@contextmanager
def no_pkgconfig():
    '''
    A context manager that overrides shutil.which and ExternalProgram to force
    them to return None for pkg-config to simulate it not existing.
    '''
    old_which = shutil.which
    old_search = ExternalProgram._search
    def new_search(self, name, search_dir):
        if name == 'pkg-config':
            return [None]
        return old_search(self, name, search_dir)
    def new_which(cmd, *kwargs):
        if cmd == 'pkg-config':
            return None
        return old_which(cmd, *kwargs)
    # Monkey-patch both lookup paths for the duration of the with-block.
    shutil.which = new_which
    ExternalProgram._search = new_search
    try:
        yield
    finally:
        # Always restore the real implementations, even if the body raised.
        shutil.which = old_which
        ExternalProgram._search = old_search
class InternalTests(unittest.TestCase):
    def test_version_number(self):
        # search_version must pick the dotted x.y.z version out of arbitrary
        # surrounding text, preferring it over date-like tokens, and report
        # 'unknown version' when only a date-like token is present.
        searchfunc = mesonbuild.environment.search_version
        self.assertEqual(searchfunc('foobar 1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3')
        self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version')
        self.assertEqual(searchfunc('2016.10.128'), 'unknown version')
    def test_mode_symbolic_to_bits(self):
        # perms_s_to_bits must map each position of an 'rwxrwxrwx'-style
        # string to the matching stat bit, including setuid/setgid/sticky.
        modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits
        self.assertEqual(modefunc('---------'), 0)
        self.assertEqual(modefunc('r--------'), stat.S_IRUSR)
        self.assertEqual(modefunc('---r-----'), stat.S_IRGRP)
        self.assertEqual(modefunc('------r--'), stat.S_IROTH)
        self.assertEqual(modefunc('-w-------'), stat.S_IWUSR)
        self.assertEqual(modefunc('----w----'), stat.S_IWGRP)
        self.assertEqual(modefunc('-------w-'), stat.S_IWOTH)
        self.assertEqual(modefunc('--x------'), stat.S_IXUSR)
        self.assertEqual(modefunc('-----x---'), stat.S_IXGRP)
        self.assertEqual(modefunc('--------x'), stat.S_IXOTH)
        self.assertEqual(modefunc('--S------'), stat.S_ISUID)
        self.assertEqual(modefunc('-----S---'), stat.S_ISGID)
        self.assertEqual(modefunc('--------T'), stat.S_ISVTX)
        self.assertEqual(modefunc('--s------'), stat.S_ISUID | stat.S_IXUSR)
        self.assertEqual(modefunc('-----s---'), stat.S_ISGID | stat.S_IXGRP)
        self.assertEqual(modefunc('--------t'), stat.S_ISVTX | stat.S_IXOTH)
        self.assertEqual(modefunc('rwx------'), stat.S_IRWXU)
        self.assertEqual(modefunc('---rwx---'), stat.S_IRWXG)
        self.assertEqual(modefunc('------rwx'), stat.S_IRWXO)
        # We could keep listing combinations exhaustively but that seems
        # tedious and pointless. Just test a few more.
        self.assertEqual(modefunc('rwxr-xr-x'),
                         stat.S_IRWXU |
                         stat.S_IRGRP | stat.S_IXGRP |
                         stat.S_IROTH | stat.S_IXOTH)
        self.assertEqual(modefunc('rw-r--r--'),
                         stat.S_IRUSR | stat.S_IWUSR |
                         stat.S_IRGRP |
                         stat.S_IROTH)
        self.assertEqual(modefunc('rwsr-x---'),
                         stat.S_IRWXU | stat.S_ISUID |
                         stat.S_IRGRP | stat.S_IXGRP)
    def test_compiler_args_class(self):
        # Exercises CompilerArgs list semantics: init, append/extend with
        # de-duplication, ordering of prepended -I/-L args, and the
        # *_direct variants which bypass de-duplication (except for
        # absolute-path libraries).
        cargsfunc = mesonbuild.compilers.CompilerArgs
        cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())
        # Test that empty initialization works
        a = cargsfunc(cc)
        self.assertEqual(a, [])
        # Test that list initialization works
        a = cargsfunc(cc, ['-I.', '-I..'])
        self.assertEqual(a, ['-I.', '-I..'])
        # Test that there is no de-dup on initialization
        self.assertEqual(cargsfunc(cc, ['-I.', '-I.']), ['-I.', '-I.'])
        ## Test that appending works
        a.append('-I..')
        self.assertEqual(a, ['-I..', '-I.'])
        a.append('-O3')
        self.assertEqual(a, ['-I..', '-I.', '-O3'])
        ## Test that in-place addition works
        a += ['-O2', '-O2']
        self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2', '-O2'])
        # Test that removal works
        a.remove('-O2')
        self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2'])
        # Test that de-dup happens on addition
        a += ['-Ifoo', '-Ifoo']
        self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2'])
        # .extend() is just +=, so we don't test it
        ## Test that addition works
        # Test that adding a list with just one old arg works and yields the same array
        a = a + ['-Ifoo']
        self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2'])
        # Test that adding a list with one arg new and one old works
        a = a + ['-Ifoo', '-Ibaz']
        self.assertEqual(a, ['-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2'])
        # Test that adding args that must be prepended and appended works
        a = a + ['-Ibar', '-Wall']
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall'])
        ## Test that reflected addition works
        # Test that adding to a list with just one old arg works and yields the same array
        a = ['-Ifoo'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall'])
        # Test that adding to a list with just one new arg that is not pre-pended works
        a = ['-Werror'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Werror', '-O3', '-O2', '-Wall'])
        # Test that adding to a list with two new args preserves the order
        a = ['-Ldir', '-Lbah'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
        # Test that adding to a list with old args does nothing
        a = ['-Ibar', '-Ibaz', '-Ifoo'] + a
        self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
        ## Test that adding libraries works
        l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
        self.assertEqual(l, ['-Lfoodir', '-lfoo'])
        # Adding a library and a libpath appends both correctly
        l += ['-Lbardir', '-lbar']
        self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
        # Adding the same library again does nothing
        l += ['-lbar']
        self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
        ## Test that 'direct' append and extend works
        l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
        self.assertEqual(l, ['-Lfoodir', '-lfoo'])
        # Direct-adding a library and a libpath appends both correctly
        l.extend_direct(['-Lbardir', '-lbar'])
        self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar'])
        # Direct-adding the same library again still adds it
        l.append_direct('-lbar')
        self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar'])
        # Direct-adding with absolute path deduplicates
        l.append_direct('/libbaz.a')
        self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
        # Adding libbaz again does nothing
        l.append_direct('/libbaz.a')
        self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
    def test_compiler_args_class_gnuld(self):
        # With a GNU linker, to_native() must wrap library arguments in
        # -Wl,--start-group / -Wl,--end-group and keep non-library args
        # outside the group.
        cargsfunc = mesonbuild.compilers.CompilerArgs
        ## Test --start/end-group
        linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
        gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
        ## Ensure that the fake compiler is never called by overriding the relevant function
        gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
        ## Test that 'direct' append and extend works
        l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
        self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
        # Direct-adding a library and a libpath appends both correctly
        l.extend_direct(['-Lbardir', '-lbar'])
        self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-Wl,--end-group'])
        # Direct-adding the same library again still adds it
        l.append_direct('-lbar')
        self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '-Wl,--end-group'])
        # Direct-adding with absolute path deduplicates
        l.append_direct('/libbaz.a')
        self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group'])
        # Adding libbaz again does nothing
        l.append_direct('/libbaz.a')
        self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group'])
        # Adding a non-library argument doesn't include it in the group
        l += ['-Lfoo', '-Wl,--export-dynamic']
        self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group', '-Wl,--export-dynamic'])
        # -Wl,-lfoo is detected as a library and gets added to the group
        l.append('-Wl,-ldl')
        self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group'])
    def test_compiler_args_remove_system(self):
        # to_native() must strip -isystem arguments that point at the
        # compiler's default include directories (all three spellings).
        cargsfunc = mesonbuild.compilers.CompilerArgs
        ## Test --start/end-group
        linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
        gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
        ## Ensure that the fake compiler is never called by overriding the relevant function
        gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
        ## Test that 'direct' append and extend works
        l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
        self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
        ## Test that to_native removes all system includes
        l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include']
        self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group', '-DSOMETHING_IMPORTANT=1'])
    def test_string_templates_substitution(self):
        """Exercise the @INPUT@/@OUTPUT@ template machinery:
        get_filenames_templates_dict() builds the substitution dict from the
        input/output filename lists, and substitute_values() applies it to a
        command line, raising MesonException on invalid template use."""
        dictfunc = mesonbuild.mesonlib.get_filenames_templates_dict
        substfunc = mesonbuild.mesonlib.substitute_values
        ME = mesonbuild.mesonlib.MesonException
        # Identity
        self.assertEqual(dictfunc([], []), {})
        # One input, no outputs
        inputs = ['bar/foo.c.in']
        outputs = []
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
             '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:])
        cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         [inputs[0] + '.out'] + [d['@PLAINNAME@'] + '.ok'] + cmd[2:])
        cmd = ['@INPUT@', '@BASENAME@.hah', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:])
        # No outputs exist, so @OUTPUT@ must raise
        cmd = ['@OUTPUT@']
        self.assertRaises(ME, substfunc, cmd, d)
        # One input, one output
        inputs = ['bar/foo.c.in']
        outputs = ['out.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
             '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c',
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': '.'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@INPUT@.out', '@OUTPUT@', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         [inputs[0] + '.out'] + outputs + cmd[2:])
        cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', '@OUTPUT0@']
        self.assertEqual(substfunc(cmd, d),
                         [inputs[0] + '.out', d['@PLAINNAME@'] + '.ok'] + outputs)
        cmd = ['@INPUT@', '@BASENAME@.hah', 'strings']
        self.assertEqual(substfunc(cmd, d),
                         inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:])
        # One input, one output with a subdir: @OUTDIR@ becomes that subdir
        outputs = ['dir/out.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0],
             '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c',
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Two inputs, no outputs: no @PLAINNAME@/@BASENAME@ with multiple inputs
        inputs = ['bar/foo.c.in', 'baz/foo.c.in']
        outputs = []
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1]}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@INPUT@', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), inputs + cmd[1:])
        cmd = ['@INPUT0@.out', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:])
        cmd = ['@INPUT0@.out', '@INPUT1@.ok', 'strings']
        self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:])
        cmd = ['@INPUT0@', '@INPUT1@', 'strings']
        self.assertEqual(substfunc(cmd, d), inputs + cmd[2:])
        # Many inputs, can't use @INPUT@ like this
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough inputs
        cmd = ['@INPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Too many inputs
        cmd = ['@PLAINNAME@']
        self.assertRaises(ME, substfunc, cmd, d)
        cmd = ['@BASENAME@']
        self.assertRaises(ME, substfunc, cmd, d)
        # No outputs
        cmd = ['@OUTPUT@']
        self.assertRaises(ME, substfunc, cmd, d)
        cmd = ['@OUTPUT0@']
        self.assertRaises(ME, substfunc, cmd, d)
        cmd = ['@OUTDIR@']
        self.assertRaises(ME, substfunc, cmd, d)
        # Two inputs, one output
        outputs = ['dir/out.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1],
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@OUTPUT@', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), outputs + cmd[1:])
        cmd = ['@OUTPUT@.out', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out'] + cmd[1:])
        cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', 'strings']
        self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:])
        # Many inputs, can't use @INPUT@ like this
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough inputs
        cmd = ['@INPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough outputs
        cmd = ['@OUTPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Two inputs, two outputs
        outputs = ['dir/out.c', 'dir/out2.c']
        ret = dictfunc(inputs, outputs)
        d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1],
             '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTPUT1@': outputs[1],
             '@OUTDIR@': 'dir'}
        # Check dictionary
        self.assertEqual(ret, d)
        # Check substitutions
        cmd = ['some', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), cmd)
        cmd = ['@OUTPUT@', 'ordinary', 'strings']
        self.assertEqual(substfunc(cmd, d), outputs + cmd[1:])
        cmd = ['@OUTPUT0@', '@OUTPUT1@', 'strings']
        self.assertEqual(substfunc(cmd, d), outputs + cmd[2:])
        cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', '@OUTDIR@']
        self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok', 'dir'])
        # Many inputs, can't use @INPUT@ like this
        cmd = ['@INPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough inputs
        cmd = ['@INPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Not enough outputs
        cmd = ['@OUTPUT2@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
        # Many outputs, can't use @OUTPUT@ like this
        cmd = ['@OUTPUT@.out', 'ordinary', 'strings']
        self.assertRaises(ME, substfunc, cmd, d)
def test_needs_exe_wrapper_override(self):
config = ConfigParser()
config['binaries'] = {
'c': '\'/usr/bin/gcc\'',
}
config['host_machine'] = {
'system': '\'linux\'',
'cpu_family': '\'arm\'',
'cpu': '\'armv7\'',
'endian': '\'little\'',
}
# Can not be used as context manager because we need to
# open it a second time and this is not possible on
# Windows.
configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
configfilename = configfile.name
config.write(configfile)
configfile.flush()
configfile.close()
opts = get_fake_options()
opts.cross_file = (configfilename,)
env = get_fake_env(opts=opts)
detected_value = env.need_exe_wrapper()
os.unlink(configfilename)
desired_value = not detected_value
config['properties'] = {
'needs_exe_wrapper': 'true' if desired_value else 'false'
}
configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
configfilename = configfile.name
config.write(configfile)
configfile.close()
opts = get_fake_options()
opts.cross_file = (configfilename,)
env = get_fake_env(opts=opts)
forced_value = env.need_exe_wrapper()
os.unlink(configfilename)
self.assertEqual(forced_value, desired_value)
def test_listify(self):
listify = mesonbuild.mesonlib.listify
# Test sanity
self.assertEqual([1], listify(1))
self.assertEqual([], listify([]))
self.assertEqual([1], listify([1]))
# Test flattening
self.assertEqual([1, 2, 3], listify([1, [2, 3]]))
self.assertEqual([1, 2, 3], listify([1, [2, [3]]]))
self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False))
# Test flattening and unholdering
holder1 = ObjectHolder(1)
holder3 = ObjectHolder(3)
self.assertEqual([holder1], listify(holder1))
self.assertEqual([holder1], listify([holder1]))
self.assertEqual([holder1, 2], listify([holder1, 2]))
self.assertEqual([holder1, 2, 3], listify([holder1, 2, [3]]))
self.assertEqual([1], listify(holder1, unholder=True))
self.assertEqual([1], listify([holder1], unholder=True))
self.assertEqual([1, 2], listify([holder1, 2], unholder=True))
self.assertEqual([1, 2, 3], listify([holder1, 2, [holder3]], unholder=True))
# Unholding doesn't work recursively when not flattening
self.assertEqual([1, [2], [holder3]], listify([holder1, [2], [holder3]], unholder=True, flatten=False))
    def test_extract_as_list(self):
        """extract_as_list() pulls kwargs entries out as flat lists, with
        optional popping of the key and ObjectHolder unwrapping."""
        extract = mesonbuild.mesonlib.extract_as_list
        # Test sanity: plain extraction leaves the dict alone; pop=True removes the key
        kwargs = {'sources': [1, 2, 3]}
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources'))
        self.assertEqual(kwargs, {'sources': [1, 2, 3]})
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', pop=True))
        self.assertEqual(kwargs, {})
        # Test unholding, again with and without pop
        holder3 = ObjectHolder(3)
        kwargs = {'sources': [1, 2, holder3]}
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True))
        self.assertEqual(kwargs, {'sources': [1, 2, holder3]})
        self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True, pop=True))
        self.assertEqual(kwargs, {})
        # Test listification: several keys at once come back as a list of lists
        kwargs = {'sources': [1, 2, 3], 'pch_sources': [4, 5, 6]}
        self.assertEqual([[1, 2, 3], [4, 5, 6]], extract(kwargs, 'sources', 'pch_sources'))
def test_pkgconfig_module(self):
class Mock:
pass
mock = Mock()
mock.pcdep = Mock()
mock.pcdep.name = "some_name"
mock.version_reqs = []
# pkgconfig dependency as lib
deps = mesonbuild.modules.pkgconfig.DependenciesHelper("thislib")
deps.add_pub_libs([mock])
self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
# pkgconfig dependency as requires
deps = mesonbuild.modules.pkgconfig.DependenciesHelper("thislib")
deps.add_pub_reqs([mock])
self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
    def _test_all_naming(self, cc, env, patterns, platform):
        """Check the compiler's library-naming patterns for *platform* against
        the expected `patterns` table for every LibType, then (openbsd only)
        verify that versioned .so lookup picks the highest numeric version."""
        shr = patterns[platform]['shared']
        stc = patterns[platform]['static']
        # PREFER_* orderings: the preferred kind first, then the other kind
        # with duplicates removed.
        shrstc = shr + tuple([x for x in stc if x not in shr])
        stcshr = stc + tuple([x for x in shr if x not in stc])
        p = cc.get_library_naming(env, LibType.SHARED)
        self.assertEqual(p, shr)
        p = cc.get_library_naming(env, LibType.STATIC)
        self.assertEqual(p, stc)
        p = cc.get_library_naming(env, LibType.PREFER_STATIC)
        self.assertEqual(p, stcshr)
        p = cc.get_library_naming(env, LibType.PREFER_SHARED)
        self.assertEqual(p, shrstc)
        # Test find library by mocking up openbsd
        if platform != 'openbsd':
            return
        with tempfile.TemporaryDirectory() as tmpdir:
            # Assorted versioned shared libraries; libfoo.so.54.0 carries the
            # highest well-formed [0-9]*.[0-9]* version suffix.
            with open(os.path.join(tmpdir, 'libfoo.so.6.0'), 'w') as f:
                f.write('')
            with open(os.path.join(tmpdir, 'libfoo.so.5.0'), 'w') as f:
                f.write('')
            with open(os.path.join(tmpdir, 'libfoo.so.54.0'), 'w') as f:
                f.write('')
            with open(os.path.join(tmpdir, 'libfoo.so.66a.0b'), 'w') as f:
                f.write('')
            with open(os.path.join(tmpdir, 'libfoo.so.70.0.so.1'), 'w') as f:
                f.write('')
            found = cc.find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED)
            self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0')
    def test_find_library_patterns(self):
        '''
        Unit test for the library search patterns used by find_library()
        '''
        unix_static = ('lib{}.a', '{}.a')
        msvc_static = ('lib{}.a', 'lib{}.lib', '{}.a', '{}.lib')
        # This is the priority list of pattern matching for library searching
        patterns = {'openbsd': {'shared': ('lib{}.so', '{}.so', 'lib{}.so.[0-9]*.[0-9]*', '{}.so.[0-9]*.[0-9]*'),
                                'static': unix_static},
                    'linux': {'shared': ('lib{}.so', '{}.so'),
                              'static': unix_static},
                    'darwin': {'shared': ('lib{}.dylib', 'lib{}.so', '{}.dylib', '{}.so'),
                               'static': unix_static},
                    'cygwin': {'shared': ('cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll',
                                          'lib{}.dll.a', '{}.dll', '{}.dll.a'),
                               'static': ('cyg{}.a',) + unix_static},
                    'windows-msvc': {'shared': ('lib{}.lib', '{}.lib'),
                                     'static': msvc_static},
                    'windows-mingw': {'shared': ('lib{}.dll.a', 'lib{}.lib', 'lib{}.dll',
                                                 '{}.dll.a', '{}.lib', '{}.dll'),
                                      'static': msvc_static}}
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        # First check the table that matches the machine we actually run on.
        if is_osx():
            self._test_all_naming(cc, env, patterns, 'darwin')
        elif is_cygwin():
            self._test_all_naming(cc, env, patterns, 'cygwin')
        elif is_windows():
            if cc.get_argument_syntax() == 'msvc':
                self._test_all_naming(cc, env, patterns, 'windows-msvc')
            else:
                self._test_all_naming(cc, env, patterns, 'windows-mingw')
        elif is_openbsd():
            self._test_all_naming(cc, env, patterns, 'openbsd')
        else:
            self._test_all_naming(cc, env, patterns, 'linux')
        # Then force each remaining host system in turn so every table is
        # exercised regardless of the CI platform.
        env.machines.host.system = 'openbsd'
        self._test_all_naming(cc, env, patterns, 'openbsd')
        env.machines.host.system = 'darwin'
        self._test_all_naming(cc, env, patterns, 'darwin')
        env.machines.host.system = 'cygwin'
        self._test_all_naming(cc, env, patterns, 'cygwin')
        env.machines.host.system = 'windows'
        self._test_all_naming(cc, env, patterns, 'windows-mingw')
    def test_pkgconfig_parse_libs(self):
        '''
        Unit test for parsing of pkg-config output to search for libraries
        https://github.com/mesonbuild/meson/issues/3951
        '''
        def create_static_lib(name):
            # On non-macOS an empty file is enough to be found; macOS `ar`
            # needs a real member, so compile a tiny object file there.
            if not is_osx():
                name.open('w').close()
                return
            src = name.with_suffix('.c')
            out = name.with_suffix('.o')
            with src.open('w') as f:
                f.write('int meson_foobar (void) { return 0; }')
            subprocess.check_call(['clang', '-c', str(src), '-o', str(out)])
            subprocess.check_call(['ar', 'csr', str(name), str(out)])
        with tempfile.TemporaryDirectory() as tmpdir:
            pkgbin = ExternalProgram('pkg-config', command=['pkg-config'], silent=True)
            env = get_fake_env()
            compiler = env.detect_c_compiler(MachineChoice.HOST)
            env.coredata.compilers.host = {'c': compiler}
            env.coredata.compiler_options.host['c_link_args'] = FakeCompilerOptions()
            p1 = Path(tmpdir) / '1'
            p2 = Path(tmpdir) / '2'
            p1.mkdir()
            p2.mkdir()
            # libfoo.a is in one prefix
            create_static_lib(p1 / 'libfoo.a')
            # libbar.a is in both prefixes
            create_static_lib(p1 / 'libbar.a')
            create_static_lib(p2 / 'libbar.a')
            # Ensure that we never statically link to these
            create_static_lib(p1 / 'libpthread.a')
            create_static_lib(p1 / 'libm.a')
            create_static_lib(p1 / 'libc.a')
            create_static_lib(p1 / 'libdl.a')
            create_static_lib(p1 / 'librt.a')
            def fake_call_pkgbin(self, args, env=None):
                # Fake `pkg-config --libs` output for the three packages
                # queried below.
                # NOTE(review): any other package name with --libs falls
                # through and returns None; only 'foo', 'bar' and 'internal'
                # are ever queried here.
                if '--libs' not in args:
                    return 0, '', ''
                if args[0] == 'foo':
                    return 0, '-L{} -lfoo -L{} -lbar'.format(p2.as_posix(), p1.as_posix()), ''
                if args[0] == 'bar':
                    return 0, '-L{} -lbar'.format(p2.as_posix()), ''
                if args[0] == 'internal':
                    return 0, '-L{} -lpthread -lm -lc -lrt -ldl'.format(p1.as_posix()), ''
            # Class-level monkeypatch so no real pkg-config is executed;
            # restored in the finally block below.
            old_call = PkgConfigDependency._call_pkgbin
            old_check = PkgConfigDependency.check_pkgconfig
            PkgConfigDependency._call_pkgbin = fake_call_pkgbin
            PkgConfigDependency.check_pkgconfig = lambda x, _: pkgbin
            # Test begins
            try:
                kwargs = {'required': True, 'silent': True}
                foo_dep = PkgConfigDependency('foo', env, kwargs)
                self.assertEqual(foo_dep.get_link_args(),
                                 [(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()])
                bar_dep = PkgConfigDependency('bar', env, kwargs)
                self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()])
                internal_dep = PkgConfigDependency('internal', env, kwargs)
                if compiler.get_argument_syntax() == 'msvc':
                    self.assertEqual(internal_dep.get_link_args(), [])
                else:
                    link_args = internal_dep.get_link_args()
                    for link_arg in link_args:
                        for lib in ('pthread', 'm', 'c', 'dl', 'rt'):
                            self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args)
            finally:
                # Test ends
                PkgConfigDependency._call_pkgbin = old_call
                PkgConfigDependency.check_pkgconfig = old_check
                # Reset dependency class to ensure that in-process configure doesn't mess up
                PkgConfigDependency.pkgbin_cache = {}
                PkgConfigDependency.class_pkgbin = PerMachine(None, None)
    def test_version_compare(self):
        """Version comparison sanity checks: version_compare_many() against a
        condition string, then a large table of (a, b, expected ordering)
        pairs checked through Version and all rich comparison operators."""
        comparefunc = mesonbuild.mesonlib.version_compare_many
        for (a, b, result) in [
            ('0.99.beta19', '>= 0.99.beta14', True),
        ]:
            self.assertEqual(comparefunc(a, b)[0], result)
        for (a, b, op) in [
            # examples from https://fedoraproject.org/wiki/Archive:Tools/RPM/VersionComparison
            ("1.0010", "1.9", operator.gt),
            ("1.05", "1.5", operator.eq),
            ("1.0", "1", operator.gt),
            ("2.50", "2.5", operator.gt),
            ("fc4", "fc.4", operator.eq),
            ("FC5", "fc4", operator.lt),
            ("2a", "2.0", operator.lt),
            ("1.0", "1.fc4", operator.gt),
            ("3.0.0_fc", "3.0.0.fc", operator.eq),
            # from RPM tests
            ("1.0", "1.0", operator.eq),
            ("1.0", "2.0", operator.lt),
            ("2.0", "1.0", operator.gt),
            ("2.0.1", "2.0.1", operator.eq),
            ("2.0", "2.0.1", operator.lt),
            ("2.0.1", "2.0", operator.gt),
            ("2.0.1a", "2.0.1a", operator.eq),
            ("2.0.1a", "2.0.1", operator.gt),
            ("2.0.1", "2.0.1a", operator.lt),
            ("5.5p1", "5.5p1", operator.eq),
            ("5.5p1", "5.5p2", operator.lt),
            ("5.5p2", "5.5p1", operator.gt),
            ("5.5p10", "5.5p10", operator.eq),
            ("5.5p1", "5.5p10", operator.lt),
            ("5.5p10", "5.5p1", operator.gt),
            ("10xyz", "10.1xyz", operator.lt),
            ("10.1xyz", "10xyz", operator.gt),
            ("xyz10", "xyz10", operator.eq),
            ("xyz10", "xyz10.1", operator.lt),
            ("xyz10.1", "xyz10", operator.gt),
            ("xyz.4", "xyz.4", operator.eq),
            ("xyz.4", "8", operator.lt),
            ("8", "xyz.4", operator.gt),
            ("xyz.4", "2", operator.lt),
            ("2", "xyz.4", operator.gt),
            ("5.5p2", "5.6p1", operator.lt),
            ("5.6p1", "5.5p2", operator.gt),
            ("5.6p1", "6.5p1", operator.lt),
            ("6.5p1", "5.6p1", operator.gt),
            ("6.0.rc1", "6.0", operator.gt),
            ("6.0", "6.0.rc1", operator.lt),
            ("10b2", "10a1", operator.gt),
            ("10a2", "10b2", operator.lt),
            ("1.0aa", "1.0aa", operator.eq),
            ("1.0a", "1.0aa", operator.lt),
            ("1.0aa", "1.0a", operator.gt),
            ("10.0001", "10.0001", operator.eq),
            ("10.0001", "10.1", operator.eq),
            ("10.1", "10.0001", operator.eq),
            ("10.0001", "10.0039", operator.lt),
            ("10.0039", "10.0001", operator.gt),
            ("4.999.9", "5.0", operator.lt),
            ("5.0", "4.999.9", operator.gt),
            ("20101121", "20101121", operator.eq),
            ("20101121", "20101122", operator.lt),
            ("20101122", "20101121", operator.gt),
            ("2_0", "2_0", operator.eq),
            ("2.0", "2_0", operator.eq),
            ("2_0", "2.0", operator.eq),
            ("a", "a", operator.eq),
            ("a+", "a+", operator.eq),
            ("a+", "a_", operator.eq),
            ("a_", "a+", operator.eq),
            ("+a", "+a", operator.eq),
            ("+a", "_a", operator.eq),
            ("_a", "+a", operator.eq),
            ("+_", "+_", operator.eq),
            ("_+", "+_", operator.eq),
            ("_+", "_+", operator.eq),
            ("+", "_", operator.eq),
            ("_", "+", operator.eq),
            # other tests
            ('0.99.beta19', '0.99.beta14', operator.gt),
            ("1.0.0", "2.0.0", operator.lt),
            (".0.0", "2.0.0", operator.lt),
            ("alpha", "beta", operator.lt),
            ("1.0", "1.0.0", operator.lt),
            ("2.456", "2.1000", operator.lt),
            ("2.1000", "3.111", operator.lt),
            ("2.001", "2.1", operator.eq),
            ("2.34", "2.34", operator.eq),
            ("6.1.2", "6.3.8", operator.lt),
            ("1.7.3.0", "2.0.0", operator.lt),
            ("2.24.51", "2.25", operator.lt),
            ("2.1.5+20120813+gitdcbe778", "2.1.5", operator.gt),
            ("3.4.1", "3.4b1", operator.gt),
            ("041206", "200090325", operator.lt),
            ("0.6.2+git20130413", "0.6.2", operator.gt),
            ("2.6.0+bzr6602", "2.6.0", operator.gt),
            ("2.6.0", "2.6b2", operator.gt),
            ("2.6.0+bzr6602", "2.6b2x", operator.gt),
            ("0.6.7+20150214+git3a710f9", "0.6.7", operator.gt),
            ("15.8b", "15.8.0.1", operator.lt),
            ("1.2rc1", "1.2.0", operator.lt),
        ]:
            ver_a = Version(a)
            ver_b = Version(b)
            # For each expected ordering, every operator consistent with it
            # must hold and every operator inconsistent with it must fail.
            if op is operator.eq:
                for o, name in [(op, 'eq'), (operator.ge, 'ge'), (operator.le, 'le')]:
                    self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
            if op is operator.lt:
                for o, name in [(op, 'lt'), (operator.le, 'le'), (operator.ne, 'ne')]:
                    self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
                for o, name in [(operator.gt, 'gt'), (operator.ge, 'ge'), (operator.eq, 'eq')]:
                    self.assertFalse(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
            if op is operator.gt:
                for o, name in [(op, 'gt'), (operator.ge, 'ge'), (operator.ne, 'ne')]:
                    self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
                for o, name in [(operator.lt, 'lt'), (operator.le, 'le'), (operator.eq, 'eq')]:
                    self.assertFalse(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b))
    def test_msvc_toolset_version(self):
        '''
        Ensure that the toolset version returns the correct value for this MSVC
        '''
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Test only applies to MSVC-like compilers')
        toolset_ver = cc.get_toolset_version()
        self.assertIsNotNone(toolset_ver)
        # Visual Studio 2015 and older versions do not define VCToolsVersion
        # TODO: ICL doesn't set this in the VSC2015 profile either
        if cc.id == 'msvc' and int(''.join(cc.version.split('.')[0:2])) < 1910:
            return
        if 'VCToolsVersion' in os.environ:
            vctools_ver = os.environ['VCToolsVersion']
        else:
            self.assertIn('VCINSTALLDIR', os.environ)
            # See https://devblogs.microsoft.com/cppblog/finding-the-visual-c-compiler-tools-in-visual-studio-2017/
            vctools_ver = (Path(os.environ['VCINSTALLDIR']) / 'Auxiliary' / 'Build' / 'Microsoft.VCToolsVersion.default.txt').read_text()
        # The detected toolset version must be a prefix of the full version
        # that Visual Studio itself reports.
        self.assertTrue(vctools_ver.startswith(toolset_ver),
                        msg='{!r} does not start with {!r}'.format(vctools_ver, toolset_ver))
    def test_split_args(self):
        """split_args() must follow the platform's command-line parsing rules
        (MSVCRT on Windows, POSIX shell elsewhere). Each table row is
        (command line, expected argv, whether join_args() round-trips it)."""
        split_args = mesonbuild.mesonlib.split_args
        join_args = mesonbuild.mesonlib.join_args
        if is_windows():
            test_data = [
                # examples from https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments
                (r'"a b c" d e', ['a b c', 'd', 'e'], True),
                (r'"ab\"c" "\\" d', ['ab"c', '\\', 'd'], False),
                (r'a\\\b d"e f"g h', [r'a\\\b', 'de fg', 'h'], False),
                (r'a\\\"b c d', [r'a\"b', 'c', 'd'], False),
                (r'a\\\\"b c" d e', [r'a\\b c', 'd', 'e'], False),
                # other basics
                (r'""', [''], True),
                (r'a b c d "" e', ['a', 'b', 'c', 'd', '', 'e'], True),
                (r"'a b c' d e", ["'a", 'b', "c'", 'd', 'e'], True),
                (r"'a&b&c' d e", ["'a&b&c'", 'd', 'e'], True),
                (r"a & b & c d e", ['a', '&', 'b', '&', 'c', 'd', 'e'], True),
                (r"'a & b & c d e'", ["'a", '&', 'b', '&', 'c', 'd', "e'"], True),
                ('a b\nc\rd \n\re', ['a', 'b', 'c', 'd', 'e'], False),
                # more illustrative tests
                (r'cl test.cpp /O1 /Fe:test.exe', ['cl', 'test.cpp', '/O1', '/Fe:test.exe'], True),
                (r'cl "test.cpp /O1 /Fe:test.exe"', ['cl', 'test.cpp /O1 /Fe:test.exe'], True),
                (r'cl /DNAME=\"Bob\" test.cpp', ['cl', '/DNAME="Bob"', 'test.cpp'], False),
                (r'cl "/DNAME=\"Bob\"" test.cpp', ['cl', '/DNAME="Bob"', 'test.cpp'], True),
                (r'cl /DNAME=\"Bob, Alice\" test.cpp', ['cl', '/DNAME="Bob,', 'Alice"', 'test.cpp'], False),
                (r'cl "/DNAME=\"Bob, Alice\"" test.cpp', ['cl', '/DNAME="Bob, Alice"', 'test.cpp'], True),
                (r'cl C:\path\with\backslashes.cpp', ['cl', r'C:\path\with\backslashes.cpp'], True),
                (r'cl C:\\path\\with\\double\\backslashes.cpp', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], True),
                (r'cl "C:\\path\\with\\double\\backslashes.cpp"', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], False),
                (r'cl C:\path with spaces\test.cpp', ['cl', r'C:\path', 'with', r'spaces\test.cpp'], False),
                (r'cl "C:\path with spaces\test.cpp"', ['cl', r'C:\path with spaces\test.cpp'], True),
                (r'cl /DPATH="C:\path\with\backslashes test.cpp', ['cl', r'/DPATH=C:\path\with\backslashes test.cpp'], False),
                (r'cl /DPATH=\"C:\\ends\\with\\backslashes\\\" test.cpp', ['cl', r'/DPATH="C:\\ends\\with\\backslashes\"', 'test.cpp'], False),
                (r'cl /DPATH="C:\\ends\\with\\backslashes\\" test.cpp', ['cl', '/DPATH=C:\\\\ends\\\\with\\\\backslashes\\', 'test.cpp'], False),
                (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\"', 'test.cpp'], True),
                (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\ test.cpp'], False),
                (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\"', 'test.cpp'], True),
            ]
        else:
            test_data = [
                (r"'a b c' d e", ['a b c', 'd', 'e'], True),
                (r"a/b/c d e", ['a/b/c', 'd', 'e'], True),
                (r"a\b\c d e", [r'abc', 'd', 'e'], False),
                (r"a\\b\\c d e", [r'a\b\c', 'd', 'e'], False),
                (r'"a b c" d e', ['a b c', 'd', 'e'], False),
                (r'"a\\b\\c\\" d e', ['a\\b\\c\\', 'd', 'e'], False),
                (r"'a\b\c\' d e", ['a\\b\\c\\', 'd', 'e'], True),
                (r"'a&b&c' d e", ['a&b&c', 'd', 'e'], True),
                (r"a & b & c d e", ['a', '&', 'b', '&', 'c', 'd', 'e'], False),
                (r"'a & b & c d e'", ['a & b & c d e'], True),
                (r"abd'e f'g h", [r'abde fg', 'h'], False),
                ('a b\nc\rd \n\re', ['a', 'b', 'c', 'd', 'e'], False),
                ('g++ -DNAME="Bob" test.cpp', ['g++', '-DNAME=Bob', 'test.cpp'], False),
                ("g++ '-DNAME=\"Bob\"' test.cpp", ['g++', '-DNAME="Bob"', 'test.cpp'], True),
                ('g++ -DNAME="Bob, Alice" test.cpp', ['g++', '-DNAME=Bob, Alice', 'test.cpp'], False),
                ("g++ '-DNAME=\"Bob, Alice\"' test.cpp", ['g++', '-DNAME="Bob, Alice"', 'test.cpp'], True),
            ]
        for (cmd, expected, roundtrip) in test_data:
            self.assertEqual(split_args(cmd), expected)
            if roundtrip:
                self.assertEqual(join_args(expected), cmd)
    def test_quote_arg(self):
        """quote_arg() must quote a single argument per the platform's rules,
        and split_args() must be able to recover the original argument."""
        split_args = mesonbuild.mesonlib.split_args
        quote_arg = mesonbuild.mesonlib.quote_arg
        if is_windows():
            test_data = [
                ('', '""'),
                ('arg1', 'arg1'),
                ('/option1', '/option1'),
                ('/Ovalue', '/Ovalue'),
                ('/OBob&Alice', '/OBob&Alice'),
                ('/Ovalue with spaces', r'"/Ovalue with spaces"'),
                (r'/O"value with spaces"', r'"/O\"value with spaces\""'),
                (r'/OC:\path with spaces\test.exe', r'"/OC:\path with spaces\test.exe"'),
                ('/LIBPATH:C:\\path with spaces\\ends\\with\\backslashes\\', r'"/LIBPATH:C:\path with spaces\ends\with\backslashes\\"'),
                ('/LIBPATH:"C:\\path with spaces\\ends\\with\\backslashes\\\\"', r'"/LIBPATH:\"C:\path with spaces\ends\with\backslashes\\\\\""'),
                (r'/DMSG="Alice said: \"Let\'s go\""', r'"/DMSG=\"Alice said: \\\"Let\'s go\\\"\""'),
            ]
        else:
            test_data = [
                ('arg1', 'arg1'),
                ('--option1', '--option1'),
                ('-O=value', '-O=value'),
                ('-O=Bob&Alice', "'-O=Bob&Alice'"),
                ('-O=value with spaces', "'-O=value with spaces'"),
                ('-O="value with spaces"', '\'-O=\"value with spaces\"\''),
                ('-O=/path with spaces/test', '\'-O=/path with spaces/test\''),
                ('-DMSG="Alice said: \\"Let\'s go\\""', "'-DMSG=\"Alice said: \\\"Let'\"'\"'s go\\\"\"'"),
            ]
        for (arg, expected) in test_data:
            # Quoting must produce exactly the expected form, and splitting
            # the quoted form must give back the original argument.
            self.assertEqual(quote_arg(arg), expected)
            self.assertEqual(split_args(expected)[0], arg)
    def test_depfile(self):
        """DepFile must parse Makefile-style dependency rules and return the
        transitive closure of a target's dependencies."""
        for (f, target, expdeps) in [
            # empty, unknown target
            ([''], 'unknown', set()),
            # simple target & deps
            (['meson/foo.o : foo.c foo.h'], 'meson/foo.o', set({'foo.c', 'foo.h'})),
            (['meson/foo.o: foo.c foo.h'], 'foo.c', set()),
            # get all deps
            (['meson/foo.o: foo.c foo.h',
              'foo.c: gen.py'], 'meson/foo.o', set({'foo.c', 'foo.h', 'gen.py'})),
            (['meson/foo.o: foo.c foo.h',
              'foo.c: gen.py'], 'foo.c', set({'gen.py'})),
            # line continuation, multiple targets
            (['foo.o \\', 'foo.h: bar'], 'foo.h', set({'bar'})),
            (['foo.o \\', 'foo.h: bar'], 'foo.o', set({'bar'})),
            # \\ handling
            (['foo: Program\\ F\\iles\\\\X'], 'foo', set({'Program Files\\X'})),
            # $ handling
            (['f$o.o: c/b'], 'f$o.o', set({'c/b'})),
            (['f$$o.o: c/b'], 'f$o.o', set({'c/b'})),
            # cycles
            (['a: b', 'b: a'], 'a', set({'a', 'b'})),
            (['a: b', 'b: a'], 'b', set({'a', 'b'})),
        ]:
            d = mesonbuild.depfile.DepFile(f)
            deps = d.get_all_dependencies(target)
            self.assertEqual(deps, expdeps)
def test_log_once(self):
f = io.StringIO()
with mock.patch('mesonbuild.mlog.log_file', f), \
mock.patch('mesonbuild.mlog._logged_once', set()):
mesonbuild.mlog.log_once('foo')
mesonbuild.mlog.log_once('foo')
actual = f.getvalue().strip()
self.assertEqual(actual, 'foo', actual)
    def test_log_once_ansi(self):
        """log_once() deduplication must ignore ANSI decoration, and
        warning(..., once=True) must also log only once."""
        f = io.StringIO()
        with mock.patch('mesonbuild.mlog.log_file', f), \
                mock.patch('mesonbuild.mlog._logged_once', set()):
            # Two bold 'foo's dedupe to one entry.
            mesonbuild.mlog.log_once(mesonbuild.mlog.bold('foo'))
            mesonbuild.mlog.log_once(mesonbuild.mlog.bold('foo'))
            actual = f.getvalue().strip()
            self.assertEqual(actual.count('foo'), 1, actual)
            # A plain 'foo' counts as already logged despite the styling.
            mesonbuild.mlog.log_once('foo')
            actual = f.getvalue().strip()
            self.assertEqual(actual.count('foo'), 1, actual)
            # NOTE(review): truncate() without seek(0) truncates at the
            # current position (the end of the buffer), so this is a no-op;
            # the checks below still hold because 'bar' was never logged.
            f.truncate()
            mesonbuild.mlog.warning('bar', once=True)
            mesonbuild.mlog.warning('bar', once=True)
            actual = f.getvalue().strip()
            self.assertEqual(actual.count('bar'), 1, actual)
def test_sort_libpaths(self):
sort_libpaths = mesonbuild.dependencies.base.sort_libpaths
self.assertEqual(sort_libpaths(
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
self.assertEqual(sort_libpaths(
['/usr/local/lib', '/home/mesonuser/.local/lib', '/usr/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
self.assertEqual(sort_libpaths(
['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
self.assertEqual(sort_libpaths(
['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'],
['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/libdata/pkgconfig']),
['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'])
@unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release')
class DataTests(unittest.TestCase):
def test_snippets(self):
hashcounter = re.compile('^ *(#)+')
snippet_dir = Path('docs/markdown/snippets')
self.assertTrue(snippet_dir.is_dir())
for f in snippet_dir.glob('*'):
self.assertTrue(f.is_file())
if f.parts[-1].endswith('~'):
continue
if f.suffix == '.md':
in_code_block = False
with f.open() as snippet:
for line in snippet:
if line.startswith(' '):
continue
if line.startswith('```'):
in_code_block = not in_code_block
if in_code_block:
continue
m = re.match(hashcounter, line)
if m:
self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name)
self.assertFalse(in_code_block, 'Unclosed code block.')
else:
if f.name != 'add_release_note_snippets_here':
self.assertTrue(False, 'A file without .md suffix in snippets dir: ' + f.name)
def test_compiler_options_documented(self):
'''
Test that C and C++ compiler options and base options are documented in
Builtin-Options.md. Only tests the default compiler for the current
platform on the CI.
'''
md = None
with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f:
md = f.read()
self.assertIsNotNone(md)
env = get_fake_env()
# FIXME: Support other compilers
cc = env.detect_c_compiler(MachineChoice.HOST)
cpp = env.detect_cpp_compiler(MachineChoice.HOST)
for comp in (cc, cpp):
for opt in comp.get_options().keys():
self.assertIn(opt, md)
for opt in comp.base_options:
self.assertIn(opt, md)
self.assertNotIn('b_unknown', md)
    def test_builtin_options_documented(self):
        '''
        Test that universal options and base options are documented in
        Builtin-Options.md.
        '''
        md = None
        with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f:
            md = f.read()
        self.assertIsNotNone(md)
        found_entries = set()
        # A trailing None sentinel lets zip() pair the last section with
        # "no next section" so its end offset falls back to len(md).
        sections = list(re.finditer(r"^## (.+)$", md, re.MULTILINE)) + [None]
        for s1, s2 in zip(sections[:], sections[1:]):
            if s1.group(1) == "Universal options":
                # Extract the content for this section
                end = s2.start() if s2 is not None else len(md)
                content = md[s1.end():end]
                # Same sentinel trick one level down for ### subsections.
                subsections = list(re.finditer(r"^### (.+)$", content, re.MULTILINE)) + [None]
                for sub1, sub2 in zip(subsections[:], subsections[1:]):
                    if sub1.group(1) == "Directories" or sub1.group(1) == "Core options":
                        # Extract the content for this subsection
                        sub_end = sub2.start() if sub2 is not None else len(content)
                        subcontent = content[sub1.end():sub_end]
                        # Find the list entries
                        arches = [m.group(1) for m in re.finditer(r"^\| (\w+) .* \|", subcontent, re.MULTILINE)]
                        # Drop the header
                        arches = set(arches[1:])
                        # No option may be listed in more than one table.
                        self.assertEqual(len(found_entries & arches), 0)
                        found_entries |= arches
                break
        # The documented set must equal exactly what coredata declares.
        self.assertEqual(found_entries, set([
            *mesonbuild.coredata.builtin_options.keys(),
            *mesonbuild.coredata.builtin_options_per_machine.keys()
        ]))
    def test_cpu_families_documented(self):
        """Every CPU family known to meson must be listed in the 'CPU
        families' table of Reference-tables.md, and nothing extra."""
        with open("docs/markdown/Reference-tables.md", encoding='utf-8') as f:
            md = f.read()
        self.assertIsNotNone(md)
        sections = list(re.finditer(r"^## (.+)$", md, re.MULTILINE))
        # NOTE(review): pairing sections[::2] with sections[1::2] assumes the
        # "CPU families" heading sits at an even index and its content ends at
        # the immediately following heading -- confirm if the doc layout changes.
        for s1, s2 in zip(sections[::2], sections[1::2]):
            if s1.group(1) == "CPU families":
                # Extract the content for this section
                content = md[s1.end():s2.start()]
                # Find the list entries
                arches = [m.group(1) for m in re.finditer(r"^\| (\w+) +\|", content, re.MULTILINE)]
                # Drop the header
                arches = set(arches[1:])
                self.assertEqual(arches, set(mesonbuild.environment.known_cpu_families))
def test_markdown_files_in_sitemap(self):
'''
Test that each markdown files in docs/markdown is referenced in sitemap.txt
'''
with open("docs/sitemap.txt", encoding='utf-8') as f:
md = f.read()
self.assertIsNotNone(md)
toc = list(m.group(1) for m in re.finditer(r"^\s*(\w.*)$", md, re.MULTILINE))
markdownfiles = [f.name for f in Path("docs/markdown").iterdir() if f.is_file() and f.suffix == '.md']
exceptions = ['_Sidebar.md']
for f in markdownfiles:
if f not in exceptions:
self.assertIn(f, toc)
def test_vim_syntax_highlighting(self):
'''
Ensure that vim syntax highlighting files were updated for new
functions in the global namespace in build files.
'''
env = get_fake_env()
interp = Interpreter(FakeBuild(env), mock=True)
with open('data/syntax-highlighting/vim/syntax/meson.vim') as f:
res = re.search(r'syn keyword mesonBuiltin(\s+\\\s\w+)+', f.read(), re.MULTILINE)
defined = set([a.strip() for a in res.group().split('\\')][1:])
self.assertEqual(defined, set(chain(interp.funcs.keys(), interp.builtin.keys())))
    @unittest.skipIf(is_pull(), 'Skipping because this is a pull request')
    def test_json_grammar_syntax_highlighting(self):
        '''
        Ensure that syntax highlighting JSON grammar written by TingPing was
        updated for new functions in the global namespace in build files.
        https://github.com/TingPing/language-meson/
        '''
        env = get_fake_env()
        interp = Interpreter(FakeBuild(env), mock=True)
        url = 'https://raw.githubusercontent.com/TingPing/language-meson/master/grammars/meson.json'
        try:
            # Use a timeout to avoid blocking forever in case the network is
            # slow or unavailable in a weird way
            r = urllib.request.urlopen(url, timeout=URLOPEN_TIMEOUT)
        except urllib.error.URLError as e:
            # Skip test when network is not available, such as during packaging
            # by a distro or Flatpak
            if not isinstance(e, urllib.error.HTTPError):
                raise unittest.SkipTest('Network unavailable')
            # Don't fail the test if github is down, but do fail if 4xx
            if e.code >= 500:
                raise unittest.SkipTest('Server error ' + str(e.code))
            raise e
        # On Python 3.5, we must decode bytes to string. Newer versions don't require that.
        grammar = json.loads(r.read().decode('utf-8', 'surrogatepass'))
        for each in grammar['patterns']:
            if 'name' in each and each['name'] == 'support.function.builtin.meson':
                # The string is of the form: (?x)\\b(func1|func2|...\n)\\b\\s*(?=\\() and
                # we convert that to [func1, func2, ...] without using regex to parse regex
                funcs = set(each['match'].split('\\b(')[1].split('\n')[0].split('|'))
            if 'name' in each and each['name'] == 'support.variable.meson':
                # \\b(builtin1|builtin2...)\\b
                builtin = set(each['match'].split('\\b(')[1].split(')\\b')[0].split('|'))
        # NOTE(review): if the grammar ever drops either pattern name above,
        # 'funcs'/'builtin' are never bound and these lines raise NameError
        # instead of a readable assertion failure.
        self.assertEqual(builtin, set(interp.builtin.keys()))
        self.assertEqual(funcs, set(interp.funcs.keys()))
def test_all_functions_defined_in_ast_interpreter(self):
'''
Ensure that the all functions defined in the Interpreter are also defined
in the AstInterpreter (and vice versa).
'''
env = get_fake_env()
interp = Interpreter(FakeBuild(env), mock=True)
astint = AstInterpreter('.', '')
self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys()))
class BasePlatformTests(unittest.TestCase):
    '''
    Shared fixture for unit tests that configure and build real test projects.

    Provides per-test scratch build directories, backend-aware meson/build
    command lines, wrappers for configure/build/test/install/introspect, and
    path-related assertion helpers.
    '''

    def setUp(self):
        super().setUp()
        self.maxDiff = None
        src_root = os.path.dirname(__file__)
        src_root = os.path.join(os.getcwd(), src_root)
        self.src_root = src_root
        self.prefix = '/usr'
        self.libdir = 'lib'
        # Get the backend
        # FIXME: Extract this from argv?
        self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja'))
        self.meson_args = ['--backend=' + self.backend.name]
        self.meson_cross_file = None
        self.meson_command = python_command + [get_meson_script()]
        self.setup_command = self.meson_command + self.meson_args
        self.mconf_command = self.meson_command + ['configure']
        self.mintro_command = self.meson_command + ['introspect']
        self.wrap_command = self.meson_command + ['wrap']
        self.rewrite_command = self.meson_command + ['rewrite']
        # Backend-specific build commands
        self.build_command, self.clean_command, self.test_command, self.install_command, \
            self.uninstall_command = get_backend_commands(self.backend)
        # Test directories
        self.common_test_dir = os.path.join(src_root, 'test cases/common')
        self.vala_test_dir = os.path.join(src_root, 'test cases/vala')
        self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
        self.unit_test_dir = os.path.join(src_root, 'test cases/unit')
        self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite')
        # Misc stuff
        self.orig_env = os.environ.copy()
        if self.backend is Backend.ninja:
            self.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do']
        else:
            # VS doesn't have a stable output when no changes are done
            # XCode backend is untested with unit tests, help welcome!
            self.no_rebuild_stdout = ['UNKNOWN BACKEND {!r}'.format(self.backend.name)]

        self.builddirs = []
        self.new_builddir()

    def change_builddir(self, newdir):
        '''Point all build-dir-derived paths and commands at *newdir*.'''
        self.builddir = newdir
        self.privatedir = os.path.join(self.builddir, 'meson-private')
        self.logdir = os.path.join(self.builddir, 'meson-logs')
        self.installdir = os.path.join(self.builddir, 'install')
        self.distdir = os.path.join(self.builddir, 'meson-dist')
        self.mtest_command = self.meson_command + ['test', '-C', self.builddir]
        self.builddirs.append(self.builddir)

    def new_builddir(self):
        '''Create a fresh scratch build directory and make it the current one.'''
        if not is_cygwin():
            # Keep builddirs inside the source tree so that virus scanners
            # don't complain
            newdir = tempfile.mkdtemp(dir=os.getcwd())
        else:
            # But not on Cygwin because that breaks the umask tests. See:
            # https://github.com/mesonbuild/meson/pull/5546#issuecomment-509666523
            newdir = tempfile.mkdtemp()
        # In case the directory is inside a symlinked directory, find the real
        # path otherwise we might not find the srcdir from inside the builddir.
        newdir = os.path.realpath(newdir)
        self.change_builddir(newdir)

    def _print_meson_log(self):
        # Dump meson's own log to stdout to aid debugging of failed configures.
        log = os.path.join(self.logdir, 'meson-log.txt')
        if not os.path.isfile(log):
            print("{!r} doesn't exist".format(log))
            return
        with open(log, 'r', encoding='utf-8') as f:
            print(f.read())

    def tearDown(self):
        for path in self.builddirs:
            try:
                windows_proof_rmtree(path)
            except FileNotFoundError:
                pass
        # Undo any environment changes made by the test.
        os.environ.clear()
        os.environ.update(self.orig_env)
        super().tearDown()

    def _run(self, command, *, workdir=None, override_envvars=None):
        '''
        Run a command while printing the stdout and stderr to stdout,
        and also return a copy of it
        '''
        # If this call hangs CI will just abort. It is very hard to distinguish
        # between CI issue and test bug in that case. Set timeout and fail loud
        # instead.
        if override_envvars is None:
            env = None
        else:
            env = os.environ.copy()
            env.update(override_envvars)
        p = subprocess.run(command, stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT, env=env,
                           universal_newlines=True, cwd=workdir, timeout=60 * 5)
        print(p.stdout)
        if p.returncode != 0:
            if 'MESON_SKIP_TEST' in p.stdout:
                raise unittest.SkipTest('Project requested skipping.')
            raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
        return p.stdout

    def init(self, srcdir, *,
             extra_args=None,
             default_args=True,
             inprocess=False,
             override_envvars=None):
        '''
        Configure *srcdir* into the current build dir and return configure's
        stdout. Raises SkipTest if the project requests skipping, RuntimeError
        (inprocess) or CalledProcessError (subprocess) on failure.
        '''
        self.assertPathExists(srcdir)
        if extra_args is None:
            extra_args = []
        if not isinstance(extra_args, list):
            extra_args = [extra_args]
        args = [srcdir, self.builddir]
        if default_args:
            args += ['--prefix', self.prefix,
                     '--libdir', self.libdir]
            if self.meson_cross_file:
                args += ['--cross-file', self.meson_cross_file]
        self.privatedir = os.path.join(self.builddir, 'meson-private')
        if inprocess:
            try:
                if override_envvars is not None:
                    old_envvars = os.environ.copy()
                    os.environ.update(override_envvars)
                try:
                    (returncode, out, err) = run_configure_inprocess(self.meson_args + args + extra_args)
                finally:
                    # Restore the environment even when configure raises, so
                    # the rest of the test does not keep running with the
                    # overrides applied (previously only restored on success).
                    if override_envvars is not None:
                        os.environ.clear()
                        os.environ.update(old_envvars)
                if 'MESON_SKIP_TEST' in out:
                    raise unittest.SkipTest('Project requested skipping.')
                if returncode != 0:
                    self._print_meson_log()
                    print('Stdout:\n')
                    print(out)
                    print('Stderr:\n')
                    print(err)
                    raise RuntimeError('Configure failed')
            except Exception:
                self._print_meson_log()
                raise
            finally:
                # Close log file to satisfy Windows file locking
                mesonbuild.mlog.shutdown()
                mesonbuild.mlog.log_dir = None
                mesonbuild.mlog.log_file = None
        else:
            try:
                out = self._run(self.setup_command + args + extra_args, override_envvars=override_envvars)
            except unittest.SkipTest:
                raise unittest.SkipTest('Project requested skipping: ' + srcdir)
            except Exception:
                self._print_meson_log()
                raise
        return out

    def build(self, target=None, *, extra_args=None, override_envvars=None):
        '''Build everything, or just *target*, in the current build dir.'''
        if extra_args is None:
            extra_args = []
        # Add arguments for building the target (if specified),
        # and using the build dir (if required, with VS)
        args = get_builddir_target_args(self.backend, self.builddir, target)
        return self._run(self.build_command + args + extra_args, workdir=self.builddir, override_envvars=override_envvars)

    def clean(self, *, override_envvars=None):
        '''Run the backend's clean step in the current build dir.'''
        dir_args = get_builddir_target_args(self.backend, self.builddir, None)
        self._run(self.clean_command + dir_args, workdir=self.builddir, override_envvars=override_envvars)

    def run_tests(self, *, inprocess=False, override_envvars=None):
        '''Run the project's test suite, either via the backend or in-process.'''
        if not inprocess:
            self._run(self.test_command, workdir=self.builddir, override_envvars=override_envvars)
        else:
            if override_envvars is not None:
                old_envvars = os.environ.copy()
                os.environ.update(override_envvars)
            try:
                run_mtest_inprocess(['-C', self.builddir])
            finally:
                if override_envvars is not None:
                    os.environ.clear()
                    os.environ.update(old_envvars)

    def install(self, *, use_destdir=True, override_envvars=None):
        '''Run the install step; with use_destdir, stage into self.installdir.'''
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
        if use_destdir:
            destdir = {'DESTDIR': self.installdir}
            if override_envvars is None:
                override_envvars = destdir
            else:
                # Merge into a fresh dict: updating the caller's mapping in
                # place would leak DESTDIR into any later use of it.
                override_envvars = {**override_envvars, **destdir}
        self._run(self.install_command, workdir=self.builddir, override_envvars=override_envvars)

    def uninstall(self, *, override_envvars=None):
        '''Run the backend's uninstall step.'''
        self._run(self.uninstall_command, workdir=self.builddir, override_envvars=override_envvars)

    def run_target(self, target, *, override_envvars=None):
        '''
        Run a Ninja target while printing the stdout and stderr to stdout,
        and also return a copy of it
        '''
        return self.build(target=target, override_envvars=override_envvars)

    def setconf(self, arg, will_build=True):
        '''Change build options via `meson configure` on the current build dir.'''
        if not isinstance(arg, list):
            arg = [arg]
        if will_build:
            # Make sure the backend notices the option change before rebuilding.
            ensure_backend_detects_changes(self.backend)
        self._run(self.mconf_command + arg + [self.builddir])

    def wipe(self):
        '''Remove the current build directory entirely.'''
        windows_proof_rmtree(self.builddir)

    def utime(self, f):
        '''Touch *f* so the backend sees it as changed.'''
        ensure_backend_detects_changes(self.backend)
        os.utime(f)

    def get_compdb(self):
        '''
        Return the parsed compile_commands.json, expanding .rsp response
        files into full command lines when Ninja uses them.
        '''
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Compiler db not available with {} backend'.format(self.backend.name))
        try:
            with open(os.path.join(self.builddir, 'compile_commands.json'), encoding='utf-8') as ifile:
                contents = json.load(ifile)
        except FileNotFoundError:
            raise unittest.SkipTest('Compiler db not found')
        # If Ninja is using .rsp files, generate them, read their contents, and
        # replace it as the command for all compile commands in the parsed json.
        if len(contents) > 0 and contents[0]['command'].endswith('.rsp'):
            # Pretend to build so that the rsp files are generated
            self.build(extra_args=['-d', 'keeprsp', '-n'])
            for each in contents:
                # Extract the actual command from the rsp file
                compiler, rsp = each['command'].split(' @')
                rsp = os.path.join(self.builddir, rsp)
                # Replace the command with its contents
                with open(rsp, 'r', encoding='utf-8') as f:
                    each['command'] = compiler + ' ' + f.read()
        return contents

    def get_meson_log(self):
        '''Return meson-log.txt from the current build dir as a list of lines.'''
        with open(os.path.join(self.builddir, 'meson-logs', 'meson-log.txt'), encoding='utf-8') as f:
            return f.readlines()

    def get_meson_log_compiler_checks(self):
        '''
        Fetch a list command-lines run by meson for compiler checks.
        Each command-line is returned as a list of arguments.
        '''
        log = self.get_meson_log()
        prefix = 'Command line:'
        cmds = [line[len(prefix):].split() for line in log if line.startswith(prefix)]
        return cmds

    def introspect(self, args):
        '''Run `meson introspect` with *args* on the build dir, returning parsed JSON.'''
        if isinstance(args, str):
            args = [args]
        out = subprocess.check_output(self.mintro_command + args + [self.builddir],
                                      universal_newlines=True)
        return json.loads(out)

    def introspect_directory(self, directory, args):
        '''Run `meson introspect` on an arbitrary *directory*, returning parsed JSON.'''
        if isinstance(args, str):
            args = [args]
        out = subprocess.check_output(self.mintro_command + args + [directory],
                                      universal_newlines=True)
        try:
            obj = json.loads(out)
        except Exception as e:
            # Print the raw output so the JSON error is debuggable.
            print(out)
            raise e
        return obj

    def assertPathEqual(self, path1, path2):
        '''
        Handles a lot of platform-specific quirks related to paths such as
        separator, case-sensitivity, etc.
        '''
        self.assertEqual(PurePath(path1), PurePath(path2))

    def assertPathListEqual(self, pathlist1, pathlist2):
        '''Compare two lists of paths element-wise; None entries must match exactly.'''
        self.assertEqual(len(pathlist1), len(pathlist2))
        worklist = list(zip(pathlist1, pathlist2))
        for i in worklist:
            if i[0] is None:
                self.assertEqual(i[0], i[1])
            else:
                self.assertPathEqual(i[0], i[1])

    def assertPathBasenameEqual(self, path, basename):
        msg = '{!r} does not end with {!r}'.format(path, basename)
        # We cannot use os.path.basename because it returns '' when the path
        # ends with '/' for some silly reason. This is not how the UNIX utility
        # `basename` works.
        path_basename = PurePath(path).parts[-1]
        self.assertEqual(PurePath(path_basename), PurePath(basename), msg)

    def assertBuildIsNoop(self):
        '''Assert that rebuilding now does no work (backend-specific check).'''
        ret = self.build()
        if self.backend is Backend.ninja:
            self.assertIn(ret.split('\n')[-2], self.no_rebuild_stdout)
        elif self.backend is Backend.vs:
            # Ensure that some target said that no rebuild was done
            self.assertIn('CustomBuild:\n  All outputs are up-to-date.', ret)
            self.assertIn('ClCompile:\n  All outputs are up-to-date.', ret)
            self.assertIn('Link:\n  All outputs are up-to-date.', ret)
            # Ensure that no targets were built
            clre = re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE)
            linkre = re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE)
            self.assertNotRegex(ret, clre)
            self.assertNotRegex(ret, linkre)
        elif self.backend is Backend.xcode:
            raise unittest.SkipTest('Please help us fix this test on the xcode backend')
        else:
            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))

    def assertRebuiltTarget(self, target):
        '''Assert that rebuilding now relinks *target* (backend-specific check).'''
        ret = self.build()
        if self.backend is Backend.ninja:
            self.assertIn('Linking target {}'.format(target), ret)
        elif self.backend is Backend.vs:
            # Ensure that this target was rebuilt
            linkre = re.compile('Link:\n [^\n]*link[^\n]*' + target, flags=re.IGNORECASE)
            self.assertRegex(ret, linkre)
        elif self.backend is Backend.xcode:
            raise unittest.SkipTest('Please help us fix this test on the xcode backend')
        else:
            raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name))

    def assertPathExists(self, path):
        m = 'Path {!r} should exist'.format(path)
        self.assertTrue(os.path.exists(path), msg=m)

    def assertPathDoesNotExist(self, path):
        m = 'Path {!r} should not exist'.format(path)
        self.assertFalse(os.path.exists(path), msg=m)
class AllPlatformTests(BasePlatformTests):
'''
Tests that should run on all platforms
'''
def test_default_options_prefix(self):
'''
Tests that setting a prefix in default_options in project() works.
Can't be an ordinary test because we pass --prefix to meson there.
https://github.com/mesonbuild/meson/issues/1349
'''
testdir = os.path.join(self.common_test_dir, '90 default options')
self.init(testdir, default_args=False)
opts = self.introspect('--buildoptions')
for opt in opts:
if opt['name'] == 'prefix':
prefix = opt['value']
self.assertEqual(prefix, '/absoluteprefix')
def test_do_conf_file_preserve_newlines(self):
def conf_file(in_data, confdata):
with temp_filename() as fin:
with open(fin, 'wb') as fobj:
fobj.write(in_data.encode('utf-8'))
with temp_filename() as fout:
mesonbuild.mesonlib.do_conf_file(fin, fout, confdata, 'meson')
with open(fout, 'rb') as fobj:
return fobj.read().decode('utf-8')
confdata = {'VAR': ('foo', 'bar')}
self.assertEqual(conf_file('@VAR@\n@VAR@\n', confdata), 'foo\nfoo\n')
self.assertEqual(conf_file('@VAR@\r\n@VAR@\r\n', confdata), 'foo\r\nfoo\r\n')
def test_absolute_prefix_libdir(self):
'''
Tests that setting absolute paths for --prefix and --libdir work. Can't
be an ordinary test because these are set via the command-line.
https://github.com/mesonbuild/meson/issues/1341
https://github.com/mesonbuild/meson/issues/1345
'''
testdir = os.path.join(self.common_test_dir, '90 default options')
prefix = '/someabs'
libdir = 'libdir'
extra_args = ['--prefix=' + prefix,
# This can just be a relative path, but we want to test
# that passing this as an absolute path also works
'--libdir=' + prefix + '/' + libdir]
self.init(testdir, extra_args=extra_args, default_args=False)
opts = self.introspect('--buildoptions')
for opt in opts:
if opt['name'] == 'prefix':
self.assertEqual(prefix, opt['value'])
elif opt['name'] == 'libdir':
self.assertEqual(libdir, opt['value'])
def test_libdir_must_be_inside_prefix(self):
'''
Tests that libdir is forced to be inside prefix no matter how it is set.
Must be a unit test for obvious reasons.
'''
testdir = os.path.join(self.common_test_dir, '1 trivial')
# libdir being inside prefix is ok
args = ['--prefix', '/opt', '--libdir', '/opt/lib32']
self.init(testdir, extra_args=args)
self.wipe()
# libdir not being inside prefix is not ok
args = ['--prefix', '/usr', '--libdir', '/opt/lib32']
self.assertRaises(subprocess.CalledProcessError, self.init, testdir, extra_args=args)
self.wipe()
# libdir must be inside prefix even when set via mesonconf
self.init(testdir)
self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=/opt', False)
    def test_prefix_dependent_defaults(self):
        '''
        Tests that configured directory paths are set to prefix dependent
        defaults.
        '''
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        # Map of --prefix value -> expected introspected directory options.
        expected = {
            '/opt': {'prefix': '/opt',
                     'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
                     'infodir': 'share/info',
                     'libexecdir': 'libexec', 'localedir': 'share/locale',
                     'localstatedir': 'var', 'mandir': 'share/man',
                     'sbindir': 'sbin', 'sharedstatedir': 'com',
                     'sysconfdir': 'etc'},
            '/usr': {'prefix': '/usr',
                     'bindir': 'bin', 'datadir': 'share', 'includedir': 'include',
                     'infodir': 'share/info',
                     'libexecdir': 'libexec', 'localedir': 'share/locale',
                     'localstatedir': '/var', 'mandir': 'share/man',
                     'sbindir': 'sbin', 'sharedstatedir': '/var/lib',
                     'sysconfdir': '/etc'},
            '/usr/local': {'prefix': '/usr/local',
                           'bindir': 'bin', 'datadir': 'share',
                           'includedir': 'include', 'infodir': 'share/info',
                           'libexecdir': 'libexec',
                           'localedir': 'share/locale',
                           'localstatedir': '/var/local', 'mandir': 'share/man',
                           'sbindir': 'sbin', 'sharedstatedir': '/var/local/lib',
                           'sysconfdir': 'etc'},
            # N.B. We don't check 'libdir' as it's platform dependent, see
            # default_libdir():
        }

        # A None key means "no --prefix passed": only checkable when the
        # build's default prefix matches one of the tables above.
        if mesonbuild.mesonlib.default_prefix() == '/usr/local':
            expected[None] = expected['/usr/local']

        for prefix in expected:
            args = []
            if prefix:
                args += ['--prefix', prefix]
            self.init(testdir, extra_args=args, default_args=False)
            opts = self.introspect('--buildoptions')
            for opt in opts:
                name = opt['name']
                value = opt['value']
                if name in expected[prefix]:
                    self.assertEqual(value, expected[prefix][name])
            # Each prefix gets a clean reconfigure from scratch.
            self.wipe()
    def test_default_options_prefix_dependent_defaults(self):
        '''
        Tests that setting a prefix in default_options in project() sets prefix
        dependent defaults for other options, and that those defaults can
        be overridden in default_options or by the command line.
        '''
        testdir = os.path.join(self.common_test_dir, '168 default options prefix dependent defaults')
        # Map of extra command-line arguments -> expected introspected options.
        expected = {
            '':
            {'prefix':         '/usr',
             'sysconfdir':     '/etc',
             'localstatedir':  '/var',
             'sharedstatedir': '/sharedstate'},
            '--prefix=/usr':
            {'prefix':         '/usr',
             'sysconfdir':     '/etc',
             'localstatedir':  '/var',
             'sharedstatedir': '/sharedstate'},
            '--sharedstatedir=/var/state':
            {'prefix':         '/usr',
             'sysconfdir':     '/etc',
             'localstatedir':  '/var',
             'sharedstatedir': '/var/state'},
            '--sharedstatedir=/var/state --prefix=/usr --sysconfdir=sysconf':
            {'prefix':         '/usr',
             'sysconfdir':     'sysconf',
             'localstatedir':  '/var',
             'sharedstatedir': '/var/state'},
        }
        for args in expected:
            self.init(testdir, extra_args=args.split(), default_args=False)
            opts = self.introspect('--buildoptions')
            for opt in opts:
                name = opt['name']
                value = opt['value']
                if name in expected[args]:
                    self.assertEqual(value, expected[args][name])
            # Each argument combination gets a clean reconfigure from scratch.
            self.wipe()
def test_clike_get_library_dirs(self):
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
for d in cc.get_library_dirs(env):
self.assertTrue(os.path.exists(d))
self.assertTrue(os.path.isdir(d))
self.assertTrue(os.path.isabs(d))
    def test_static_library_overwrite(self):
        '''
        Tests that static libraries are never appended to, always overwritten.
        Has to be a unit test because this involves building a project,
        reconfiguring, and building it again so that `ar` is run twice on the
        same static library.
        https://github.com/mesonbuild/meson/issues/1355
        '''
        testdir = os.path.join(self.common_test_dir, '3 static')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        static_linker = env.detect_static_linker(cc)
        if is_windows():
            raise unittest.SkipTest('https://github.com/mesonbuild/meson/issues/1526')
        if not isinstance(static_linker, mesonbuild.linkers.ArLinker):
            raise unittest.SkipTest('static linker is not `ar`')
        # Configure
        self.init(testdir)
        # Get name of static library
        targets = self.introspect('--targets')
        self.assertEqual(len(targets), 1)
        libname = targets[0]['filename'][0]
        # Build and get contents of static library
        self.build()
        # `ar t` lists the archive's member names, one per line.
        before = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
        # Filter out non-object-file contents
        before = [f for f in before if f.endswith(('.o', '.obj'))]
        # Static library should contain only one object
        self.assertEqual(len(before), 1, msg=before)
        # Change the source to be built into the static library
        self.setconf('-Dsource=libfile2.c')
        self.build()
        after = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
        # Filter out non-object-file contents
        after = [f for f in after if f.endswith(('.o', '.obj'))]
        # Static library should contain only one object
        self.assertEqual(len(after), 1, msg=after)
        # and the object must have changed
        self.assertNotEqual(before, after)
def test_static_compile_order(self):
'''
Test that the order of files in a compiler command-line while compiling
and linking statically is deterministic. This can't be an ordinary test
case because we need to inspect the compiler database.
https://github.com/mesonbuild/meson/pull/951
'''
testdir = os.path.join(self.common_test_dir, '5 linkstatic')
self.init(testdir)
compdb = self.get_compdb()
# Rules will get written out in this order
self.assertTrue(compdb[0]['file'].endswith("libfile.c"))
self.assertTrue(compdb[1]['file'].endswith("libfile2.c"))
self.assertTrue(compdb[2]['file'].endswith("libfile3.c"))
self.assertTrue(compdb[3]['file'].endswith("libfile4.c"))
# FIXME: We don't have access to the linker command
def test_run_target_files_path(self):
'''
Test that run_targets are run from the correct directory
https://github.com/mesonbuild/meson/issues/957
'''
testdir = os.path.join(self.common_test_dir, '54 run target')
self.init(testdir)
self.run_target('check_exists')
def test_install_introspection(self):
'''
Tests that the Meson introspection API exposes install filenames correctly
https://github.com/mesonbuild/meson/issues/829
'''
if self.backend is not Backend.ninja:
raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
testdir = os.path.join(self.common_test_dir, '8 install')
self.init(testdir)
intro = self.introspect('--targets')
if intro[0]['type'] == 'executable':
intro = intro[::-1]
self.assertPathListEqual(intro[0]['install_filename'], ['/usr/lib/libstat.a'])
self.assertPathListEqual(intro[1]['install_filename'], ['/usr/bin/prog' + exe_suffix])
def test_install_subdir_introspection(self):
'''
Test that the Meson introspection API also contains subdir install information
https://github.com/mesonbuild/meson/issues/5556
'''
testdir = os.path.join(self.common_test_dir, '62 install subdir')
self.init(testdir)
intro = self.introspect('--installed')
expected = {
'sub2': 'share/sub2',
'subdir/sub1': 'share/sub1',
'subdir/sub_elided': 'share',
'sub1': 'share/sub1',
'sub/sub1': 'share/sub1',
'sub_elided': 'share',
'nested_elided/sub': 'share',
}
self.assertEqual(len(intro), len(expected))
# Convert expected to PurePath
expected_converted = {PurePath(os.path.join(testdir, key)): PurePath(os.path.join(self.prefix, val)) for key, val in expected.items()}
intro_converted = {PurePath(key): PurePath(val) for key, val in intro.items()}
for src, dst in expected_converted.items():
self.assertIn(src, intro_converted)
self.assertEqual(dst, intro_converted[src])
def test_install_introspection_multiple_outputs(self):
'''
Tests that the Meson introspection API exposes multiple install filenames correctly without crashing
https://github.com/mesonbuild/meson/pull/4555
Reverted to the first file only because of https://github.com/mesonbuild/meson/pull/4547#discussion_r244173438
TODO Change the format to a list officially in a followup PR
'''
if self.backend is not Backend.ninja:
raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
testdir = os.path.join(self.common_test_dir, '144 custom target multiple outputs')
self.init(testdir)
intro = self.introspect('--targets')
if intro[0]['type'] == 'executable':
intro = intro[::-1]
self.assertPathListEqual(intro[0]['install_filename'], ['/usr/include/diff.h', '/usr/bin/diff.sh'])
self.assertPathListEqual(intro[1]['install_filename'], ['/opt/same.h', '/opt/same.sh'])
self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h', None])
self.assertPathListEqual(intro[3]['install_filename'], [None, '/usr/bin/second.sh'])
def test_install_log_content(self):
'''
Tests that the install-log.txt is consistent with the installed files and directories.
Specifically checks that the log file only contains one entry per file/directory.
https://github.com/mesonbuild/meson/issues/4499
'''
testdir = os.path.join(self.common_test_dir, '62 install subdir')
self.init(testdir)
self.install()
installpath = Path(self.installdir)
# Find installed files and directories
expected = {installpath: 0}
for name in installpath.rglob('*'):
expected[name] = 0
# Find logged files and directories
with Path(self.builddir, 'meson-logs', 'install-log.txt').open() as f:
logged = list(map(lambda l: Path(l.strip()),
filter(lambda l: not l.startswith('#'),
f.readlines())))
for name in logged:
self.assertTrue(name in expected, 'Log contains extra entry {}'.format(name))
expected[name] += 1
for name, count in expected.items():
self.assertGreater(count, 0, 'Log is missing entry for {}'.format(name))
self.assertLess(count, 2, 'Log has multiple entries for {}'.format(name))
def test_uninstall(self):
exename = os.path.join(self.installdir, 'usr/bin/prog' + exe_suffix)
testdir = os.path.join(self.common_test_dir, '8 install')
self.init(testdir)
self.assertPathDoesNotExist(exename)
self.install()
self.assertPathExists(exename)
self.uninstall()
self.assertPathDoesNotExist(exename)
def test_forcefallback(self):
testdir = os.path.join(self.unit_test_dir, '31 forcefallback')
self.init(testdir, extra_args=['--wrap-mode=forcefallback'])
self.build()
self.run_tests()
def test_env_ops_dont_stack(self):
'''
Test that env ops prepend/append do not stack, and that this usage issues a warning
'''
testdir = os.path.join(self.unit_test_dir, '63 test env does not stack')
out = self.init(testdir)
self.assertRegex(out, r'WARNING: Overriding.*TEST_VAR_APPEND')
self.assertRegex(out, r'WARNING: Overriding.*TEST_VAR_PREPEND')
self.assertNotRegex(out, r'WARNING: Overriding.*TEST_VAR_SET')
self.run_tests()
    def test_testsetups(self):
        '''
        Exercise `meson test --setup=...`: a valgrind setup whose environment
        makes the buggy test fail, plus setups that carry only a wrapper, only
        env vars, only a timeout, or nothing at all.
        '''
        if not shutil.which('valgrind'):
            raise unittest.SkipTest('Valgrind not installed.')
        testdir = os.path.join(self.unit_test_dir, '2 testsetups')
        self.init(testdir)
        self.build()
        # Run tests without setup
        self.run_tests()
        with open(os.path.join(self.logdir, 'testlog.txt')) as f:
            basic_log = f.read()
        # Run buggy test with setup that has env that will make it fail
        self.assertRaises(subprocess.CalledProcessError,
                          self._run, self.mtest_command + ['--setup=valgrind'])
        with open(os.path.join(self.logdir, 'testlog-valgrind.txt')) as f:
            vg_log = f.read()
        # The setup's env/wrapper must appear only in the valgrind run's log.
        self.assertFalse('TEST_ENV is set' in basic_log)
        self.assertFalse('Memcheck' in basic_log)
        self.assertTrue('TEST_ENV is set' in vg_log)
        self.assertTrue('Memcheck' in vg_log)
        # Run buggy test with setup without env that will pass
        self._run(self.mtest_command + ['--setup=wrapper'])
        # Setup with no properties works
        self._run(self.mtest_command + ['--setup=empty'])
        # Setup with only env works
        self._run(self.mtest_command + ['--setup=onlyenv'])
        self._run(self.mtest_command + ['--setup=onlyenv2'])
        self._run(self.mtest_command + ['--setup=onlyenv3'])
        # Setup with only a timeout works
        self._run(self.mtest_command + ['--setup=timeout'])
    def test_testsetup_selection(self):
        '''
        Exercise how --setup names are resolved across (sub)projects, in
        combination with --no-suite filters; unqualified setup names must be
        resolvable in every project whose tests are selected.
        '''
        testdir = os.path.join(self.unit_test_dir, '14 testsetup selection')
        self.init(testdir)
        self.build()
        # Run tests without setup
        self.run_tests()
        # A setup missing from the foo subproject fails while foo's tests are
        # selected, and passes once they are excluded.
        self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=missingfromfoo'])
        self._run(self.mtest_command + ['--setup=missingfromfoo', '--no-suite=foo:'])
        # A setup defined everywhere can be used bare or project-qualified.
        self._run(self.mtest_command + ['--setup=worksforall'])
        self._run(self.mtest_command + ['--setup=main:worksforall'])
        # A setup only in bar: bare use fails until all other projects' tests
        # are excluded; the bar-qualified form always works.
        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=onlyinbar'])
        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:'])
        self._run(self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:', '--no-suite=foo:'])
        self._run(self.mtest_command + ['--setup=bar:onlyinbar'])
        # Qualifying with the wrong project is always an error.
        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=foo:onlyinbar'])
        self.assertRaises(subprocess.CalledProcessError, self._run,
                          self.mtest_command + ['--setup=main:onlyinbar'])
def test_testsetup_default(self):
testdir = os.path.join(self.unit_test_dir, '49 testsetup default')
self.init(testdir)
self.build()
# Run tests without --setup will cause the default setup to be used
self.run_tests()
with open(os.path.join(self.logdir, 'testlog.txt')) as f:
default_log = f.read()
# Run tests with explicitly using the same setup that is set as default
self._run(self.mtest_command + ['--setup=mydefault'])
with open(os.path.join(self.logdir, 'testlog-mydefault.txt')) as f:
mydefault_log = f.read()
# Run tests with another setup
self._run(self.mtest_command + ['--setup=other'])
with open(os.path.join(self.logdir, 'testlog-other.txt')) as f:
other_log = f.read()
self.assertTrue('ENV_A is 1' in default_log)
self.assertTrue('ENV_B is 2' in default_log)
self.assertTrue('ENV_C is 2' in default_log)
self.assertTrue('ENV_A is 1' in mydefault_log)
self.assertTrue('ENV_B is 2' in mydefault_log)
self.assertTrue('ENV_C is 2' in mydefault_log)
self.assertTrue('ENV_A is 1' in other_log)
self.assertTrue('ENV_B is 3' in other_log)
self.assertTrue('ENV_C is 2' in other_log)
def assertFailedTestCount(self, failure_count, command):
try:
self._run(command)
self.assertEqual(0, failure_count, 'Expected %d tests to fail.' % failure_count)
except subprocess.CalledProcessError as e:
self.assertEqual(e.returncode, failure_count)
def test_suite_selection(self):
testdir = os.path.join(self.unit_test_dir, '4 suite selection')
self.init(testdir)
self.build()
self.assertFailedTestCount(4, self.mtest_command)
self.assertFailedTestCount(0, self.mtest_command + ['--suite', ':success'])
self.assertFailedTestCount(3, self.mtest_command + ['--suite', ':fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', ':success'])
self.assertFailedTestCount(1, self.mtest_command + ['--no-suite', ':fail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'mainprj:success'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'mainprj:success'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjfail:success'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjfail:success'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:success'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:success'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjmix:success'])
self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix:fail'])
self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjmix:success'])
self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix:fail'])
self.assertFailedTestCount(3, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj'])
self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail', 'mainprj-failing_test'])
self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail:fail', '--no-suite', 'subprjmix:fail'])
def test_build_by_default(self):
testdir = os.path.join(self.common_test_dir, '133 build by default')
self.init(testdir)
self.build()
genfile1 = os.path.join(self.builddir, 'generated1.dat')
genfile2 = os.path.join(self.builddir, 'generated2.dat')
exe1 = os.path.join(self.builddir, 'fooprog' + exe_suffix)
exe2 = os.path.join(self.builddir, 'barprog' + exe_suffix)
self.assertPathExists(genfile1)
self.assertPathExists(genfile2)
self.assertPathDoesNotExist(exe1)
self.assertPathDoesNotExist(exe2)
self.build(target=('fooprog' + exe_suffix))
self.assertPathExists(exe1)
self.build(target=('barprog' + exe_suffix))
self.assertPathExists(exe2)
    def test_internal_include_order(self):
        '''
        Verify the exact ordering of -I flags in compile commands: target
        private dir first, then build/source subdirs, per-target c_args,
        declared include_directories, dependency includes and finally the
        custom-target include dir.
        '''
        testdir = os.path.join(self.common_test_dir, '134 include order')
        self.init(testdir)
        execmd = fxecmd = None
        # Fish the two compile commands of interest out of the compdb.
        for cmd in self.get_compdb():
            if 'someexe' in cmd['command']:
                execmd = cmd['command']
                continue
            if 'somefxe' in cmd['command']:
                fxecmd = cmd['command']
                continue
        if not execmd or not fxecmd:
            raise Exception('Could not find someexe and somfxe commands')
        # Check include order for 'someexe'
        incs = [a for a in split_args(execmd) if a.startswith("-I")]
        self.assertEqual(len(incs), 9)
        # target private dir
        someexe_id = Target.construct_id_from_path("sub4", "someexe", "@exe")
        self.assertPathEqual(incs[0], "-I" + os.path.join("sub4", someexe_id))
        # target build subdir
        self.assertPathEqual(incs[1], "-Isub4")
        # target source subdir
        self.assertPathBasenameEqual(incs[2], 'sub4')
        # include paths added via per-target c_args: ['-I'...]
        self.assertPathBasenameEqual(incs[3], 'sub3')
        # target include_directories: build dir
        self.assertPathEqual(incs[4], "-Isub2")
        # target include_directories: source dir
        self.assertPathBasenameEqual(incs[5], 'sub2')
        # target internal dependency include_directories: build dir
        self.assertPathEqual(incs[6], "-Isub1")
        # target internal dependency include_directories: source dir
        self.assertPathBasenameEqual(incs[7], 'sub1')
        # custom target include dir
        self.assertPathEqual(incs[8], '-Ictsub')
        # Check include order for 'somefxe'
        incs = [a for a in split_args(fxecmd) if a.startswith('-I')]
        self.assertEqual(len(incs), 9)
        # target private dir
        self.assertPathEqual(incs[0], '-Isomefxe@exe')
        # target build dir
        self.assertPathEqual(incs[1], '-I.')
        # target source dir
        self.assertPathBasenameEqual(incs[2], os.path.basename(testdir))
        # target internal dependency correct include_directories: build dir
        self.assertPathEqual(incs[3], "-Isub4")
        # target internal dependency correct include_directories: source dir
        self.assertPathBasenameEqual(incs[4], 'sub4')
        # target internal dependency dep include_directories: build dir
        self.assertPathEqual(incs[5], "-Isub1")
        # target internal dependency dep include_directories: source dir
        self.assertPathBasenameEqual(incs[6], 'sub1')
        # target internal dependency wrong include_directories: build dir
        self.assertPathEqual(incs[7], "-Isub2")
        # target internal dependency wrong include_directories: source dir
        self.assertPathBasenameEqual(incs[8], 'sub2')
    def test_compiler_detection(self):
        '''
        Test that automatic compiler detection and setting from the environment
        both work just fine. This is needed because while running project tests
        and other unit tests, we always read CC/CXX/etc from the environment.
        '''
        # Shorthand aliases for the compiler/linker classes we expect.
        gnu = mesonbuild.compilers.GnuCompiler
        clang = mesonbuild.compilers.ClangCompiler
        intel = mesonbuild.compilers.IntelGnuLikeCompiler
        msvc = (mesonbuild.compilers.VisualStudioCCompiler, mesonbuild.compilers.VisualStudioCPPCompiler)
        clangcl = (mesonbuild.compilers.ClangClCCompiler, mesonbuild.compilers.ClangClCPPCompiler)
        ar = mesonbuild.linkers.ArLinker
        lib = mesonbuild.linkers.VisualStudioLinker
        langs = [('c', 'CC'), ('cpp', 'CXX')]
        if not is_windows() and platform.machine().lower() != 'e2k':
            # Objective-C/C++ are only exercised off Windows and off e2k.
            langs += [('objc', 'OBJC'), ('objcpp', 'OBJCXX')]
        testdir = os.path.join(self.unit_test_dir, '5 compiler detection')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        for lang, evar in langs:
            # Detect with evar and do sanity checks on that
            if evar in os.environ:
                ecc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
                self.assertTrue(ecc.version)
                elinker = env.detect_static_linker(ecc)
                # Pop it so we don't use it for the next detection
                evalue = os.environ.pop(evar)
                # Very rough/strict heuristics. Would never work for actual
                # compiler detection, but should be ok for the tests.
                ebase = os.path.basename(evalue)
                if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
                    self.assertIsInstance(ecc, gnu)
                    self.assertIsInstance(elinker, ar)
                elif 'clang-cl' in ebase:
                    self.assertIsInstance(ecc, clangcl)
                    self.assertIsInstance(elinker, lib)
                elif 'clang' in ebase:
                    self.assertIsInstance(ecc, clang)
                    self.assertIsInstance(elinker, ar)
                elif ebase.startswith('ic'):
                    self.assertIsInstance(ecc, intel)
                    self.assertIsInstance(elinker, ar)
                elif ebase.startswith('cl'):
                    self.assertIsInstance(ecc, msvc)
                    self.assertIsInstance(elinker, lib)
                else:
                    raise AssertionError('Unknown compiler {!r}'.format(evalue))
                # Check that we actually used the evalue correctly as the compiler
                self.assertEqual(ecc.get_exelist(), split_args(evalue))
            # Do auto-detection of compiler based on platform, PATH, etc.
            cc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
            self.assertTrue(cc.version)
            linker = env.detect_static_linker(cc)
            # Check compiler type
            if isinstance(cc, gnu):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
                else:
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
            if isinstance(cc, clangcl):
                self.assertIsInstance(linker, lib)
                self.assertIsInstance(cc.linker, mesonbuild.linkers.ClangClDynamicLinker)
            if isinstance(cc, clang):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
                elif is_windows():
                    # This is clang, not clang-cl
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.MSVCDynamicLinker)
                else:
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
            if isinstance(cc, intel):
                self.assertIsInstance(linker, ar)
                if is_osx():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
                elif is_windows():
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.XilinkDynamicLinker)
                else:
                    self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuDynamicLinker)
            if isinstance(cc, msvc):
                self.assertTrue(is_windows())
                self.assertIsInstance(linker, lib)
                self.assertEqual(cc.id, 'msvc')
                self.assertTrue(hasattr(cc, 'is_64'))
                self.assertIsInstance(cc.linker, mesonbuild.linkers.MSVCDynamicLinker)
                # If we're on Windows CI, we know what the compiler will be
                if 'arch' in os.environ:
                    if os.environ['arch'] == 'x64':
                        self.assertTrue(cc.is_64)
                    else:
                        self.assertFalse(cc.is_64)
            # Set evar ourselves to a wrapper script that just calls the same
            # exelist + some argument. This is meant to test that setting
            # something like `ccache gcc -pipe` or `distcc ccache gcc` works.
            wrapper = os.path.join(testdir, 'compiler wrapper.py')
            wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
            wrappercc_s = ''
            for w in wrappercc:
                wrappercc_s += quote_arg(w) + ' '
            os.environ[evar] = wrappercc_s
            wcc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
            # Check static linker too
            wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
            wrapperlinker_s = ''
            for w in wrapperlinker:
                wrapperlinker_s += quote_arg(w) + ' '
            os.environ['AR'] = wrapperlinker_s
            wlinker = env.detect_static_linker(wcc)
            # Pop it so we don't use it for the next detection
            evalue = os.environ.pop('AR')
            # Must be the same type since it's a wrapper around the same exelist
            self.assertIs(type(cc), type(wcc))
            self.assertIs(type(linker), type(wlinker))
            # Ensure that the exelist is correct
            self.assertEqual(wcc.get_exelist(), wrappercc)
            self.assertEqual(wlinker.get_exelist(), wrapperlinker)
            # Ensure that the version detection worked correctly
            self.assertEqual(cc.version, wcc.version)
            if hasattr(cc, 'is_64'):
                self.assertEqual(cc.is_64, wcc.is_64)
def test_always_prefer_c_compiler_for_asm(self):
testdir = os.path.join(self.common_test_dir, '137 c cpp and asm')
# Skip if building with MSVC
env = get_fake_env(testdir, self.builddir, self.prefix)
if env.detect_c_compiler(MachineChoice.HOST).get_id() == 'msvc':
raise unittest.SkipTest('MSVC can\'t compile assembly')
self.init(testdir)
commands = {'c-asm': {}, 'cpp-asm': {}, 'cpp-c-asm': {}, 'c-cpp-asm': {}}
for cmd in self.get_compdb():
# Get compiler
split = split_args(cmd['command'])
if split[0] == 'ccache':
compiler = split[1]
else:
compiler = split[0]
# Classify commands
if 'Ic-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['c-asm']['asm'] = compiler
elif cmd['file'].endswith('.c'):
commands['c-asm']['c'] = compiler
else:
raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command']))
elif 'Icpp-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['cpp-asm']['asm'] = compiler
elif cmd['file'].endswith('.cpp'):
commands['cpp-asm']['cpp'] = compiler
else:
raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command']))
elif 'Ic-cpp-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['c-cpp-asm']['asm'] = compiler
elif cmd['file'].endswith('.c'):
commands['c-cpp-asm']['c'] = compiler
elif cmd['file'].endswith('.cpp'):
commands['c-cpp-asm']['cpp'] = compiler
else:
raise AssertionError('{!r} found in c-cpp-asm?'.format(cmd['command']))
elif 'Icpp-c-asm' in cmd['command']:
if cmd['file'].endswith('.S'):
commands['cpp-c-asm']['asm'] = compiler
elif cmd['file'].endswith('.c'):
commands['cpp-c-asm']['c'] = compiler
elif cmd['file'].endswith('.cpp'):
commands['cpp-c-asm']['cpp'] = compiler
else:
raise AssertionError('{!r} found in cpp-c-asm?'.format(cmd['command']))
else:
raise AssertionError('Unknown command {!r} found'.format(cmd['command']))
# Check that .S files are always built with the C compiler
self.assertEqual(commands['c-asm']['asm'], commands['c-asm']['c'])
self.assertEqual(commands['c-asm']['asm'], commands['cpp-asm']['asm'])
self.assertEqual(commands['cpp-asm']['asm'], commands['c-cpp-asm']['c'])
self.assertEqual(commands['c-cpp-asm']['asm'], commands['c-cpp-asm']['c'])
self.assertEqual(commands['cpp-c-asm']['asm'], commands['cpp-c-asm']['c'])
self.assertNotEqual(commands['cpp-asm']['asm'], commands['cpp-asm']['cpp'])
self.assertNotEqual(commands['c-cpp-asm']['c'], commands['c-cpp-asm']['cpp'])
self.assertNotEqual(commands['cpp-c-asm']['c'], commands['cpp-c-asm']['cpp'])
# Check that the c-asm target is always linked with the C linker
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
m = re.search('build c-asm.*: c_LINKER', contents)
self.assertIsNotNone(m, msg=contents)
def test_preprocessor_checks_CPPFLAGS(self):
'''
Test that preprocessor compiler checks read CPPFLAGS and also CFLAGS but
not LDFLAGS.
'''
testdir = os.path.join(self.common_test_dir, '136 get define')
define = 'MESON_TEST_DEFINE_VALUE'
# NOTE: this list can't have \n, ' or "
# \n is never substituted by the GNU pre-processor via a -D define
# ' and " confuse split_args() even when they are escaped
# % and # confuse the MSVC preprocessor
# !, ^, *, and < confuse lcc preprocessor
value = 'spaces and fun@$&()-=_+{}[]:;>?,./~`'
for env_var in ['CPPFLAGS', 'CFLAGS']:
env = {}
env[env_var] = '-D{}="{}"'.format(define, value)
env['LDFLAGS'] = '-DMESON_FAIL_VALUE=cflags-read'.format(define)
self.init(testdir, extra_args=['-D{}={}'.format(define, value)], override_envvars=env)
def test_custom_target_exe_data_deterministic(self):
testdir = os.path.join(self.common_test_dir, '113 custom target capture')
self.init(testdir)
meson_exe_dat1 = glob(os.path.join(self.privatedir, 'meson_exe*.dat'))
self.wipe()
self.init(testdir)
meson_exe_dat2 = glob(os.path.join(self.privatedir, 'meson_exe*.dat'))
self.assertListEqual(meson_exe_dat1, meson_exe_dat2)
    def test_source_changes_cause_rebuild(self):
        '''
        Test that changes to sources and headers cause rebuilds, but not
        changes to unused files (as determined by the dependency file) in the
        input files list.
        '''
        testdir = os.path.join(self.common_test_dir, '20 header in file list')
        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Changing mtime of header.h should rebuild everything
        # (utime only bumps the timestamp; contents are untouched)
        self.utime(os.path.join(testdir, 'header.h'))
        self.assertRebuiltTarget('prog')
    def test_custom_target_changes_cause_rebuild(self):
        '''
        Test that in a custom target, changes to the input files, the
        ExternalProgram, and any File objects on the command-line cause
        a rebuild.
        '''
        testdir = os.path.join(self.common_test_dir, '60 custom header generator')
        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Changing mtime of these should rebuild everything
        # (input file, generator script and command-line File object)
        for f in ('input.def', 'makeheader.py', 'somefile.txt'):
            self.utime(os.path.join(testdir, f))
            self.assertRebuiltTarget('prog')
    def test_source_generator_program_cause_rebuild(self):
        '''
        Test that changes to generator programs in the source tree cause
        a rebuild.
        '''
        testdir = os.path.join(self.common_test_dir, '94 gen extra')
        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Changing mtime of generator should rebuild the executable
        self.utime(os.path.join(testdir, 'srcgen.py'))
        self.assertRebuiltTarget('basic')
    def test_static_library_lto(self):
        '''
        Test that static libraries can be built with LTO and linked to
        executables. On Linux, this requires the use of gcc-ar.
        https://github.com/mesonbuild/meson/issues/1646
        '''
        testdir = os.path.join(self.common_test_dir, '5 linkstatic')
        # Probe the host compiler first: clang on Windows cannot do LTO yet.
        env = get_fake_env(testdir, self.builddir, self.prefix)
        if env.detect_c_compiler(MachineChoice.HOST).get_id() == 'clang' and is_windows():
            raise unittest.SkipTest('LTO not (yet) supported by windows clang')
        self.init(testdir, extra_args='-Db_lto=true')
        self.build()
        self.run_tests()
def test_dist_git(self):
if not shutil.which('git'):
raise unittest.SkipTest('Git not found')
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Dist is only supported with Ninja')
try:
self.dist_impl(_git_init)
except PermissionError:
# When run under Windows CI, something (virus scanner?)
# holds on to the git files so cleaning up the dir
# fails sometimes.
pass
def test_dist_hg(self):
if not shutil.which('hg'):
raise unittest.SkipTest('Mercurial not found')
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Dist is only supported with Ninja')
def hg_init(project_dir):
subprocess.check_call(['hg', 'init'], cwd=project_dir)
with open(os.path.join(project_dir, '.hg', 'hgrc'), 'w') as f:
print('[ui]', file=f)
print('username=Author Person <teh_coderz@example.com>', file=f)
subprocess.check_call(['hg', 'add', 'meson.build', 'distexe.c'], cwd=project_dir)
subprocess.check_call(['hg', 'commit', '-m', 'I am a project'], cwd=project_dir)
try:
self.dist_impl(hg_init, include_subprojects=False)
except PermissionError:
# When run under Windows CI, something (virus scanner?)
# holds on to the hg files so cleaning up the dir
# fails sometimes.
pass
def test_dist_git_script(self):
if not shutil.which('git'):
raise unittest.SkipTest('Git not found')
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Dist is only supported with Ninja')
try:
with tempfile.TemporaryDirectory() as tmpdir:
project_dir = os.path.join(tmpdir, 'a')
shutil.copytree(os.path.join(self.unit_test_dir, '35 dist script'),
project_dir)
_git_init(project_dir)
self.init(project_dir)
self.build('dist')
except PermissionError:
# When run under Windows CI, something (virus scanner?)
# holds on to the git files so cleaning up the dir
# fails sometimes.
pass
def create_dummy_subproject(self, project_dir, name):
path = os.path.join(project_dir, 'subprojects', name)
os.makedirs(path)
with open(os.path.join(path, 'meson.build'), 'w') as ofile:
ofile.write("project('{}')".format(name))
return path
    def dist_impl(self, vcs_init, include_subprojects=True):
        # Shared driver for the dist tests: build a throwaway VCS-backed
        # project, run `ninja dist`, then check xz/zip archives and their
        # checksum files, with and without --include-subprojects.
        # `vcs_init` is a callable that turns a directory into a repository.
        # Create this on the fly because having rogue .git directories inside
        # the source tree leads to all kinds of trouble.
        with tempfile.TemporaryDirectory() as project_dir:
            with open(os.path.join(project_dir, 'meson.build'), 'w') as ofile:
                ofile.write('''project('disttest', 'c', version : '1.4.3')
e = executable('distexe', 'distexe.c')
test('dist test', e)
subproject('vcssub', required : false)
subproject('tarballsub', required : false)
''')
            with open(os.path.join(project_dir, 'distexe.c'), 'w') as ofile:
                ofile.write('''#include<stdio.h>
int main(int argc, char **argv) {
    printf("I am a distribution test.\\n");
    return 0;
}
''')
            xz_distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.xz')
            xz_checksumfile = xz_distfile + '.sha256sum'
            zip_distfile = os.path.join(self.distdir, 'disttest-1.4.3.zip')
            zip_checksumfile = zip_distfile + '.sha256sum'
            vcs_init(project_dir)
            if include_subprojects:
                # One subproject is under version control, one is a plain
                # tarball-style dir, and one is never referenced at all.
                vcs_init(self.create_dummy_subproject(project_dir, 'vcssub'))
                self.create_dummy_subproject(project_dir, 'tarballsub')
                self.create_dummy_subproject(project_dir, 'unusedsub')
            self.init(project_dir)
            self.build('dist')
            # Default format is tar.xz only.
            self.assertPathExists(xz_distfile)
            self.assertPathExists(xz_checksumfile)
            self.assertPathDoesNotExist(zip_distfile)
            self.assertPathDoesNotExist(zip_checksumfile)
            self._run(self.meson_command + ['dist', '--formats', 'zip'],
                      workdir=self.builddir)
            self.assertPathExists(zip_distfile)
            self.assertPathExists(zip_checksumfile)
            if include_subprojects:
                # Without --include-subprojects, no subproject files are packed.
                z = zipfile.ZipFile(zip_distfile)
                self.assertEqual(sorted(['disttest-1.4.3/',
                                         'disttest-1.4.3/meson.build',
                                         'disttest-1.4.3/distexe.c']),
                                 sorted(z.namelist()))
                # With it, the two used subprojects are packed; 'unusedsub'
                # is never referenced by meson.build and stays out.
                self._run(self.meson_command + ['dist', '--formats', 'zip', '--include-subprojects'],
                          workdir=self.builddir)
                z = zipfile.ZipFile(zip_distfile)
                self.assertEqual(sorted(['disttest-1.4.3/',
                                         'disttest-1.4.3/subprojects/',
                                         'disttest-1.4.3/meson.build',
                                         'disttest-1.4.3/distexe.c',
                                         'disttest-1.4.3/subprojects/tarballsub/',
                                         'disttest-1.4.3/subprojects/vcssub/',
                                         'disttest-1.4.3/subprojects/tarballsub/meson.build',
                                         'disttest-1.4.3/subprojects/vcssub/meson.build']),
                                 sorted(z.namelist()))
    def test_rpath_uses_ORIGIN(self):
        '''
        Test that built targets use $ORIGIN in rpath, which ensures that they
        are relocatable and ensures that builds are reproducible since the
        build directory won't get embedded into the built binaries.
        '''
        if is_windows() or is_cygwin():
            raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
        testdir = os.path.join(self.common_test_dir, '42 library chain')
        self.init(testdir)
        self.build()
        # These link against other in-tree libs, so they need rpath entries.
        for each in ('prog', 'subdir/liblib1.so', ):
            rpath = get_rpath(os.path.join(self.builddir, each))
            self.assertTrue(rpath, 'Rpath could not be determined for {}.'.format(each))
            if is_dragonflybsd():
                # DragonflyBSD will prepend /usr/lib/gccVERSION to the rpath,
                # so ignore that.
                self.assertTrue(rpath.startswith('/usr/lib/gcc'))
                rpaths = rpath.split(':')[1:]
            else:
                rpaths = rpath.split(':')
            for path in rpaths:
                self.assertTrue(path.startswith('$ORIGIN'), msg=(each, path))
        # These two don't link to anything else, so they do not need an rpath entry.
        for each in ('subdir/subdir2/liblib2.so', 'subdir/subdir3/liblib3.so'):
            rpath = get_rpath(os.path.join(self.builddir, each))
            if is_dragonflybsd():
                # The rpath should be equal to /usr/lib/gccVERSION
                self.assertTrue(rpath.startswith('/usr/lib/gcc'))
                self.assertEqual(len(rpath.split(':')), 1)
            else:
                self.assertTrue(rpath is None)
def test_dash_d_dedup(self):
testdir = os.path.join(self.unit_test_dir, '9 d dedup')
self.init(testdir)
cmd = self.get_compdb()[0]['command']
self.assertTrue('-D FOO -D BAR' in cmd or
'"-D" "FOO" "-D" "BAR"' in cmd or
'/D FOO /D BAR' in cmd or
'"/D" "FOO" "/D" "BAR"' in cmd)
def test_all_forbidden_targets_tested(self):
'''
Test that all forbidden targets are tested in the '154 reserved targets'
test. Needs to be a unit test because it accesses Meson internals.
'''
testdir = os.path.join(self.common_test_dir, '154 reserved targets')
targets = mesonbuild.coredata.forbidden_target_names
# We don't actually define a target with this name
targets.pop('build.ninja')
# Remove this to avoid multiple entries with the same name
# but different case.
targets.pop('PHONY')
for i in targets:
self.assertPathExists(os.path.join(testdir, i))
def detect_prebuild_env(self):
env = get_fake_env()
cc = env.detect_c_compiler(MachineChoice.HOST)
stlinker = env.detect_static_linker(cc)
if mesonbuild.mesonlib.is_windows():
object_suffix = 'obj'
shared_suffix = 'dll'
elif mesonbuild.mesonlib.is_cygwin():
object_suffix = 'o'
shared_suffix = 'dll'
elif mesonbuild.mesonlib.is_osx():
object_suffix = 'o'
shared_suffix = 'dylib'
else:
object_suffix = 'o'
shared_suffix = 'so'
return (cc, stlinker, object_suffix, shared_suffix)
def pbcompile(self, compiler, source, objectfile, extra_args=None):
cmd = compiler.get_exelist()
extra_args = extra_args or []
if compiler.get_argument_syntax() == 'msvc':
cmd += ['/nologo', '/Fo' + objectfile, '/c', source] + extra_args
else:
cmd += ['-c', source, '-o', objectfile] + extra_args
subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def test_prebuilt_object(self):
(compiler, _, object_suffix, _) = self.detect_prebuild_env()
tdir = os.path.join(self.unit_test_dir, '15 prebuilt object')
source = os.path.join(tdir, 'source.c')
objectfile = os.path.join(tdir, 'prebuilt.' + object_suffix)
self.pbcompile(compiler, source, objectfile)
try:
self.init(tdir)
self.build()
self.run_tests()
finally:
os.unlink(objectfile)
def build_static_lib(self, compiler, linker, source, objectfile, outfile, extra_args=None):
if extra_args is None:
extra_args = []
if compiler.get_argument_syntax() == 'msvc':
link_cmd = ['lib', '/NOLOGO', '/OUT:' + outfile, objectfile]
else:
link_cmd = ['ar', 'csr', outfile, objectfile]
link_cmd = linker.get_exelist()
link_cmd += linker.get_always_args()
link_cmd += linker.get_std_link_args()
link_cmd += linker.get_output_args(outfile)
link_cmd += [objectfile]
self.pbcompile(compiler, source, objectfile, extra_args=extra_args)
try:
subprocess.check_call(link_cmd)
finally:
os.unlink(objectfile)
def test_prebuilt_static_lib(self):
(cc, stlinker, object_suffix, _) = self.detect_prebuild_env()
tdir = os.path.join(self.unit_test_dir, '16 prebuilt static')
source = os.path.join(tdir, 'libdir/best.c')
objectfile = os.path.join(tdir, 'libdir/best.' + object_suffix)
stlibfile = os.path.join(tdir, 'libdir/libbest.a')
self.build_static_lib(cc, stlinker, source, objectfile, stlibfile)
# Run the test
try:
self.init(tdir)
self.build()
self.run_tests()
finally:
os.unlink(stlibfile)
    def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra_args=None):
        # Compile `source` and link it into the shared library `outfile`
        # outside of meson. `impfile` is the import library path (msvc only).
        # The intermediate object file is removed even if linking fails.
        if extra_args is None:
            extra_args = []
        if compiler.get_argument_syntax() == 'msvc':
            link_cmd = compiler.get_linker_exelist() + [
                '/NOLOGO', '/DLL', '/DEBUG', '/IMPLIB:' + impfile,
                '/OUT:' + outfile, objectfile]
        else:
            # PE/COFF and Mach-O objects don't need/want -fPIC.
            if not (compiler.info.is_windows() or compiler.info.is_cygwin() or compiler.info.is_darwin()):
                extra_args += ['-fPIC']
            link_cmd = compiler.get_exelist() + ['-shared', '-o', outfile, objectfile]
            if not mesonbuild.mesonlib.is_osx():
                # Give the library a soname so the runtime linker finds it.
                link_cmd += ['-Wl,-soname=' + os.path.basename(outfile)]
        self.pbcompile(compiler, source, objectfile, extra_args=extra_args)
        try:
            subprocess.check_call(link_cmd)
        finally:
            os.unlink(objectfile)
def test_prebuilt_shared_lib(self):
(cc, _, object_suffix, shared_suffix) = self.detect_prebuild_env()
tdir = os.path.join(self.unit_test_dir, '17 prebuilt shared')
source = os.path.join(tdir, 'alexandria.c')
objectfile = os.path.join(tdir, 'alexandria.' + object_suffix)
impfile = os.path.join(tdir, 'alexandria.lib')
if cc.get_argument_syntax() == 'msvc':
shlibfile = os.path.join(tdir, 'alexandria.' + shared_suffix)
elif is_cygwin():
shlibfile = os.path.join(tdir, 'cygalexandria.' + shared_suffix)
else:
shlibfile = os.path.join(tdir, 'libalexandria.' + shared_suffix)
self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
# Run the test
try:
self.init(tdir)
self.build()
self.run_tests()
finally:
os.unlink(shlibfile)
if mesonbuild.mesonlib.is_windows():
# Clean up all the garbage MSVC writes in the
# source tree.
for fname in glob(os.path.join(tdir, 'alexandria.*')):
if os.path.splitext(fname)[1] not in ['.c', '.h']:
os.unlink(fname)
    @skipIfNoPkgconfig
    def test_pkgconfig_static(self):
        '''
        Test that the we prefer static libraries when `static: true` is
        passed to dependency() with pkg-config. Can't be an ordinary test
        because we need to build libs and try to find them from meson.build
        Also test that it's not a hard error to have unsatisfiable library deps
        since system libraries -lm will never be found statically.
        https://github.com/mesonbuild/meson/issues/2785
        '''
        (cc, stlinker, objext, shext) = self.detect_prebuild_env()
        testdir = os.path.join(self.unit_test_dir, '18 pkgconfig static')
        source = os.path.join(testdir, 'foo.c')
        objectfile = os.path.join(testdir, 'foo.' + objext)
        stlibfile = os.path.join(testdir, 'libfoo.a')
        impfile = os.path.join(testdir, 'foo.lib')
        # Shared library name follows the platform convention.
        if cc.get_argument_syntax() == 'msvc':
            shlibfile = os.path.join(testdir, 'foo.' + shext)
        elif is_cygwin():
            shlibfile = os.path.join(testdir, 'cygfoo.' + shext)
        else:
            shlibfile = os.path.join(testdir, 'libfoo.' + shext)
        # Build libs
        self.build_static_lib(cc, stlinker, source, objectfile, stlibfile, extra_args=['-DFOO_STATIC'])
        self.build_shared_lib(cc, source, objectfile, shlibfile, impfile)
        # Run test
        # PKG_CONFIG_LIBDIR points at the builddir so only our .pc is found.
        try:
            self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': self.builddir})
            self.build()
            self.run_tests()
        finally:
            os.unlink(stlibfile)
            os.unlink(shlibfile)
            if mesonbuild.mesonlib.is_windows():
                # Clean up all the garbage MSVC writes in the
                # source tree.
                for fname in glob(os.path.join(testdir, 'foo.*')):
                    if os.path.splitext(fname)[1] not in ['.c', '.h', '.in']:
                        os.unlink(fname)
    @skipIfNoPkgconfig
    def test_pkgconfig_gen_escaping(self):
        # A prefix containing a space must come back properly escaped in the
        # -L/-I args produced by the generated pkg-config file.
        testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
        prefix = '/usr/with spaces'
        libdir = 'lib'
        self.init(testdir, extra_args=['--prefix=' + prefix,
                                       '--libdir=' + libdir])
        # Find foo dependency
        # NOTE(review): this sets PKG_CONFIG_LIBDIR in os.environ without
        # restoring it afterwards — it leaks into later tests; confirm intended.
        os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
        env = get_fake_env(testdir, self.builddir, self.prefix)
        kwargs = {'required': True, 'silent': True}
        foo_dep = PkgConfigDependency('libfoo', env, kwargs)
        # Ensure link_args are properly quoted
        libdir = PurePath(prefix) / PurePath(libdir)
        link_args = ['-L' + libdir.as_posix(), '-lfoo']
        self.assertEqual(foo_dep.get_link_args(), link_args)
        # Ensure include args are properly quoted
        incdir = PurePath(prefix) / PurePath('include')
        cargs = ['-I' + incdir.as_posix()]
        self.assertEqual(foo_dep.get_compile_args(), cargs)
def test_array_option_change(self):
def get_opt():
opts = self.introspect('--buildoptions')
for x in opts:
if x.get('name') == 'list':
return x
raise Exception(opts)
expected = {
'name': 'list',
'description': 'list',
'section': 'user',
'type': 'array',
'value': ['foo', 'bar'],
'machine': 'any',
}
tdir = os.path.join(self.unit_test_dir, '19 array option')
self.init(tdir)
original = get_opt()
self.assertDictEqual(original, expected)
expected['value'] = ['oink', 'boink']
self.setconf('-Dlist=oink,boink')
changed = get_opt()
self.assertEqual(changed, expected)
def test_array_option_bad_change(self):
def get_opt():
opts = self.introspect('--buildoptions')
for x in opts:
if x.get('name') == 'list':
return x
raise Exception(opts)
expected = {
'name': 'list',
'description': 'list',
'section': 'user',
'type': 'array',
'value': ['foo', 'bar'],
'machine': 'any',
}
tdir = os.path.join(self.unit_test_dir, '19 array option')
self.init(tdir)
original = get_opt()
self.assertDictEqual(original, expected)
with self.assertRaises(subprocess.CalledProcessError):
self.setconf('-Dlist=bad')
changed = get_opt()
self.assertDictEqual(changed, expected)
def test_array_option_empty_equivalents(self):
"""Array options treat -Dopt=[] and -Dopt= as equivalent."""
def get_opt():
opts = self.introspect('--buildoptions')
for x in opts:
if x.get('name') == 'list':
return x
raise Exception(opts)
expected = {
'name': 'list',
'description': 'list',
'section': 'user',
'type': 'array',
'value': [],
'machine': 'any',
}
tdir = os.path.join(self.unit_test_dir, '19 array option')
self.init(tdir, extra_args='-Dlist=')
original = get_opt()
self.assertDictEqual(original, expected)
def opt_has(self, name, value):
res = self.introspect('--buildoptions')
found = False
for i in res:
if i['name'] == name:
self.assertEqual(i['value'], value)
found = True
break
self.assertTrue(found, "Array option not found in introspect data.")
    def test_free_stringarray_setting(self):
        # A free-form (no choices) string array option accepts both the
        # comma-separated and the list-literal command-line syntax; the
        # list-literal form preserves commas inside elements.
        testdir = os.path.join(self.common_test_dir, '43 options')
        self.init(testdir)
        self.opt_has('free_array_opt', [])
        self.setconf('-Dfree_array_opt=foo,bar', will_build=False)
        self.opt_has('free_array_opt', ['foo', 'bar'])
        self.setconf("-Dfree_array_opt=['a,b', 'c,d']", will_build=False)
        self.opt_has('free_array_opt', ['a,b', 'c,d'])
    def test_subproject_promotion(self):
        """Exercise 'meson wrap promote': promoting a nested subproject by
        name, rejecting ambiguous or invalid names, promoting by explicit
        path, and promoting a .wrap file; the promoted tree must then build.
        """
        testdir = os.path.join(self.unit_test_dir, '12 promote')
        # Work on a copy so the source tree is not modified.
        workdir = os.path.join(self.builddir, 'work')
        shutil.copytree(testdir, workdir)
        spdir = os.path.join(workdir, 'subprojects')
        s3dir = os.path.join(spdir, 's3')
        scommondir = os.path.join(spdir, 'scommon')
        # Unambiguous name: promotion must succeed and create the directory.
        self.assertFalse(os.path.isdir(s3dir))
        subprocess.check_call(self.wrap_command + ['promote', 's3'], cwd=workdir)
        self.assertTrue(os.path.isdir(s3dir))
        # Ambiguous name and invalid path: both must fail and leave no dir.
        self.assertFalse(os.path.isdir(scommondir))
        self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'scommon'],
                                            cwd=workdir,
                                            stdout=subprocess.DEVNULL), 0)
        self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'invalid/path/to/scommon'],
                                            cwd=workdir,
                                            stderr=subprocess.DEVNULL), 0)
        self.assertFalse(os.path.isdir(scommondir))
        # Disambiguating with a full path must succeed.
        subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/scommon'], cwd=workdir)
        self.assertTrue(os.path.isdir(scommondir))
        # Promoting a wrap file copies the .wrap to the top-level subprojects.
        promoted_wrap = os.path.join(spdir, 'athing.wrap')
        self.assertFalse(os.path.isfile(promoted_wrap))
        subprocess.check_call(self.wrap_command + ['promote', 'athing'], cwd=workdir)
        self.assertTrue(os.path.isfile(promoted_wrap))
        # The tree with all promotions applied must configure and build.
        self.init(workdir)
        self.build()
def test_subproject_promotion_wrap(self):
testdir = os.path.join(self.unit_test_dir, '44 promote wrap')
workdir = os.path.join(self.builddir, 'work')
shutil.copytree(testdir, workdir)
spdir = os.path.join(workdir, 'subprojects')
ambiguous_wrap = os.path.join(spdir, 'ambiguous.wrap')
self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'ambiguous'],
cwd=workdir,
stdout=subprocess.DEVNULL), 0)
self.assertFalse(os.path.isfile(ambiguous_wrap))
subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/ambiguous.wrap'], cwd=workdir)
self.assertTrue(os.path.isfile(ambiguous_wrap))
def test_warning_location(self):
tdir = os.path.join(self.unit_test_dir, '22 warning location')
out = self.init(tdir)
for expected in [
r'meson.build:4: WARNING: Keyword argument "link_with" defined multiple times.',
r'sub' + os.path.sep + r'meson.build:3: WARNING: Keyword argument "link_with" defined multiple times.',
r'meson.build:6: WARNING: a warning of some sort',
r'sub' + os.path.sep + r'meson.build:4: WARNING: subdir warning',
r'meson.build:7: WARNING: Module unstable-simd has no backwards or forwards compatibility and might not exist in future releases.',
r"meson.build:11: WARNING: The variable(s) 'MISSING' in the input file 'conf.in' are not present in the given configuration data.",
r'meson.build:1: WARNING: Passed invalid keyword argument "invalid".',
]:
self.assertRegex(out, re.escape(expected))
def test_permitted_method_kwargs(self):
tdir = os.path.join(self.unit_test_dir, '25 non-permitted kwargs')
out = self.init(tdir)
for expected in [
r'WARNING: Passed invalid keyword argument "prefixxx".',
r'WARNING: Passed invalid keyword argument "argsxx".',
r'WARNING: Passed invalid keyword argument "invalidxx".',
]:
self.assertRegex(out, re.escape(expected))
    def test_templates(self):
        """'meson init' templates must configure and build for every language
        for which a compiler can be detected on this machine, both in an
        empty directory and (for c/cpp) next to an existing source file."""
        ninja = detect_ninja()
        if ninja is None:
            raise unittest.SkipTest('This test currently requires ninja. Fix this once "meson build" works.')
        # C is assumed to always be available; probe for the optional
        # languages and extend the template list for each compiler found.
        langs = ['c']
        env = get_fake_env()
        try:
            env.detect_cpp_compiler(MachineChoice.HOST)
            langs.append('cpp')
        except EnvironmentException:
            pass
        try:
            env.detect_d_compiler(MachineChoice.HOST)
            langs.append('d')
        except EnvironmentException:
            pass
        try:
            env.detect_fortran_compiler(MachineChoice.HOST)
            if is_windows() or platform.machine().lower() != 'e2k':
                # Elbrus Fortran compiler can't generate debug information
                langs.append('fortran')
        except EnvironmentException:
            pass
        try:
            env.detect_objc_compiler(MachineChoice.HOST)
            langs.append('objc')
        except EnvironmentException:
            pass
        # FIXME: omitting rust as Windows AppVeyor CI finds Rust but doesn't link correctly
        for lang in langs:
            for target_type in ('executable', 'library'):
                # test empty directory
                with tempfile.TemporaryDirectory() as tmpdir:
                    self._run(self.meson_command + ['init', '--language', lang, '--type', target_type],
                              workdir=tmpdir)
                    self._run(self.setup_command + ['--backend=ninja', 'builddir'],
                              workdir=tmpdir)
                    self._run(ninja,
                              workdir=os.path.join(tmpdir, 'builddir'))
                # test directory with existing code file
                if lang in ('c', 'cpp'):
                    with tempfile.TemporaryDirectory() as tmpdir:
                        with open(os.path.join(tmpdir, 'foo.' + lang), 'w') as f:
                            f.write('int main(void) {}')
                        # '-b' also builds the generated project in one step.
                        self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)
    # The test uses mocking and thus requires that
    # the current process is the one to run the Meson steps.
    # If we are using an external test executable (most commonly
    # in Debian autopkgtests) then the mocking won't work.
    @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.')
    def test_cross_file_system_paths(self):
        """Bare cross-file names must be resolved against the XDG system
        locations: $XDG_DATA_HOME/meson/cross, $XDG_DATA_DIRS/meson/cross
        and the ~/.local/share/meson/cross fallback."""
        if is_windows():
            raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)')
        # Pick a compiler binary name that exists on the platform.
        if is_sunos():
            cc = 'gcc'
        else:
            cc = 'cc'
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        cross_content = textwrap.dedent("""\
            [binaries]
            c = '/usr/bin/{}'
            ar = '/usr/bin/ar'
            strip = '/usr/bin/ar'
            [properties]
            [host_machine]
            system = 'linux'
            cpu_family = 'x86'
            cpu = 'i686'
            endian = 'little'
            """.format(cc))
        # Case 1 and 2: file placed under $XDG_DATA_HOME / $XDG_DATA_DIRS.
        with tempfile.TemporaryDirectory() as d:
            dir_ = os.path.join(d, 'meson', 'cross')
            os.makedirs(dir_)
            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
                f.write(cross_content)
            name = os.path.basename(f.name)
            with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
                self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
                self.wipe()
            with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
                os.environ.pop('XDG_DATA_HOME', None)
                self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
                self.wipe()
        # Case 3: the ~/.local/share fallback, exercised by faking '~'.
        with tempfile.TemporaryDirectory() as d:
            dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
            os.makedirs(dir_)
            with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
                f.write(cross_content)
            name = os.path.basename(f.name)
            # If XDG_DATA_HOME is set in the environment running the
            # tests this test will fail, so mock the environment, pop
            # it, then test
            with mock.patch.dict(os.environ):
                os.environ.pop('XDG_DATA_HOME', None)
                with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
                    self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
                    self.wipe()
def test_compiler_run_command(self):
'''
The test checks that the compiler object can be passed to
run_command().
'''
testdir = os.path.join(self.unit_test_dir, '24 compiler run_command')
self.init(testdir)
def test_identical_target_name_in_subproject_flat_layout(self):
'''
Test that identical targets in different subprojects do not collide
if layout is flat.
'''
testdir = os.path.join(self.common_test_dir, '177 identical target name in subproject flat layout')
self.init(testdir, extra_args=['--layout=flat'])
self.build()
def test_identical_target_name_in_subdir_flat_layout(self):
'''
Test that identical targets in different subdirs do not collide
if layout is flat.
'''
testdir = os.path.join(self.common_test_dir, '186 same target name flat layout')
self.init(testdir, extra_args=['--layout=flat'])
self.build()
def test_flock(self):
exception_raised = False
with tempfile.TemporaryDirectory() as tdir:
os.mkdir(os.path.join(tdir, 'meson-private'))
with BuildDirLock(tdir):
try:
with BuildDirLock(tdir):
pass
except MesonException:
exception_raised = True
self.assertTrue(exception_raised, 'Double locking did not raise exception.')
    @unittest.skipIf(is_osx(), 'Test not applicable to OSX')
    def test_check_module_linking(self):
        """
        Test that link_with: a shared module issues a warning
        https://github.com/mesonbuild/meson/issues/2865
        (That an error is raised on OSX is exercised by test failing/78)
        """
        tdir = os.path.join(self.unit_test_dir, '30 shared_mod linking')
        out = self.init(tdir)
        # The expected warning spans two lines of configure output; the
        # literal must match the emitted text exactly, including the newline.
        msg = ('''WARNING: target links against shared modules. This is not
recommended as it is not supported on some platforms''')
        self.assertIn(msg, out)
def test_ndebug_if_release_disabled(self):
testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release')
self.init(testdir, extra_args=['--buildtype=release', '-Db_ndebug=if-release'])
self.build()
exe = os.path.join(self.builddir, 'main')
self.assertEqual(b'NDEBUG=1', subprocess.check_output(exe).strip())
def test_ndebug_if_release_enabled(self):
testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release')
self.init(testdir, extra_args=['--buildtype=debugoptimized', '-Db_ndebug=if-release'])
self.build()
exe = os.path.join(self.builddir, 'main')
self.assertEqual(b'NDEBUG=0', subprocess.check_output(exe).strip())
def test_guessed_linker_dependencies(self):
'''
Test that meson adds dependencies for libraries based on the final
linker command line.
'''
testdirbase = os.path.join(self.unit_test_dir, '29 guessed linker dependencies')
testdirlib = os.path.join(testdirbase, 'lib')
extra_args = None
libdir_flags = ['-L']
env = get_fake_env(testdirlib, self.builddir, self.prefix)
if env.detect_c_compiler(MachineChoice.HOST).get_id() in {'msvc', 'clang-cl', 'intel-cl'}:
# msvc-like compiler, also test it with msvc-specific flags
libdir_flags += ['/LIBPATH:', '-LIBPATH:']
else:
# static libraries are not linkable with -l with msvc because meson installs them
# as .a files which unix_args_to_native will not know as it expects libraries to use
# .lib as extension. For a DLL the import library is installed as .lib. Thus for msvc
# this tests needs to use shared libraries to test the path resolving logic in the
# dependency generation code path.
extra_args = ['--default-library', 'static']
initial_builddir = self.builddir
initial_installdir = self.installdir
for libdir_flag in libdir_flags:
# build library
self.new_builddir()
self.init(testdirlib, extra_args=extra_args)
self.build()
self.install()
libbuilddir = self.builddir
installdir = self.installdir
libdir = os.path.join(self.installdir, self.prefix.lstrip('/').lstrip('\\'), 'lib')
# build user of library
self.new_builddir()
# replace is needed because meson mangles platform paths passed via LDFLAGS
self.init(os.path.join(testdirbase, 'exe'),
override_envvars={"LDFLAGS": '{}{}'.format(libdir_flag, libdir.replace('\\', '/'))})
self.build()
self.assertBuildIsNoop()
# rebuild library
exebuilddir = self.builddir
self.installdir = installdir
self.builddir = libbuilddir
# Microsoft's compiler is quite smart about touching import libs on changes,
# so ensure that there is actually a change in symbols.
self.setconf('-Dmore_exports=true')
self.build()
self.install()
# no ensure_backend_detects_changes needed because self.setconf did that already
# assert user of library will be rebuild
self.builddir = exebuilddir
self.assertRebuiltTarget('app')
# restore dirs for the next test case
self.installdir = initial_builddir
self.builddir = initial_installdir
def test_conflicting_d_dash_option(self):
testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
with self.assertRaises(subprocess.CalledProcessError) as e:
self.init(testdir, extra_args=['-Dbindir=foo', '--bindir=bar'])
# Just to ensure that we caught the correct error
self.assertIn('passed as both', e.stderr)
def _test_same_option_twice(self, arg, args):
testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
self.init(testdir, extra_args=args)
opts = self.introspect('--buildoptions')
for item in opts:
if item['name'] == arg:
self.assertEqual(item['value'], 'bar')
return
raise Exception('Missing {} value?'.format(arg))
def test_same_dash_option_twice(self):
self._test_same_option_twice('bindir', ['--bindir=foo', '--bindir=bar'])
def test_same_d_option_twice(self):
self._test_same_option_twice('bindir', ['-Dbindir=foo', '-Dbindir=bar'])
def test_same_project_d_option_twice(self):
self._test_same_option_twice('one', ['-Done=foo', '-Done=bar'])
def _test_same_option_twice_configure(self, arg, args):
testdir = os.path.join(self.unit_test_dir, '37 mixed command line args')
self.init(testdir)
self.setconf(args)
opts = self.introspect('--buildoptions')
for item in opts:
if item['name'] == arg:
self.assertEqual(item['value'], 'bar')
return
raise Exception('Missing {} value?'.format(arg))
def test_same_dash_option_twice_configure(self):
self._test_same_option_twice_configure(
'bindir', ['--bindir=foo', '--bindir=bar'])
def test_same_d_option_twice_configure(self):
self._test_same_option_twice_configure(
'bindir', ['-Dbindir=foo', '-Dbindir=bar'])
def test_same_project_d_option_twice_configure(self):
self._test_same_option_twice_configure(
'one', ['-Done=foo', '-Done=bar'])
    def test_command_line(self):
        """End-to-end checks of command-line option handling: defaults,
        --warnlevel vs -Dwarning_level, forbidden mixing of the two
        spellings, unknown/malformed options, subproject options, shell-style
        splitting of c_args, and last-assignment-wins for duplicates."""
        testdir = os.path.join(self.unit_test_dir, '34 command line')
        # Verify default values when passing no args
        self.init(testdir)
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['default_library'].value, 'static')
        self.assertEqual(obj.builtins['warning_level'].value, '1')
        self.assertEqual(obj.user_options['set_sub_opt'].value, True)
        self.assertEqual(obj.user_options['subp:subp_opt'].value, 'default3')
        self.wipe()
        # warning_level is special, it's --warnlevel instead of --warning-level
        # for historical reasons
        self.init(testdir, extra_args=['--warnlevel=2'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '2')
        self.setconf('--warnlevel=3')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '3')
        self.wipe()
        # But when using -D syntax, it should be 'warning_level'
        self.init(testdir, extra_args=['-Dwarning_level=2'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '2')
        self.setconf('-Dwarning_level=3')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '3')
        self.wipe()
        # Mixing --option and -Doption is forbidden
        with self.assertRaises(subprocess.CalledProcessError) as cm:
            self.init(testdir, extra_args=['--warnlevel=1', '-Dwarning_level=3'])
        self.assertNotEqual(0, cm.exception.returncode)
        self.assertIn('as both', cm.exception.output)
        self.init(testdir)
        with self.assertRaises(subprocess.CalledProcessError) as cm:
            self.setconf(['--warnlevel=1', '-Dwarning_level=3'])
        self.assertNotEqual(0, cm.exception.returncode)
        self.assertIn('as both', cm.exception.output)
        self.wipe()
        # --default-library should override default value from project()
        self.init(testdir, extra_args=['--default-library=both'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['default_library'].value, 'both')
        self.setconf('--default-library=shared')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['default_library'].value, 'shared')
        if self.backend is Backend.ninja:
            # reconfigure target works only with ninja backend
            self.build('reconfigure')
            obj = mesonbuild.coredata.load(self.builddir)
            self.assertEqual(obj.builtins['default_library'].value, 'shared')
        self.wipe()
        # Should warn on unknown options
        out = self.init(testdir, extra_args=['-Dbad=1', '-Dfoo=2', '-Dwrong_link_args=foo'])
        self.assertIn('Unknown options: "bad, foo, wrong_link_args"', out)
        self.wipe()
        # Should fail on malformed option
        with self.assertRaises(subprocess.CalledProcessError) as cm:
            self.init(testdir, extra_args=['-Dfoo'])
        self.assertNotEqual(0, cm.exception.returncode)
        self.assertIn('Option \'foo\' must have a value separated by equals sign.', cm.exception.output)
        self.init(testdir)
        with self.assertRaises(subprocess.CalledProcessError) as cm:
            self.setconf('-Dfoo')
        self.assertNotEqual(0, cm.exception.returncode)
        self.assertIn('Option \'foo\' must have a value separated by equals sign.', cm.exception.output)
        self.wipe()
        # It is not an error to set wrong option for unknown subprojects or
        # language because we don't have control on which one will be selected.
        self.init(testdir, extra_args=['-Dc_wrong=1', '-Dwrong:bad=1', '-Db_wrong=1'])
        self.wipe()
        # Test we can set subproject option
        self.init(testdir, extra_args=['-Dsubp:subp_opt=foo'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.user_options['subp:subp_opt'].value, 'foo')
        self.wipe()
        # c_args value should be parsed with split_args
        self.init(testdir, extra_args=['-Dc_args=-Dfoo -Dbar "-Dthird=one two"'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.compiler_options.host['c_args'].value, ['-Dfoo', '-Dbar', '-Dthird=one two'])
        self.setconf('-Dc_args="foo bar" one two')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo bar', 'one', 'two'])
        self.wipe()
        # A '%' in an option value must survive the round trip unmangled.
        self.init(testdir, extra_args=['-Dset_percent_opt=myoption%'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.user_options['set_percent_opt'].value, 'myoption%')
        self.wipe()
        # Setting a 2nd time the same option should override the first value
        try:
            self.init(testdir, extra_args=['--bindir=foo', '--bindir=bar',
                                           '-Dbuildtype=plain', '-Dbuildtype=release',
                                           '-Db_sanitize=address', '-Db_sanitize=thread',
                                           '-Dc_args=-Dfoo', '-Dc_args=-Dbar'])
            obj = mesonbuild.coredata.load(self.builddir)
            self.assertEqual(obj.builtins['bindir'].value, 'bar')
            self.assertEqual(obj.builtins['buildtype'].value, 'release')
            self.assertEqual(obj.base_options['b_sanitize'].value, 'thread')
            self.assertEqual(obj.compiler_options.host['c_args'].value, ['-Dbar'])
            self.setconf(['--bindir=bar', '--bindir=foo',
                          '-Dbuildtype=release', '-Dbuildtype=plain',
                          '-Db_sanitize=thread', '-Db_sanitize=address',
                          '-Dc_args=-Dbar', '-Dc_args=-Dfoo'])
            obj = mesonbuild.coredata.load(self.builddir)
            self.assertEqual(obj.builtins['bindir'].value, 'foo')
            self.assertEqual(obj.builtins['buildtype'].value, 'plain')
            self.assertEqual(obj.base_options['b_sanitize'].value, 'address')
            self.assertEqual(obj.compiler_options.host['c_args'].value, ['-Dfoo'])
            self.wipe()
        except KeyError:
            # Ignore KeyError, it happens on CI for compilers that does not
            # support b_sanitize. We have to test with a base option because
            # they used to fail this test with Meson 0.46 an earlier versions.
            pass
    def test_warning_level_0(self):
        """A project() default of warning_level 0 must be honoured, and must
        still be overridable via both --warnlevel and -Dwarning_level."""
        testdir = os.path.join(self.common_test_dir, '214 warning level 0')
        # Verify default values when passing no args
        self.init(testdir)
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '0')
        self.wipe()
        # verify we can override w/ --warnlevel
        self.init(testdir, extra_args=['--warnlevel=1'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '1')
        self.setconf('--warnlevel=0')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '0')
        self.wipe()
        # verify we can override w/ -Dwarning_level
        self.init(testdir, extra_args=['-Dwarning_level=1'])
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '1')
        self.setconf('-Dwarning_level=0')
        obj = mesonbuild.coredata.load(self.builddir)
        self.assertEqual(obj.builtins['warning_level'].value, '0')
        self.wipe()
    def test_feature_check_usage_subprojects(self):
        """FeatureNew/FeatureDeprecated warnings must be reported for both
        the parent project and (prefixed with '|') its subprojects, and must
        be suppressed when a subproject targets a new enough meson_version."""
        testdir = os.path.join(self.unit_test_dir, '41 featurenew subprojects')
        out = self.init(testdir)
        # Parent project warns correctly
        self.assertRegex(out, "WARNING: Project targeting '>=0.45'.*'0.47.0': dict")
        # Subprojects warn correctly
        self.assertRegex(out, r"\|WARNING: Project targeting '>=0.40'.*'0.44.0': disabler")
        self.assertRegex(out, r"\|WARNING: Project targeting '!=0.40'.*'0.44.0': disabler")
        # Subproject has a new-enough meson_version, no warning
        self.assertNotRegex(out, "WARNING: Project targeting.*Python")
        # Ensure a summary is printed in the subproject and the outer project
        self.assertRegex(out, r"\|WARNING: Project specifies a minimum meson_version '>=0.40'")
        self.assertRegex(out, r"\| \* 0.44.0: {'disabler'}")
        self.assertRegex(out, "WARNING: Project specifies a minimum meson_version '>=0.45'")
        self.assertRegex(out, " * 0.47.0: {'dict'}")
    def test_configure_file_warnings(self):
        """configure_file() must warn about missing/unused variables and
        output-file collisions, and must stay silent for the legitimate
        cases; also check the actual contents written for the nosubst files."""
        testdir = os.path.join(self.common_test_dir, "14 configure file")
        out = self.init(testdir)
        self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*")
        self.assertRegex(out, "WARNING:.*'FOO_BAR'.*nosubst-nocopy2.txt.in.*not present.*")
        self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*")
        self.assertRegex(out, "WARNING:.*empty configuration_data.*test.py.in")
        # Warnings for configuration files that are overwritten.
        self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites")
        self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites")
        self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites")
        self.assertNotRegex(out, "WARNING:.*@BASENAME@.*overwrites")
        self.assertRegex(out, "WARNING:.*\"sameafterbasename\".*overwrites")
        # No warnings about empty configuration data objects passed to files with substitutions
        self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in")
        self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in")
        # Undefined variables are rendered as commented-out #undef lines.
        with open(os.path.join(self.builddir, 'nosubst-nocopy1.txt'), 'rb') as f:
            self.assertEqual(f.read().strip(), b'/* #undef FOO_BAR */')
        with open(os.path.join(self.builddir, 'nosubst-nocopy2.txt'), 'rb') as f:
            self.assertEqual(f.read().strip(), b'')
        self.assertRegex(out, r"DEPRECATION:.*\['array'\] is invalid.*dict")
def test_dirs(self):
with tempfile.TemporaryDirectory() as containing:
with tempfile.TemporaryDirectory(dir=containing) as srcdir:
mfile = os.path.join(srcdir, 'meson.build')
of = open(mfile, 'w')
of.write("project('foobar', 'c')\n")
of.close()
pc = subprocess.run(self.setup_command,
cwd=srcdir,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL)
self.assertIn(b'Must specify at least one directory name', pc.stdout)
with tempfile.TemporaryDirectory(dir=srcdir) as builddir:
subprocess.run(self.setup_command,
check=True,
cwd=builddir,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
def get_opts_as_dict(self):
result = {}
for i in self.introspect('--buildoptions'):
result[i['name']] = i['value']
return result
    def test_buildtype_setting(self):
        """buildtype and its component options (debug, optimization) must
        stay in sync when either side is changed via setconf."""
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir)
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['buildtype'], 'debug')
        self.assertEqual(opts['debug'], True)
        # Turning debug off demotes buildtype to plain.
        self.setconf('-Ddebug=false')
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['debug'], False)
        self.assertEqual(opts['buildtype'], 'plain')
        self.assertEqual(opts['optimization'], '0')
        # Setting optimizations to 3 should cause buildtype
        # to go to release mode.
        self.setconf('-Doptimization=3')
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['buildtype'], 'release')
        self.assertEqual(opts['debug'], False)
        self.assertEqual(opts['optimization'], '3')
        # Going to debug build type should reset debugging
        # and optimization
        self.setconf('-Dbuildtype=debug')
        opts = self.get_opts_as_dict()
        self.assertEqual(opts['buildtype'], 'debug')
        self.assertEqual(opts['debug'], True)
        self.assertEqual(opts['optimization'], '0')
    @skipIfNoPkgconfig
    @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows')
    def test_native_dep_pkgconfig(self):
        """In a cross build, dependency(..., native: true/false) must use the
        native pkg-config vs the cross file's pkgconfig binary respectively."""
        testdir = os.path.join(self.unit_test_dir,
                               '46 native dep pkgconfig var')
        # Write a cross file whose pkgconfig entry points at the test's fake
        # cross pkg-config script.
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
            crossfile.write(textwrap.dedent(
                '''[binaries]
                pkgconfig = r'{0}'
                [properties]
                [host_machine]
                system = 'linux'
                cpu_family = 'arm'
                cpu = 'armv7'
                endian = 'little'
                '''.format(os.path.join(testdir, 'cross_pkgconfig.py'))))
            crossfile.flush()
            self.meson_cross_file = crossfile.name
        # Point the native pkg-config at the test's native .pc files.
        env = {'PKG_CONFIG_LIBDIR': os.path.join(testdir,
                                                 'native_pkgconfig')}
        self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env)
        self.wipe()
        self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env)
def __reconfigure(self, change_minor=False):
# Set an older version to force a reconfigure from scratch
filename = os.path.join(self.privatedir, 'coredata.dat')
with open(filename, 'rb') as f:
obj = pickle.load(f)
if change_minor:
v = mesonbuild.coredata.version.split('.')
obj.version = '.'.join(v[0:2] + [str(int(v[2]) + 1)])
else:
obj.version = '0.47.0'
with open(filename, 'wb') as f:
pickle.dump(obj, f)
    def test_reconfigure(self):
        """--reconfigure after a version mismatch must regenerate from
        scratch while keeping previously-set option values; --wipe must
        clear the build dir (including stray files) and accept new options."""
        testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
        self.init(testdir, extra_args=['-Dopt1=val1'])
        self.setconf('-Dopt2=val2')
        # Fake an old coredata version to force regeneration from scratch.
        self.__reconfigure()
        out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
        self.assertRegex(out, 'WARNING:.*Regenerating configuration from scratch')
        # All options set so far must survive the regeneration.
        self.assertRegex(out, 'opt1 val1')
        self.assertRegex(out, 'opt2 val2')
        self.assertRegex(out, 'opt3 val3')
        self.assertRegex(out, 'opt4 default4')
        self.build()
        self.run_tests()
        # Create a file in builddir and verify wipe command removes it
        filename = os.path.join(self.builddir, 'something')
        open(filename, 'w').close()
        self.assertTrue(os.path.exists(filename))
        out = self.init(testdir, extra_args=['--wipe', '-Dopt4=val4'])
        self.assertFalse(os.path.exists(filename))
        self.assertRegex(out, 'opt1 val1')
        self.assertRegex(out, 'opt2 val2')
        self.assertRegex(out, 'opt3 val3')
        self.assertRegex(out, 'opt4 val4')
        self.build()
        self.run_tests()
def test_wipe_from_builddir(self):
testdir = os.path.join(self.common_test_dir, '161 custom target subdir depend files')
self.init(testdir)
self.__reconfigure()
with Path(self.builddir):
self.init(testdir, extra_args=['--wipe'])
    def test_minor_version_does_not_reconfigure_wipe(self):
        """A patch-level version difference in the stored coredata must NOT
        trigger a from-scratch regeneration, and options must be preserved."""
        testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
        self.init(testdir, extra_args=['-Dopt1=val1'])
        self.setconf('-Dopt2=val2')
        # Only bump the last version component; major.minor still match.
        self.__reconfigure(change_minor=True)
        out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
        self.assertNotRegex(out, 'WARNING:.*Regenerating configuration from scratch')
        self.assertRegex(out, 'opt1 val1')
        self.assertRegex(out, 'opt2 val2')
        self.assertRegex(out, 'opt3 val3')
        self.assertRegex(out, 'opt4 default4')
        self.build()
        self.run_tests()
def test_target_construct_id_from_path(self):
# This id is stable but not guessable.
# The test is supposed to prevent unintentional
# changes of target ID generation.
target_id = Target.construct_id_from_path('some/obscure/subdir',
'target-id', '@suffix')
self.assertEqual('5e002d3@@target-id@suffix', target_id)
target_id = Target.construct_id_from_path('subproject/foo/subdir/bar',
'target2-id', '@other')
self.assertEqual('81d46d1@@target2-id@other', target_id)
    def test_introspect_projectinfo_without_configured_build(self):
        """--projectinfo straight from a meson.build (no build dir) must
        report buildsystem files, version, name and subprojects correctly
        for projects with no options, with options, and with subprojects."""
        testfile = os.path.join(self.common_test_dir, '35 run program', 'meson.build')
        res = self.introspect_directory(testfile, '--projectinfo')
        self.assertEqual(set(res['buildsystem_files']), set(['meson.build']))
        self.assertEqual(res['version'], 'undefined')
        self.assertEqual(res['descriptive_name'], 'run command')
        self.assertEqual(res['subprojects'], [])
        testfile = os.path.join(self.common_test_dir, '43 options', 'meson.build')
        res = self.introspect_directory(testfile, '--projectinfo')
        self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
        self.assertEqual(res['version'], 'undefined')
        self.assertEqual(res['descriptive_name'], 'options')
        self.assertEqual(res['subprojects'], [])
        testfile = os.path.join(self.common_test_dir, '46 subproject options', 'meson.build')
        res = self.introspect_directory(testfile, '--projectinfo')
        self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
        self.assertEqual(res['version'], 'undefined')
        self.assertEqual(res['descriptive_name'], 'suboptions')
        self.assertEqual(len(res['subprojects']), 1)
        # Normalize Windows separators before comparing subproject paths.
        subproject_files = set(f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files'])
        self.assertEqual(subproject_files, set(['subprojects/subproject/meson_options.txt', 'subprojects/subproject/meson.build']))
        self.assertEqual(res['subprojects'][0]['name'], 'subproject')
        self.assertEqual(res['subprojects'][0]['version'], 'undefined')
        self.assertEqual(res['subprojects'][0]['descriptive_name'], 'subproject')
def test_introspect_projectinfo_subprojects(self):
testdir = os.path.join(self.common_test_dir, '102 subproject subdir')
self.init(testdir)
res = self.introspect('--projectinfo')
expected = {
'descriptive_name': 'proj',
'version': 'undefined',
'subproject_dir': 'subprojects',
'subprojects': [
{
'descriptive_name': 'sub',
'name': 'sub',
'version': 'undefined'
}
]
}
self.assertDictEqual(res, expected)
def test_introspection_target_subproject(self):
testdir = os.path.join(self.common_test_dir, '45 subproject')
self.init(testdir)
res = self.introspect('--targets')
expected = {
'sublib': 'sublib',
'simpletest': 'sublib',
'user': None
}
for entry in res:
name = entry['name']
self.assertEqual(entry['subproject'], expected[name])
def test_introspect_projectinfo_subproject_dir(self):
testdir = os.path.join(self.common_test_dir, '78 custom subproject dir')
self.init(testdir)
res = self.introspect('--projectinfo')
self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
def test_introspect_projectinfo_subproject_dir_from_source(self):
testfile = os.path.join(self.common_test_dir, '78 custom subproject dir', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
    @skipIfNoExecutable('clang-format')
    def test_clang_format(self):
        """The clang-format target must rewrite badly formatted sources and
        headers to match the expected formatting."""
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Clang-format is for now only supported on Ninja, not {}'.format(self.backend.name))
        testdir = os.path.join(self.unit_test_dir, '54 clang-format')
        # prog.c / header.h are created from the *_orig_* fixtures and
        # compared against the *_expected_* fixtures after formatting.
        testfile = os.path.join(testdir, 'prog.c')
        badfile = os.path.join(testdir, 'prog_orig_c')
        goodfile = os.path.join(testdir, 'prog_expected_c')
        testheader = os.path.join(testdir, 'header.h')
        badheader = os.path.join(testdir, 'header_orig_h')
        goodheader = os.path.join(testdir, 'header_expected_h')
        try:
            shutil.copyfile(badfile, testfile)
            shutil.copyfile(badheader, testheader)
            self.init(testdir)
            # Sanity check: before formatting the files must differ ...
            self.assertNotEqual(Path(testfile).read_text(),
                                Path(goodfile).read_text())
            self.assertNotEqual(Path(testheader).read_text(),
                                Path(goodheader).read_text())
            self.run_target('clang-format')
            # ... and afterwards the header must match the expected output.
            self.assertEqual(Path(testheader).read_text(),
                             Path(goodheader).read_text())
        finally:
            # Remove the generated files so the source tree stays clean.
            if os.path.exists(testfile):
                os.unlink(testfile)
            if os.path.exists(testheader):
                os.unlink(testheader)
@skipIfNoExecutable('clang-tidy')
def test_clang_tidy(self):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Clang-tidy is for now only supported on Ninja, not {}'.format(self.backend.name))
if shutil.which('c++') is None:
raise unittest.SkipTest('Clang-tidy breaks when ccache is used and "c++" not in path.')
if is_osx():
raise unittest.SkipTest('Apple ships a broken clang-tidy that chokes on -pipe.')
testdir = os.path.join(self.unit_test_dir, '70 clang-tidy')
self.init(testdir, override_envvars={'CXX': 'c++'})
out = self.run_target('clang-tidy')
self.assertIn('cttest.cpp:4:20', out)
def test_identity_cross(self):
testdir = os.path.join(self.unit_test_dir, '71 cross')
# Do a build to generate a cross file where the host is this target
self.init(testdir, extra_args=['-Dgenerate=true'])
self.meson_cross_file = os.path.join(self.builddir, "crossfile")
self.assertTrue(os.path.exists(self.meson_cross_file))
# Now verify that this is detected as cross
self.new_builddir()
self.init(testdir)
def test_introspect_buildoptions_without_configured_build(self):
testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions')
testfile = os.path.join(testdir, 'meson.build')
res_nb = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args)
self.init(testdir, default_args=False)
res_wb = self.introspect('--buildoptions')
self.maxDiff = None
self.assertListEqual(res_nb, res_wb)
def test_meson_configure_from_source_does_not_crash(self):
testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions')
self._run(self.mconf_command + [testdir])
def test_introspect_json_dump(self):
    '''
    Validate the intro-*.json files written to meson-info/ after
    configuring: every section file must exist and parse, every entry
    must carry the expected keys with the expected value types, and a
    few known values from the '57 introspection' test project must be
    present (tests, build options, dependencies, project info, targets).
    '''
    testdir = os.path.join(self.unit_test_dir, '57 introspection')
    self.init(testdir)
    infodir = os.path.join(self.builddir, 'meson-info')
    self.assertPathExists(infodir)
    def assertKeyTypes(key_type_list, obj):
        # Assert that each (key, type) pair exists in obj with a value
        # of the right type.
        for i in key_type_list:
            self.assertIn(i[0], obj)
            self.assertIsInstance(obj[i[0]], i[1])
    # (key, expected type) schemas for the introspection sections.
    root_keylist = [
        ('benchmarks', list),
        ('buildoptions', list),
        ('buildsystem_files', list),
        ('dependencies', list),
        ('installed', dict),
        ('projectinfo', dict),
        ('targets', list),
        ('tests', list),
    ]
    test_keylist = [
        ('cmd', list),
        ('env', dict),
        ('name', str),
        ('timeout', int),
        ('suite', list),
        ('is_parallel', bool),
        ('protocol', str),
    ]
    buildoptions_keylist = [
        ('name', str),
        ('section', str),
        ('type', str),
        ('description', str),
        ('machine', str),
    ]
    # (option type name, python value type, extra per-type keys)
    buildoptions_typelist = [
        ('combo', str, [('choices', list)]),
        ('string', str, []),
        ('boolean', bool, []),
        ('integer', int, []),
        ('array', list, []),
    ]
    buildoptions_sections = ['core', 'backend', 'base', 'compiler', 'directory', 'user', 'test']
    buildoptions_machines = ['any', 'build', 'host']
    dependencies_typelist = [
        ('name', str),
        ('version', str),
        ('compile_args', list),
        ('link_args', list),
    ]
    targets_typelist = [
        ('name', str),
        ('id', str),
        ('type', str),
        ('defined_in', str),
        ('filename', list),
        ('build_by_default', bool),
        ('target_sources', list),
        ('installed', bool),
    ]
    targets_sources_typelist = [
        ('language', str),
        ('compiler', list),
        ('parameters', list),
        ('sources', list),
        ('generated_sources', list),
    ]
    # First load all files
    res = {}
    for i in root_keylist:
        curr = os.path.join(infodir, 'intro-{}.json'.format(i[0]))
        self.assertPathExists(curr)
        with open(curr, 'r') as fp:
            res[i[0]] = json.load(fp)
    assertKeyTypes(root_keylist, res)
    # Check Tests and benchmarks
    tests_to_find = ['test case 1', 'test case 2', 'benchmark 1']
    for i in res['benchmarks'] + res['tests']:
        assertKeyTypes(test_keylist, i)
        if i['name'] in tests_to_find:
            tests_to_find.remove(i['name'])
    # Every expected test/benchmark must have been seen exactly once.
    self.assertListEqual(tests_to_find, [])
    # Check buildoptions
    buildopts_to_find = {'cpp_std': 'c++11'}
    for i in res['buildoptions']:
        assertKeyTypes(buildoptions_keylist, i)
        valid_type = False
        for j in buildoptions_typelist:
            if i['type'] == j[0]:
                self.assertIsInstance(i['value'], j[1])
                assertKeyTypes(j[2], i)
                valid_type = True
                break
        self.assertIn(i['section'], buildoptions_sections)
        self.assertIn(i['machine'], buildoptions_machines)
        self.assertTrue(valid_type)
        if i['name'] in buildopts_to_find:
            self.assertEqual(i['value'], buildopts_to_find[i['name']])
            buildopts_to_find.pop(i['name'], None)
    self.assertDictEqual(buildopts_to_find, {})
    # Check buildsystem_files
    bs_files = ['meson.build', 'meson_options.txt', 'sharedlib/meson.build', 'staticlib/meson.build']
    bs_files = [os.path.join(testdir, x) for x in bs_files]
    self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files)))
    # Check dependencies
    dependencies_to_find = ['threads']
    for i in res['dependencies']:
        assertKeyTypes(dependencies_typelist, i)
        if i['name'] in dependencies_to_find:
            dependencies_to_find.remove(i['name'])
    self.assertListEqual(dependencies_to_find, [])
    # Check projectinfo
    self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subproject_dir': 'subprojects', 'subprojects': []})
    # Check targets
    # name -> (type, build_by_default, installed, defined_in)
    targets_to_find = {
        'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build'),
        'staticTestLib': ('static library', True, False, 'staticlib/meson.build'),
        'test1': ('executable', True, True, 'meson.build'),
        'test2': ('executable', True, False, 'meson.build'),
        'test3': ('executable', True, False, 'meson.build'),
    }
    for i in res['targets']:
        assertKeyTypes(targets_typelist, i)
        if i['name'] in targets_to_find:
            tgt = targets_to_find[i['name']]
            self.assertEqual(i['type'], tgt[0])
            self.assertEqual(i['build_by_default'], tgt[1])
            self.assertEqual(i['installed'], tgt[2])
            self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3]))
            targets_to_find.pop(i['name'], None)
        for j in i['target_sources']:
            assertKeyTypes(targets_sources_typelist, j)
    self.assertDictEqual(targets_to_find, {})
def test_introspect_file_dump_equals_all(self):
    '''The per-section intro-*.json dumps, taken together, must equal
    the output of `meson introspect --all`.'''
    src_dir = os.path.join(self.unit_test_dir, '57 introspection')
    self.init(src_dir)
    res_all = self.introspect('--all')
    sections = [
        'benchmarks',
        'buildoptions',
        'buildsystem_files',
        'dependencies',
        'installed',
        'projectinfo',
        'targets',
        'tests',
    ]
    info_dir = os.path.join(self.builddir, 'meson-info')
    self.assertPathExists(info_dir)
    res_file = {}
    for section in sections:
        dump = os.path.join(info_dir, 'intro-{}.json'.format(section))
        self.assertPathExists(dump)
        with open(dump, 'r') as fp:
            res_file[section] = json.load(fp)
    self.assertEqual(res_all, res_file)
def test_introspect_meson_info(self):
    '''meson-info.json must exist after configuration, contain the
    expected top-level keys, and report a clean (error-free) state.'''
    src_dir = os.path.join(self.unit_test_dir, '57 introspection')
    info_file = os.path.join(self.builddir, 'meson-info', 'meson-info.json')
    self.init(src_dir)
    self.assertPathExists(info_file)
    with open(info_file, 'r') as fp:
        info = json.load(fp)
    for key in ('meson_version', 'directories', 'introspection',
                'build_files_updated', 'error'):
        self.assertIn(key, info)
    self.assertEqual(info['error'], False)
    self.assertEqual(info['build_files_updated'], True)
def test_introspect_config_update(self):
    '''intro-buildoptions.json must be rewritten on disk when options
    are changed via `meson configure`.'''
    src_dir = os.path.join(self.unit_test_dir, '57 introspection')
    info_file = os.path.join(self.builddir, 'meson-info', 'intro-buildoptions.json')
    self.init(src_dir)
    self.assertPathExists(info_file)
    with open(info_file, 'r') as fp:
        before = json.load(fp)
    self.setconf('-Dcpp_std=c++14')
    self.setconf('-Dbuildtype=release')
    # Values expected to change after the two setconf calls above
    # (buildtype=release implies optimization=3 and debug=False).
    expected_changes = {
        'cpp_std': 'c++14',
        'build.cpp_std': 'c++14',
        'buildtype': 'release',
        'optimization': '3',
        'debug': False,
    }
    for opt in before:
        if opt['name'] in expected_changes:
            opt['value'] = expected_changes[opt['name']]
    with open(info_file, 'r') as fp:
        after = json.load(fp)
    self.assertListEqual(before, after)
def test_introspect_targets_from_source(self):
    '''
    Targets introspected from a bare meson.build must match the
    intro-targets.json of a configured build, after normalizing the
    fields that only a configured build can know.
    '''
    testdir = os.path.join(self.unit_test_dir, '57 introspection')
    testfile = os.path.join(testdir, 'meson.build')
    introfile = os.path.join(self.builddir, 'meson-info', 'intro-targets.json')
    self.init(testdir)
    self.assertPathExists(introfile)
    with open(introfile, 'r') as fp:
        res_wb = json.load(fp)
    res_nb = self.introspect_directory(testfile, ['--targets'] + self.meson_args)
    # Account for differences in output
    for i in res_wb:
        # Build-dir paths become relative; install_filename and the
        # per-language compiler/parameter grouping are not available
        # without a configured backend, so flatten them.
        i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']]
        if 'install_filename' in i:
            del i['install_filename']
        sources = []
        for j in i['target_sources']:
            sources += j['sources']
        i['target_sources'] = [{
            'language': 'unknown',
            'compiler': [],
            'parameters': [],
            'sources': sources,
            'generated_sources': []
        }]
    self.maxDiff = None
    self.assertListEqual(res_nb, res_wb)
def test_introspect_dependencies_from_source(self):
    '''--scan-dependencies on a bare meson.build must list every
    dependency() call with its metadata, in declaration order.'''
    src_dir = os.path.join(self.unit_test_dir, '57 introspection')
    build_file = os.path.join(src_dir, 'meson.build')
    actual = self.introspect_directory(build_file, ['--scan-dependencies'] + self.meson_args)
    def dep(name, required, version, has_fallback, conditional):
        # Shape of a single --scan-dependencies entry.
        return {
            'name': name,
            'required': required,
            'version': version,
            'has_fallback': has_fallback,
            'conditional': conditional
        }
    expected = [
        dep('threads', True, [], False, False),
        dep('zlib', False, [], False, False),
        dep('bugDep1', True, [], False, False),
        dep('somethingthatdoesnotexist', True, ['>=1.2.3'], False, True),
        dep('look_i_have_a_fallback', True, ['>=1.0.0', '<=99.9.9'], True, True),
    ]
    self.maxDiff = None
    self.assertListEqual(actual, expected)
def test_unstable_coredata(self):
    '''Smoke-test `meson unstable-coredata`: it must run without
    raising on a configured build directory.'''
    src_dir = os.path.join(self.common_test_dir, '1 trivial')
    self.init(src_dir)
    # just test that the command does not fail (e.g. because it throws an exception)
    cmd = [*self.meson_command, 'unstable-coredata', self.builddir]
    self._run(cmd)
@skip_if_no_cmake
def test_cmake_prefix_path(self):
    '''Configuring with -Dcmake_prefix_path pointing at the bundled
    prefix must succeed (exercises CMake dependency lookup).'''
    src_dir = os.path.join(self.unit_test_dir, '64 cmake_prefix_path')
    prefix = os.path.join(src_dir, 'prefix')
    self.init(src_dir, extra_args=['-Dcmake_prefix_path=' + prefix])
@skip_if_no_cmake
def test_cmake_parser(self):
    '''The CMake file parser must handle the constructs in the
    '65 cmake parser' project when resolving from a prefix path.'''
    src_dir = os.path.join(self.unit_test_dir, '65 cmake parser')
    prefix = os.path.join(src_dir, 'prefix')
    self.init(src_dir, extra_args=['-Dcmake_prefix_path=' + prefix])
def test_alias_target(self):
    '''
    An alias_target() must not be built by default, but running the
    alias must build all of its dependencies.
    '''
    if self.backend is Backend.vs:
        # FIXME: This unit test is broken with vs backend, needs investigation
        raise unittest.SkipTest('Skipping alias_target test with {} backend'.format(self.backend.name))
    testdir = os.path.join(self.unit_test_dir, '66 alias target')
    self.init(testdir)
    self.build()
    # A default build must not have produced the alias' dependencies.
    self.assertPathDoesNotExist(os.path.join(self.builddir, 'prog' + exe_suffix))
    self.assertPathDoesNotExist(os.path.join(self.builddir, 'hello.txt'))
    # Building the alias must produce both of them.
    self.run_target('build-all')
    self.assertPathExists(os.path.join(self.builddir, 'prog' + exe_suffix))
    self.assertPathExists(os.path.join(self.builddir, 'hello.txt'))
def test_configure(self):
    '''`meson configure <builddir>` on a configured build must not fail.'''
    src_dir = os.path.join(self.common_test_dir, '2 cpp')
    self.init(src_dir)
    cmd = self.mconf_command + [self.builddir]
    self._run(cmd)
def test_summary(self):
    '''
    The summary() block printed at the end of configure must match the
    expected output, including per-subproject sections.
    '''
    testdir = os.path.join(self.unit_test_dir, '74 summary')
    out = self.init(testdir)
    # NOTE(review): the leading whitespace inside this dedent literal is
    # significant -- it must match meson's summary formatting exactly.
    # It looks whitespace-mangled in this copy of the file; confirm it
    # against actual `meson setup` summary output before editing.
    expected = textwrap.dedent(r'''
    Some Subproject 2.0
    string: bar
    integer: 1
    boolean: True
    My Project 1.0
    Configuration
    Some boolean: False
    Another boolean: True
    Some string: Hello World
    A list: string
    1
    True
    empty list:
    A number: 1
    yes: YES
    no: NO
    Subprojects
    sub: YES
    sub2: NO
    ''')
    # Locate the summary inside the full configure output by its first
    # line, then compare exactly that many lines.
    expected_lines = expected.split('\n')[1:]
    out_start = out.find(expected_lines[0])
    out_lines = out[out_start:].split('\n')[:len(expected_lines)]
    if sys.version_info < (3, 7, 0):
        # Dictionary order is not stable in Python <3.7, so sort the lines
        # while comparing
        self.assertEqual(sorted(expected_lines), sorted(out_lines))
    else:
        self.assertEqual(expected_lines, out_lines)
class FailureTests(BasePlatformTests):
    '''
    Tests that test failure conditions. Build files here should be dynamically
    generated and static tests should go into `test cases/failing*`.
    This is useful because there can be many ways in which a particular
    function can fail, and creating failing tests for all of them is tedious
    and slows down testing.
    '''
    # Regex matching meson's "dependency not found" error message.
    dnf = "[Dd]ependency.*not found(:.*)?"
    # Regex matching meson's "pkg-config not found" error message.
    nopkg = '[Pp]kg-config.*not found'

    def setUp(self):
        super().setUp()
        # Each test writes its own meson.build / meson_options.txt into a
        # fresh temporary source directory.
        self.srcdir = os.path.realpath(tempfile.mkdtemp())
        self.mbuild = os.path.join(self.srcdir, 'meson.build')
        self.moptions = os.path.join(self.srcdir, 'meson_options.txt')

    def tearDown(self):
        super().tearDown()
        windows_proof_rmtree(self.srcdir)

    def assertMesonRaises(self, contents, match, *,
                          extra_args=None,
                          langs=None,
                          meson_version=None,
                          options=None,
                          override_envvars=None):
        '''
        Assert that running meson configure on the specified @contents raises
        a error message matching regex @match.
        '''
        if langs is None:
            langs = []
        with open(self.mbuild, 'w') as f:
            f.write("project('failure test', 'c', 'cpp'")
            if meson_version:
                f.write(", meson_version: '{}'".format(meson_version))
            f.write(")\n")
            for lang in langs:
                f.write("add_languages('{}', required : false)\n".format(lang))
            f.write(contents)
        if options is not None:
            with open(self.moptions, 'w') as f:
                f.write(options)
        o = {'MESON_FORCE_BACKTRACE': '1'}
        if override_envvars is None:
            override_envvars = o
        else:
            # Merge into a copy: mutating the caller-supplied dict here
            # would leak MESON_FORCE_BACKTRACE into the caller's state.
            override_envvars = {**override_envvars, **o}
        # Force tracebacks so we can detect them properly
        with self.assertRaisesRegex(MesonException, match, msg=contents):
            # Must run in-process or we'll get a generic CalledProcessError
            self.init(self.srcdir, extra_args=extra_args,
                      inprocess=True,
                      override_envvars = override_envvars)

    def obtainMesonOutput(self, contents, match, extra_args, langs, meson_version=None):
        '''Write @contents into a fresh meson.build and return the output
        of configuring it. @match is unused here; it is kept so callers can
        pass the same argument tuple as for the assert helpers.'''
        if langs is None:
            langs = []
        with open(self.mbuild, 'w') as f:
            f.write("project('output test', 'c', 'cpp'")
            if meson_version:
                f.write(", meson_version: '{}'".format(meson_version))
            f.write(")\n")
            for lang in langs:
                f.write("add_languages('{}', required : false)\n".format(lang))
            f.write(contents)
        # Run in-process for speed and consistency with assertMesonRaises
        return self.init(self.srcdir, extra_args=extra_args, inprocess=True)

    def assertMesonOutputs(self, contents, match, extra_args=None, langs=None, meson_version=None):
        '''
        Assert that running meson configure on the specified @contents outputs
        something that matches regex @match.
        '''
        out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version)
        self.assertRegex(out, match)

    def assertMesonDoesNotOutput(self, contents, match, extra_args=None, langs=None, meson_version=None):
        '''
        Assert that running meson configure on the specified @contents does not output
        something that matches regex @match.
        '''
        out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version)
        self.assertNotRegex(out, match)

    @skipIfNoPkgconfig
    def test_dependency(self):
        # Invalid keyword arguments to dependency() must raise clear errors.
        if subprocess.call(['pkg-config', '--exists', 'zlib']) != 0:
            raise unittest.SkipTest('zlib not found with pkg-config')
        a = (("dependency('zlib', method : 'fail')", "'fail' is invalid"),
             ("dependency('zlib', static : '1')", "[Ss]tatic.*boolean"),
             ("dependency('zlib', version : 1)", "[Vv]ersion.*string or list"),
             ("dependency('zlib', required : 1)", "[Rr]equired.*boolean"),
             ("dependency('zlib', method : 1)", "[Mm]ethod.*string"),
             ("dependency('zlibfail')", self.dnf),)
        for contents, match in a:
            self.assertMesonRaises(contents, match)

    def test_apple_frameworks_dependency(self):
        if not is_osx():
            raise unittest.SkipTest('only run on macOS')
        self.assertMesonRaises("dependency('appleframeworks')",
                               "requires at least one module")

    def test_extraframework_dependency_method(self):
        code = "dependency('python', method : 'extraframework')"
        if not is_osx():
            self.assertMesonRaises(code, self.dnf)
        else:
            # Python2 framework is always available on macOS
            self.assertMesonOutputs(code, '[Dd]ependency.*python.*found.*YES')

    def test_sdl2_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('sdl2-config'):
            raise unittest.SkipTest('sdl2-config found')
        self.assertMesonRaises("dependency('sdl2', method : 'sdlconfig')", self.dnf)
        if shutil.which('pkg-config'):
            self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", self.dnf)
        with no_pkgconfig():
            # Look for pkg-config, cache it, then
            # Use cached pkg-config without erroring out, then
            # Use cached pkg-config to error out
            code = "dependency('foobarrr', method : 'pkg-config', required : false)\n" \
                "dependency('foobarrr2', method : 'pkg-config', required : false)\n" \
                "dependency('sdl2', method : 'pkg-config')"
            self.assertMesonRaises(code, self.nopkg)

    def test_gnustep_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('gnustep-config'):
            raise unittest.SkipTest('gnustep-config found')
        self.assertMesonRaises("dependency('gnustep')",
                               "(requires a Objc compiler|{})".format(self.dnf),
                               langs = ['objc'])

    def test_wx_notfound_dependency(self):
        # Want to test failure, so skip if available
        if shutil.which('wx-config-3.0') or shutil.which('wx-config') or shutil.which('wx-config-gtk3'):
            raise unittest.SkipTest('wx-config, wx-config-3.0 or wx-config-gtk3 found')
        self.assertMesonRaises("dependency('wxwidgets')", self.dnf)
        self.assertMesonOutputs("dependency('wxwidgets', required : false)",
                                "Run-time dependency .*WxWidgets.* found: .*NO.*")

    def test_wx_dependency(self):
        if not shutil.which('wx-config-3.0') and not shutil.which('wx-config') and not shutil.which('wx-config-gtk3'):
            raise unittest.SkipTest('Neither wx-config, wx-config-3.0 nor wx-config-gtk3 found')
        self.assertMesonRaises("dependency('wxwidgets', modules : 1)",
                               "module argument is not a string")

    def test_llvm_dependency(self):
        self.assertMesonRaises("dependency('llvm', modules : 'fail')",
                               "(required.*fail|{})".format(self.dnf))

    def test_boost_notfound_dependency(self):
        # Can be run even if Boost is found or not
        self.assertMesonRaises("dependency('boost', modules : 1)",
                               "module.*not a string")
        self.assertMesonRaises("dependency('boost', modules : 'fail')",
                               "(fail.*not found|{})".format(self.dnf))

    def test_boost_BOOST_ROOT_dependency(self):
        # Test BOOST_ROOT; can be run even if Boost is found or not
        self.assertMesonRaises("dependency('boost')",
                               "(BOOST_ROOT.*absolute|{})".format(self.dnf),
                               override_envvars = {'BOOST_ROOT': 'relative/path'})

    def test_dependency_invalid_method(self):
        # Calling a method that does not apply to the dependency kind
        # must raise, for both external and internal dependencies.
        code = '''zlib_dep = dependency('zlib', required : false)
        zlib_dep.get_configtool_variable('foo')
        '''
        self.assertMesonRaises(code, ".* is not a config-tool dependency")
        code = '''zlib_dep = dependency('zlib', required : false)
        dep = declare_dependency(dependencies : zlib_dep)
        dep.get_pkgconfig_variable('foo')
        '''
        self.assertMesonRaises(code, "Method.*pkgconfig.*is invalid.*internal")
        code = '''zlib_dep = dependency('zlib', required : false)
        dep = declare_dependency(dependencies : zlib_dep)
        dep.get_configtool_variable('foo')
        '''
        self.assertMesonRaises(code, "Method.*configtool.*is invalid.*internal")

    def test_objc_cpp_detection(self):
        '''
        Test that when we can't detect objc or objcpp, we fail gracefully.
        '''
        env = get_fake_env()
        try:
            env.detect_objc_compiler(MachineChoice.HOST)
            env.detect_objcpp_compiler(MachineChoice.HOST)
        except EnvironmentException:
            code = "add_languages('objc')\nadd_languages('objcpp')"
            self.assertMesonRaises(code, "Unknown compiler")
            return
        raise unittest.SkipTest("objc and objcpp found, can't test detection failure")

    def test_subproject_variables(self):
        '''
        Test that:
        1. The correct message is outputted when a not-required dep is not
           found and the fallback subproject is also not found.
        2. A not-required fallback dependency is not found because the
           subproject failed to parse.
        3. A not-found not-required dep with a fallback subproject outputs the
           correct message when the fallback subproject is found but the
           variable inside it is not.
        4. A fallback dependency is found from the subproject parsed in (3)
        5. The correct message is outputted when the .wrap file is missing for
           a sub-subproject.
        '''
        tdir = os.path.join(self.unit_test_dir, '20 subproj dep variables')
        out = self.init(tdir, inprocess=True)
        self.assertRegex(out, r"Subproject directory not found and .*nosubproj.wrap.* file not found")
        self.assertRegex(out, r'Function does not take positional arguments.')
        self.assertRegex(out, r'WARNING:.* Dependency .*subsubproject.* not found but it is available in a sub-subproject.')
        self.assertRegex(out, r'Subproject directory not found and .*subsubproject.wrap.* file not found')
        self.assertRegex(out, r'Dependency .*zlibproxy.* from subproject .*subprojects.*somesubproj.* found: .*YES.*')

    def test_exception_exit_status(self):
        '''
        Test exit status on python exception
        '''
        tdir = os.path.join(self.unit_test_dir, '21 exit status')
        with self.assertRaises(subprocess.CalledProcessError) as cm:
            self.init(tdir, inprocess=False, override_envvars = {'MESON_UNIT_TEST': '1'})
        self.assertEqual(cm.exception.returncode, 2)
        self.wipe()

    def test_dict_requires_key_value_pairs(self):
        self.assertMesonRaises("dict = {3, 'foo': 'bar'}",
                               'Only key:value pairs are valid in dict construction.')
        self.assertMesonRaises("{'foo': 'bar', 3}",
                               'Only key:value pairs are valid in dict construction.')

    def test_dict_forbids_duplicate_keys(self):
        self.assertMesonRaises("dict = {'a': 41, 'a': 42}",
                               'Duplicate dictionary key: a.*')

    def test_dict_forbids_integer_key(self):
        self.assertMesonRaises("dict = {3: 'foo'}",
                               'Key must be a string.*')

    def test_using_too_recent_feature(self):
        # Here we use a dict, which was introduced in 0.47.0
        self.assertMesonOutputs("dict = {}",
                                ".*WARNING.*Project targeting.*but.*",
                                meson_version='>= 0.46.0')

    def test_using_recent_feature(self):
        # Same as above, except the meson version is now appropriate
        self.assertMesonDoesNotOutput("dict = {}",
                                      ".*WARNING.*Project targeting.*but.*",
                                      meson_version='>= 0.47')

    def test_using_too_recent_feature_dependency(self):
        self.assertMesonOutputs("dependency('pcap', required: false)",
                                ".*WARNING.*Project targeting.*but.*",
                                meson_version='>= 0.41.0')

    def test_vcs_tag_featurenew_build_always_stale(self):
        'https://github.com/mesonbuild/meson/issues/3904'
        vcs_tag = '''version_data = configuration_data()
        version_data.set('PROJVER', '@VCS_TAG@')
        vf = configure_file(output : 'version.h.in', configuration: version_data)
        f = vcs_tag(input : vf, output : 'version.h')
        '''
        msg = '.*WARNING:.*feature.*build_always_stale.*custom_target.*'
        self.assertMesonDoesNotOutput(vcs_tag, msg, meson_version='>=0.43')

    def test_missing_subproject_not_required_and_required(self):
        self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" +
                               "sub2 = subproject('not-found-subproject', required: true)",
                               """.*Subproject "subprojects/not-found-subproject" required but not found.*""")

    def test_get_variable_on_not_found_project(self):
        self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" +
                               "sub1.get_variable('naaa')",
                               """Subproject "subprojects/not-found-subproject" disabled can't get_variable on it.""")

    def test_version_checked_before_parsing_options(self):
        '''
        https://github.com/mesonbuild/meson/issues/5281
        '''
        options = "option('some-option', type: 'foo', value: '')"
        match = 'Meson version is.*but project requires >=2000'
        self.assertMesonRaises("", match, meson_version='>=2000', options=options)

    def test_assert_default_message(self):
        self.assertMesonRaises("k1 = 'a'\n" +
                               "assert({\n" +
                               "  k1: 1,\n" +
                               "}['a'] == 2)\n",
                               r"Assert failed: {k1 : 1}\['a'\] == 2")
@unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)")
class WindowsTests(BasePlatformTests):
    '''
    Tests that should run on Cygwin, MinGW, and MSVC
    '''
    def setUp(self):
        super().setUp()
        self.platform_test_dir = os.path.join(self.src_root, 'test cases/windows')

    @unittest.skipIf(is_cygwin(), 'Test only applicable to Windows')
    def test_find_program(self):
        '''
        Test that Windows-specific edge-cases in find_program are functioning
        correctly. Cannot be an ordinary test because it involves manipulating
        PATH to point to a directory with Python scripts.
        '''
        testdir = os.path.join(self.platform_test_dir, '8 find program')
        # Find `cmd` and `cmd.exe`
        prog1 = ExternalProgram('cmd')
        self.assertTrue(prog1.found(), msg='cmd not found')
        prog2 = ExternalProgram('cmd.exe')
        self.assertTrue(prog2.found(), msg='cmd.exe not found')
        self.assertPathEqual(prog1.get_path(), prog2.get_path())
        # Find cmd with an absolute path that's missing the extension
        cmd_path = prog2.get_path()[:-4]
        prog = ExternalProgram(cmd_path)
        self.assertTrue(prog.found(), msg='{!r} not found'.format(cmd_path))
        # Finding a script with no extension inside a directory works
        prog = ExternalProgram(os.path.join(testdir, 'test-script'))
        self.assertTrue(prog.found(), msg='test-script not found')
        # Finding a script with an extension inside a directory works
        prog = ExternalProgram(os.path.join(testdir, 'test-script-ext.py'))
        self.assertTrue(prog.found(), msg='test-script-ext.py not found')
        # Finding a script in PATH
        # NOTE(review): this mutates os.environ['PATH'] for the rest of the
        # process without restoring it -- later tests see the modified PATH.
        os.environ['PATH'] += os.pathsep + testdir
        # Finding a script in PATH w/o extension works and adds the interpreter
        # (check only if `.PY` is in PATHEXT)
        if '.PY' in [ext.upper() for ext in os.environ['PATHEXT'].split(';')]:
            prog = ExternalProgram('test-script-ext')
            self.assertTrue(prog.found(), msg='test-script-ext not found in PATH')
            self.assertPathEqual(prog.get_command()[0], python_command[0])
            self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
        # Finding a script in PATH with extension works and adds the interpreter
        prog = ExternalProgram('test-script-ext.py')
        self.assertTrue(prog.found(), msg='test-script-ext.py not found in PATH')
        self.assertPathEqual(prog.get_command()[0], python_command[0])
        self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py')
        # Ensure that WindowsApps gets removed from PATH
        path = os.environ['PATH']
        if 'WindowsApps' not in path:
            # Synthesize a PATH containing the app-store directory so the
            # sanitizer has something to strip.
            username = os.environ['USERNAME']
            appstore_dir = r'C:\Users\{}\AppData\Local\Microsoft\WindowsApps'.format(username)
            path = os.pathsep + appstore_dir
        path = ExternalProgram._windows_sanitize_path(path)
        self.assertNotIn('WindowsApps', path)

    def test_ignore_libs(self):
        '''
        Test that find_library on libs that are to be ignored returns an empty
        array of arguments. Must be a unit test because we cannot inspect
        ExternalLibraryHolder from build files.
        '''
        testdir = os.path.join(self.platform_test_dir, '1 basic')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Not using MSVC')
        # To force people to update this test, and also test
        self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt', 'execinfo'})
        for l in cc.ignore_libs:
            self.assertEqual(cc.find_library(l, env, []), [])

    def test_rc_depends_files(self):
        '''
        Check dependency tracking for compiled Windows resources: touching a
        depend_file or a header listed in the generated depfile must trigger
        a rebuild of the consuming program.
        '''
        testdir = os.path.join(self.platform_test_dir, '5 resources')
        # resource compiler depfile generation is not yet implemented for msvc
        env = get_fake_env(testdir, self.builddir, self.prefix)
        depfile_works = env.detect_c_compiler(MachineChoice.HOST).get_id() not in {'msvc', 'clang-cl', 'intel-cl'}
        self.init(testdir)
        self.build()
        # Immediately rebuilding should not do anything
        self.assertBuildIsNoop()
        # Test compile_resources(depend_file:)
        # Changing mtime of sample.ico should rebuild prog
        self.utime(os.path.join(testdir, 'res', 'sample.ico'))
        self.assertRebuiltTarget('prog')
        # Test depfile generation by compile_resources
        # Changing mtime of resource.h should rebuild myres.rc and then prog
        if depfile_works:
            self.utime(os.path.join(testdir, 'inc', 'resource', 'resource.h'))
            self.assertRebuiltTarget('prog')
        self.wipe()
        if depfile_works:
            testdir = os.path.join(self.platform_test_dir, '12 resources with custom targets')
            self.init(testdir)
            self.build()
            # Immediately rebuilding should not do anything
            self.assertBuildIsNoop()
            # Changing mtime of resource.h should rebuild myres_1.rc and then prog_1
            self.utime(os.path.join(testdir, 'res', 'resource.h'))
            self.assertRebuiltTarget('prog_1')

    def test_msvc_cpp17(self):
        # A /std:c++17-capable MSVC must be able to configure and build the
        # vscpp17 project; older MSVCs are allowed to fail at configure time.
        testdir = os.path.join(self.unit_test_dir, '45 vscpp17')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Test only applies to MSVC-like compilers')
        try:
            self.init(testdir)
        except subprocess.CalledProcessError:
            # According to Python docs, output is only stored when
            # using check_output. We don't use it, so we can't check
            # that the output is correct (i.e. that it failed due
            # to the right reason).
            return
        self.build()

    def test_install_pdb_introspection(self):
        # With MSVC, the generated .pdb file must appear in the
        # --installed introspection data.
        testdir = os.path.join(self.platform_test_dir, '1 basic')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cc = env.detect_c_compiler(MachineChoice.HOST)
        if cc.get_argument_syntax() != 'msvc':
            raise unittest.SkipTest('Test only applies to MSVC-like compilers')
        self.init(testdir)
        installed = self.introspect('--installed')
        files = [os.path.basename(path) for path in installed.values()]
        self.assertTrue('prog.pdb' in files)

    def _check_ld(self, name: str, lang: str, expected: str) -> None:
        '''Set the per-language linker environment variable to @name and
        assert that the detected compiler's linker id is @expected.'''
        if not shutil.which(name):
            raise unittest.SkipTest('Could not find {}.'.format(name))
        envvar = mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]
        with mock.patch.dict(os.environ, {envvar: name}):
            env = get_fake_env()
            try:
                comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST)
            except EnvironmentException:
                raise unittest.SkipTest('Could not find a compiler for {}'.format(lang))
            self.assertEqual(comp.linker.id, expected)

    def test_link_environment_variable_lld_link(self):
        self._check_ld('lld-link', 'c', 'lld-link')

    def test_link_environment_variable_link(self):
        self._check_ld('link', 'c', 'link')

    def test_link_environment_variable_optlink(self):
        self._check_ld('optlink', 'c', 'optlink')

    def test_link_environment_variable_rust(self):
        self._check_ld('link', 'rust', 'link')

    def test_pefile_checksum(self):
        try:
            import pefile
        except ImportError:
            # On CI a missing pefile is a real failure, not a skip.
            if is_ci():
                raise
            raise unittest.SkipTest('pefile module not found')
        testdir = os.path.join(self.common_test_dir, '6 linkshared')
        self.init(testdir)
        self.build()
        # Test that binaries have a non-zero checksum
        env = get_fake_env()
        cc = env.detect_c_compiler(MachineChoice.HOST)
        cc_id = cc.get_id()
        ld_id = cc.get_linker_id()
        dll = glob(os.path.join(self.builddir, '*mycpplib.dll'))[0]
        exe = os.path.join(self.builddir, 'cppprog.exe')
        for f in (dll, exe):
            pe = pefile.PE(f)
            msg = 'PE file: {!r}, compiler: {!r}, linker: {!r}'.format(f, cc_id, ld_id)
            if cc_id == 'clang-cl':
                # Latest clang-cl tested (7.0) does not write checksums out
                self.assertFalse(pe.verify_checksum(), msg=msg)
            else:
                # Verify that a valid checksum was written by all other compilers
                self.assertTrue(pe.verify_checksum(), msg=msg)
@unittest.skipUnless(is_osx(), "requires Darwin")
class DarwinTests(BasePlatformTests):
'''
Tests that should run on macOS
'''
def setUp(self):
super().setUp()
self.platform_test_dir = os.path.join(self.src_root, 'test cases/osx')
def test_apple_bitcode(self):
'''
Test that -fembed-bitcode is correctly added while compiling and
-bitcode_bundle is added while linking when b_bitcode is true and not
when it is false. This can't be an ordinary test case because we need
to inspect the compiler database.
'''
testdir = os.path.join(self.platform_test_dir, '7 bitcode')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
if cc.id != 'clang':
raise unittest.SkipTest('Not using Clang on OSX')
# Try with bitcode enabled
out = self.init(testdir, extra_args='-Db_bitcode=true')
# Warning was printed
self.assertRegex(out, 'WARNING:.*b_bitcode')
# Compiler options were added
for compdb in self.get_compdb():
if 'module' in compdb['file']:
self.assertNotIn('-fembed-bitcode', compdb['command'])
else:
self.assertIn('-fembed-bitcode', compdb['command'])
build_ninja = os.path.join(self.builddir, 'build.ninja')
# Linker options were added
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
m = re.search('LINK_ARGS =.*-bitcode_bundle', contents)
self.assertIsNotNone(m, msg=contents)
# Try with bitcode disabled
self.setconf('-Db_bitcode=false')
# Regenerate build
self.build()
for compdb in self.get_compdb():
self.assertNotIn('-fembed-bitcode', compdb['command'])
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, 'r', encoding='utf-8') as f:
contents = f.read()
m = re.search('LINK_ARGS =.*-bitcode_bundle', contents)
self.assertIsNone(m, msg=contents)
def test_apple_bitcode_modules(self):
'''
Same as above, just for shared_module()
'''
testdir = os.path.join(self.common_test_dir, '152 shared module resolving symbol in executable')
# Ensure that it builds even with bitcode enabled
self.init(testdir, extra_args='-Db_bitcode=true')
self.build()
self.run_tests()
def _get_darwin_versions(self, fname):
fname = os.path.join(self.builddir, fname)
out = subprocess.check_output(['otool', '-L', fname], universal_newlines=True)
m = re.match(r'.*version (.*), current version (.*)\)', out.split('\n')[1])
self.assertIsNotNone(m, msg=out)
return m.groups()
@skipIfNoPkgconfig
def test_library_versioning(self):
'''
Ensure that compatibility_version and current_version are set correctly
'''
testdir = os.path.join(self.platform_test_dir, '2 library versions')
self.init(testdir)
self.build()
targets = {}
for t in self.introspect('--targets'):
targets[t['name']] = t['filename'][0] if isinstance(t['filename'], list) else t['filename']
self.assertEqual(self._get_darwin_versions(targets['some']), ('7.0.0', '7.0.0'))
self.assertEqual(self._get_darwin_versions(targets['noversion']), ('0.0.0', '0.0.0'))
self.assertEqual(self._get_darwin_versions(targets['onlyversion']), ('1.0.0', '1.0.0'))
self.assertEqual(self._get_darwin_versions(targets['onlysoversion']), ('5.0.0', '5.0.0'))
self.assertEqual(self._get_darwin_versions(targets['intver']), ('2.0.0', '2.0.0'))
self.assertEqual(self._get_darwin_versions(targets['stringver']), ('2.3.0', '2.3.0'))
self.assertEqual(self._get_darwin_versions(targets['stringlistver']), ('2.4.0', '2.4.0'))
self.assertEqual(self._get_darwin_versions(targets['intstringver']), ('1111.0.0', '2.5.0'))
self.assertEqual(self._get_darwin_versions(targets['stringlistvers']), ('2.6.0', '2.6.1'))
def test_duplicate_rpath(self):
testdir = os.path.join(self.unit_test_dir, '10 build_rpath')
# We purposely pass a duplicate rpath to Meson, in order
# to ascertain that Meson does not call install_name_tool
# with duplicate -delete_rpath arguments, which would
# lead to erroring out on installation
env = {"LDFLAGS": "-Wl,-rpath,/foo/bar"}
self.init(testdir, override_envvars=env)
self.build()
self.install()
    def test_removing_unused_linker_args(self):
        # Configure must succeed even though CFLAGS contains link-only
        # flags; presumably Meson strips them from compile-only checks —
        # the test only verifies that init() does not fail.
        testdir = os.path.join(self.common_test_dir, '108 has arg')
        env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic'}
        self.init(testdir, override_envvars=env)
@unittest.skipUnless(not is_windows(), "requires something Unix-like")
class LinuxlikeTests(BasePlatformTests):
'''
Tests that should run on Linux, macOS, and *BSD
'''
def test_basic_soname(self):
'''
Test that the soname is set correctly for shared libraries. This can't
be an ordinary test case because we need to run `readelf` and actually
check the soname.
https://github.com/mesonbuild/meson/issues/785
'''
testdir = os.path.join(self.common_test_dir, '4 shared')
self.init(testdir)
self.build()
lib1 = os.path.join(self.builddir, 'libmylib.so')
soname = get_soname(lib1)
self.assertEqual(soname, 'libmylib.so')
def test_custom_soname(self):
'''
Test that the soname is set correctly for shared libraries when
a custom prefix and/or suffix is used. This can't be an ordinary test
case because we need to run `readelf` and actually check the soname.
https://github.com/mesonbuild/meson/issues/785
'''
testdir = os.path.join(self.common_test_dir, '25 library versions')
self.init(testdir)
self.build()
lib1 = os.path.join(self.builddir, 'prefixsomelib.suffix')
soname = get_soname(lib1)
self.assertEqual(soname, 'prefixsomelib.suffix')
    def test_pic(self):
        '''
        Test that -fPIC is correctly added to static libraries when b_staticpic
        is true and not when it is false. This can't be an ordinary test case
        because we need to inspect the compiler database.
        '''
        if is_windows() or is_cygwin() or is_osx():
            raise unittest.SkipTest('PIC not relevant')
        testdir = os.path.join(self.common_test_dir, '3 static')
        self.init(testdir)
        compdb = self.get_compdb()
        # b_staticpic defaults to true, so -fPIC must be present initially.
        self.assertIn('-fPIC', compdb[0]['command'])
        self.setconf('-Db_staticpic=false')
        # Regenerate build
        self.build()
        compdb = self.get_compdb()
        self.assertNotIn('-fPIC', compdb[0]['command'])
def test_pkgconfig_gen(self):
'''
Test that generated pkg-config files can be found and have the correct
version and link args. This can't be an ordinary test case because we
need to run pkg-config outside of a Meson build file.
https://github.com/mesonbuild/meson/issues/889
'''
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
self.init(testdir)
env = get_fake_env(testdir, self.builddir, self.prefix)
kwargs = {'required': True, 'silent': True}
os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
foo_dep = PkgConfigDependency('libfoo', env, kwargs)
self.assertTrue(foo_dep.found())
self.assertEqual(foo_dep.get_version(), '1.0')
self.assertIn('-lfoo', foo_dep.get_link_args())
self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar')
self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data')
def test_pkgconfig_gen_deps(self):
'''
Test that generated pkg-config files correctly handle dependencies
'''
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
self.init(testdir)
privatedir1 = self.privatedir
self.new_builddir()
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen', 'dependencies')
self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': privatedir1})
privatedir2 = self.privatedir
os.environ
env = {
'PKG_CONFIG_LIBDIR': os.pathsep.join([privatedir1, privatedir2]),
'PKG_CONFIG_SYSTEM_LIBRARY_PATH': '/usr/lib',
}
self._run(['pkg-config', 'dependency-test', '--validate'], override_envvars=env)
# pkg-config strips some duplicated flags so we have to parse the
# generated file ourself.
expected = {
'Requires': 'libexposed',
'Requires.private': 'libfoo >= 1.0',
'Libs': '-L${libdir} -llibmain -pthread -lcustom',
'Libs.private': '-lcustom2 -L${libdir} -llibinternal',
'Cflags': '-I${includedir} -pthread -DCUSTOM',
}
if is_osx() or is_haiku():
expected['Cflags'] = expected['Cflags'].replace('-pthread ', '')
with open(os.path.join(privatedir2, 'dependency-test.pc')) as f:
matched_lines = 0
for line in f:
parts = line.split(':', 1)
if parts[0] in expected:
key = parts[0]
val = parts[1].strip()
expected_val = expected[key]
self.assertEqual(expected_val, val)
matched_lines += 1
self.assertEqual(len(expected), matched_lines)
cmd = ['pkg-config', 'requires-test']
out = self._run(cmd + ['--print-requires'], override_envvars=env).strip().split('\n')
if not is_openbsd():
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
else:
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))
cmd = ['pkg-config', 'requires-private-test']
out = self._run(cmd + ['--print-requires-private'], override_envvars=env).strip().split('\n')
if not is_openbsd():
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
else:
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))
cmd = ['pkg-config', 'pub-lib-order']
out = self._run(cmd + ['--libs'], override_envvars=env).strip().split()
self.assertEqual(out, ['-llibmain2', '-llibinternal'])
def test_pkg_unfound(self):
testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig')
self.init(testdir)
with open(os.path.join(self.privatedir, 'somename.pc')) as f:
pcfile = f.read()
self.assertFalse('blub_blob_blib' in pcfile)
    def test_vala_c_warnings(self):
        '''
        Test that no warnings are emitted for C code generated by Vala. This
        can't be an ordinary test case because we need to inspect the compiler
        database.
        https://github.com/mesonbuild/meson/issues/864
        '''
        if not shutil.which('valac'):
            raise unittest.SkipTest('valac not installed.')
        testdir = os.path.join(self.vala_test_dir, '5 target glib')
        self.init(testdir)
        compdb = self.get_compdb()
        vala_command = None
        c_command = None
        # Find the compile command of the Vala-generated C file and of the
        # hand-written C file; fail loudly on anything unexpected.
        for each in compdb:
            if each['file'].endswith('GLib.Thread.c'):
                vala_command = each['command']
            elif each['file'].endswith('GLib.Thread.vala'):
                # The .vala source itself is not compiled by the C compiler.
                continue
            elif each['file'].endswith('retcode.c'):
                c_command = each['command']
            else:
                m = 'Unknown file {!r} in vala_c_warnings test'.format(each['file'])
                raise AssertionError(m)
        self.assertIsNotNone(vala_command)
        self.assertIsNotNone(c_command)
        # -w suppresses all warnings, should be there in Vala but not in C
        self.assertIn(" -w ", vala_command)
        self.assertNotIn(" -w ", c_command)
        # -Wall enables all warnings, should be there in C but not in Vala
        self.assertNotIn(" -Wall ", vala_command)
        self.assertIn(" -Wall ", c_command)
        # -Werror converts warnings to errors, should always be there since it's
        # injected by an unrelated piece of code and the project has werror=true
        self.assertIn(" -Werror ", vala_command)
        self.assertIn(" -Werror ", c_command)
    @skipIfNoPkgconfig
    def test_qtdependency_pkgconfig_detection(self):
        '''
        Test that qt4 and qt5 detection with pkgconfig works.
        '''
        # Verify Qt4 or Qt5 can be found with pkg-config
        # (subprocess.call returns 0 when the module exists).
        qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore'])
        qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core'])
        testdir = os.path.join(self.framework_test_dir, '4 qt')
        self.init(testdir, extra_args=['-Dmethod=pkg-config'])
        # Confirm that the dependency was found with pkg-config
        mesonlog = self.get_meson_log()
        if qt4 == 0:
            self.assertRegex('\n'.join(mesonlog),
                             r'Run-time dependency qt4 \(modules: Core\) found: YES 4.* \(pkg-config\)\n')
        if qt5 == 0:
            self.assertRegex('\n'.join(mesonlog),
                             r'Run-time dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)\n')
    @skip_if_not_base_option('b_sanitize')
    def test_generate_gir_with_address_sanitizer(self):
        '''
        The GNOME project must still build when ASan is enabled
        (g-ir-scanner runs compiled introspection binaries).
        '''
        if is_cygwin():
            raise unittest.SkipTest('asan not available on Cygwin')
        if is_openbsd():
            raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
        testdir = os.path.join(self.framework_test_dir, '7 gnome')
        # b_lundef=false is needed because ASan leaves symbols undefined.
        self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
        self.build()
    def test_qt5dependency_qmake_detection(self):
        '''
        Test that qt5 detection with qmake works. This can't be an ordinary
        test case because it involves setting the environment.
        '''
        # Verify that qmake is for Qt5
        if not shutil.which('qmake-qt5'):
            if not shutil.which('qmake'):
                raise unittest.SkipTest('QMake not found')
            output = subprocess.getoutput('qmake --version')
            if 'Qt version 5' not in output:
                raise unittest.SkipTest('Qmake found, but it is not for Qt 5.')
        # Disable pkg-config codepath and force searching with qmake/qmake-qt5
        testdir = os.path.join(self.framework_test_dir, '4 qt')
        self.init(testdir, extra_args=['-Dmethod=qmake'])
        # Confirm that the dependency was found with qmake
        mesonlog = self.get_meson_log()
        self.assertRegex('\n'.join(mesonlog),
                         r'Run-time dependency qt5 \(modules: Core\) found: YES .* \((qmake|qmake-qt5)\)\n')
    def _test_soname_impl(self, libpath, install):
        '''
        Build (and optionally install) the '1 soname' project and verify, for
        each library variant, the soname, the symlink chain and the number of
        files produced. `libpath` is the directory in which to look for the
        libraries (build dir or install dir).
        '''
        if is_cygwin() or is_osx():
            raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames')
        testdir = os.path.join(self.unit_test_dir, '1 soname')
        self.init(testdir)
        self.build()
        if install:
            self.install()
        # File without aliases set.
        nover = os.path.join(libpath, 'libnover.so')
        self.assertPathExists(nover)
        self.assertFalse(os.path.islink(nover))
        self.assertEqual(get_soname(nover), 'libnover.so')
        # glob on the name minus the '.so' suffix counts the real file plus
        # any version symlinks.
        self.assertEqual(len(glob(nover[:-3] + '*')), 1)
        # File with version set
        verset = os.path.join(libpath, 'libverset.so')
        self.assertPathExists(verset + '.4.5.6')
        self.assertEqual(os.readlink(verset), 'libverset.so.4')
        self.assertEqual(get_soname(verset), 'libverset.so.4')
        self.assertEqual(len(glob(verset[:-3] + '*')), 3)
        # File with soversion set
        soverset = os.path.join(libpath, 'libsoverset.so')
        self.assertPathExists(soverset + '.1.2.3')
        self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
        self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
        self.assertEqual(len(glob(soverset[:-3] + '*')), 2)
        # File with version and soversion set to same values
        settosame = os.path.join(libpath, 'libsettosame.so')
        self.assertPathExists(settosame + '.7.8.9')
        self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
        self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
        self.assertEqual(len(glob(settosame[:-3] + '*')), 2)
        # File with version and soversion set to different values
        bothset = os.path.join(libpath, 'libbothset.so')
        self.assertPathExists(bothset + '.1.2.3')
        self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
        self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
        self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
        self.assertEqual(len(glob(bothset[:-3] + '*')), 3)
    def test_soname(self):
        # Check sonames of the libraries in the build directory (no install).
        self._test_soname_impl(self.builddir, False)
    def test_installed_soname(self):
        # installdir acts like DESTDIR, so the absolute prefix path is
        # appended to it with plain string concatenation on purpose
        # (os.path.join would discard installdir because prefix is absolute).
        libdir = self.installdir + os.path.join(self.prefix, self.libdir)
        self._test_soname_impl(libdir, True)
    def test_compiler_check_flags_order(self):
        '''
        Test that compiler check flags override all other flags. This can't be
        an ordinary test case because it needs the environment to be set.
        '''
        testdir = os.path.join(self.common_test_dir, '39 has function')
        env = get_fake_env(testdir, self.builddir, self.prefix)
        cpp = env.detect_cpp_compiler(MachineChoice.HOST)
        Oflag = '-O3'
        OflagCPP = Oflag
        if cpp.get_id() in ('clang', 'gcc'):
            # prevent developers from adding "int main(int argc, char **argv)"
            # to small Meson checks unless these parameters are actually used
            OflagCPP += ' -Werror=unused-parameter'
        env = {'CFLAGS': Oflag,
               'CXXFLAGS': OflagCPP}
        self.init(testdir, override_envvars=env)
        cmds = self.get_meson_log_compiler_checks()
        for cmd in cmds:
            # Drop a leading ccache wrapper so cmd[0] is the compiler itself.
            if cmd[0] == 'ccache':
                cmd = cmd[1:]
            # Verify that -I flags from the `args` kwarg are first
            # This is set in the '39 has function' test case
            self.assertEqual(cmd[1], '-I/tmp')
            # Verify that -O3 set via the environment is overridden by -O0
            Oargs = [arg for arg in cmd if arg.startswith('-O')]
            self.assertEqual(Oargs, [Oflag, '-O0'])
    def _test_stds_impl(self, testdir, compiler, p: str):
        '''
        For the given language prefix `p` ('c' or 'cpp'), configure and build
        the test project once per value of the <lang>_std option supported by
        `compiler`, skipping values the detected compiler version cannot
        handle, and finally check that an invalid -std= in the environment
        makes configuration fail.
        '''
        lang_std = p + '_std'
        # Gate newer standard versions on the detected compiler version;
        # non-clang/gcc compilers are assumed to accept all listed values.
        has_cpp17 = (compiler.get_id() not in {'clang', 'gcc'} or
                     compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=5.0.0', '>=9.1') or
                     compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=5.0.0'))
        has_cpp2a_c17 = (compiler.get_id() not in {'clang', 'gcc'} or
                         compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=6.0.0', '>=10.0') or
                         compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
        has_c18 = (compiler.get_id() not in {'clang', 'gcc'} or
                   compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=8.0.0', '>=11.0') or
                   compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
        # Check that all the listed -std=xxx options for this compiler work just fine when used
        # https://en.wikipedia.org/wiki/Xcode#Latest_versions
        # https://www.gnu.org/software/gcc/projects/cxx-status.html
        for v in compiler.get_options()[lang_std].choices:
            # we do it like this to handle gnu++17,c++17 and gnu17,c17 cleanly
            # thus, C++ first
            if '++17' in v and not has_cpp17:
                continue
            elif '++2a' in v and not has_cpp2a_c17: # https://en.cppreference.com/w/cpp/compiler_support
                continue
            # now C
            elif '17' in v and not has_cpp2a_c17:
                continue
            elif '18' in v and not has_c18:
                continue
            std_opt = '{}={}'.format(lang_std, v)
            self.init(testdir, extra_args=['-D' + std_opt])
            cmd = self.get_compdb()[0]['command']
            # c++03 and gnu++03 are not understood by ICC, don't try to look for them
            skiplist = frozenset([
                ('intel', 'c++03'),
                ('intel', 'gnu++03')])
            if v != 'none' and not (compiler.get_id(), v) in skiplist:
                cmd_std = " -std={} ".format(v)
                self.assertIn(cmd_std, cmd)
            try:
                self.build()
            except Exception:
                # Report which std value failed before re-raising.
                print('{} was {!r}'.format(lang_std, v))
                raise
            # Wipe so the next std value configures from scratch.
            self.wipe()
        # Check that an invalid std option in CFLAGS/CPPFLAGS fails
        # Needed because by default ICC ignores invalid options
        cmd_std = '-std=FAIL'
        if p == 'c':
            env_flag_name = 'CFLAGS'
        elif p == 'cpp':
            env_flag_name = 'CXXFLAGS'
        else:
            raise NotImplementedError('Language {} not defined.'.format(p))
        env = {}
        env[env_flag_name] = cmd_std
        with self.assertRaises((subprocess.CalledProcessError, mesonbuild.mesonlib.EnvironmentException),
                               msg='C compiler should have failed with -std=FAIL'):
            self.init(testdir, override_envvars = env)
            # ICC won't fail in the above because additional flags are needed to
            # make unknown -std=... options errors.
            self.build()
def test_compiler_c_stds(self):
'''
Test that C stds specified for this compiler can all be used. Can't be
an ordinary test because it requires passing options to meson.
'''
testdir = os.path.join(self.common_test_dir, '1 trivial')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = env.detect_c_compiler(MachineChoice.HOST)
self._test_stds_impl(testdir, cc, 'c')
def test_compiler_cpp_stds(self):
'''
Test that C++ stds specified for this compiler can all be used. Can't
be an ordinary test because it requires passing options to meson.
'''
testdir = os.path.join(self.common_test_dir, '2 cpp')
env = get_fake_env(testdir, self.builddir, self.prefix)
cpp = env.detect_cpp_compiler(MachineChoice.HOST)
self._test_stds_impl(testdir, cpp, 'cpp')
    def test_unity_subproj(self):
        # With --unity=subprojects only subproject targets get unity sources;
        # targets in the main project must not.
        testdir = os.path.join(self.common_test_dir, '45 subproject')
        self.init(testdir, extra_args='--unity=subprojects')
        simpletest_id = Target.construct_id_from_path('subprojects/sublib', 'simpletest', '@exe')
        self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', simpletest_id, 'simpletest-unity.c'))
        sublib_id = Target.construct_id_from_path('subprojects/sublib', 'sublib', '@sha')
        self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', sublib_id, 'sublib-unity.c'))
        # Main-project target must not have a unity source.
        self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
        self.build()
    def test_installed_modes(self):
        '''
        Test that files installed by these tests have the correct permissions.
        Can't be an ordinary test because our installed_files.txt is very basic.
        '''
        # Test file modes
        testdir = os.path.join(self.common_test_dir, '12 data')
        self.init(testdir)
        self.install()
        f = os.path.join(self.installdir, 'etc', 'etcfile.dat')
        found_mode = stat.filemode(os.stat(f).st_mode)
        want_mode = 'rw------T'
        # [1:] strips the file-type character from the filemode string.
        self.assertEqual(want_mode, found_mode[1:])
        f = os.path.join(self.installdir, 'usr', 'bin', 'runscript.sh')
        statf = os.stat(f)
        found_mode = stat.filemode(statf.st_mode)
        want_mode = 'rwxr-sr-x'
        self.assertEqual(want_mode, found_mode[1:])
        if os.getuid() == 0:
            # The chown failed nonfatally if we're not root
            self.assertEqual(0, statf.st_uid)
            self.assertEqual(0, statf.st_gid)
        # A file installed via a file object must keep the source file's mode.
        f = os.path.join(self.installdir, 'usr', 'share', 'progname',
                         'fileobject_datafile.dat')
        orig = os.path.join(testdir, 'fileobject_datafile.dat')
        statf = os.stat(f)
        statorig = os.stat(orig)
        found_mode = stat.filemode(statf.st_mode)
        orig_mode = stat.filemode(statorig.st_mode)
        self.assertEqual(orig_mode[1:], found_mode[1:])
        self.assertEqual(os.getuid(), statf.st_uid)
        if os.getuid() == 0:
            # The chown failed nonfatally if we're not root
            self.assertEqual(0, statf.st_gid)
        self.wipe()
        # Test directory modes
        testdir = os.path.join(self.common_test_dir, '62 install subdir')
        self.init(testdir)
        self.install()
        f = os.path.join(self.installdir, 'usr', 'share', 'sub1', 'second.dat')
        statf = os.stat(f)
        found_mode = stat.filemode(statf.st_mode)
        want_mode = 'rwxr-x--t'
        self.assertEqual(want_mode, found_mode[1:])
        if os.getuid() == 0:
            # The chown failed nonfatally if we're not root
            self.assertEqual(0, statf.st_uid)
    def test_installed_modes_extended(self):
        '''
        Test that files are installed with correct permissions using install_mode.
        '''
        testdir = os.path.join(self.common_test_dir, '195 install_mode')
        self.init(testdir)
        self.build()
        self.install()
        # (path relative to the prefix, expected full stat.filemode string)
        for fsobj, want_mode in [
                ('bin', 'drwxr-x---'),
                ('bin/runscript.sh', '-rwxr-sr-x'),
                ('bin/trivialprog', '-rwxr-sr-x'),
                ('include', 'drwxr-x---'),
                ('include/config.h', '-rw-rwSr--'),
                ('include/rootdir.h', '-r--r--r-T'),
                ('lib', 'drwxr-x---'),
                ('lib/libstat.a', '-rw---Sr--'),
                ('share', 'drwxr-x---'),
                ('share/man', 'drwxr-x---'),
                ('share/man/man1', 'drwxr-x---'),
                ('share/man/man1/foo.1', '-r--r--r-T'),
                ('share/sub1', 'drwxr-x---'),
                ('share/sub1/second.dat', '-rwxr-x--t'),
                ('subdir', 'drwxr-x---'),
                ('subdir/data.dat', '-rw-rwSr--'),
        ]:
            f = os.path.join(self.installdir, 'usr', *fsobj.split('/'))
            found_mode = stat.filemode(os.stat(f).st_mode)
            self.assertEqual(want_mode, found_mode,
                             msg=('Expected file %s to have mode %s but found %s instead.' %
                                  (fsobj, want_mode, found_mode)))
        # Ensure that introspect --installed works on all types of files
        # FIXME: also verify the files list
        self.introspect('--installed')
    def test_install_umask(self):
        '''
        Test that files are installed with correct permissions using default
        install umask of 022, regardless of the umask at time the worktree
        was checked out or the build was executed.
        '''
        # Copy source tree to a temporary directory and change permissions
        # there to simulate a checkout with umask 002.
        orig_testdir = os.path.join(self.unit_test_dir, '26 install umask')
        # Create a new testdir under tmpdir.
        tmpdir = os.path.realpath(tempfile.mkdtemp())
        self.addCleanup(windows_proof_rmtree, tmpdir)
        testdir = os.path.join(tmpdir, '26 install umask')
        # Copy the tree using shutil.copyfile, which will use the current umask
        # instead of preserving permissions of the old tree.
        save_umask = os.umask(0o002)
        self.addCleanup(os.umask, save_umask)
        shutil.copytree(orig_testdir, testdir, copy_function=shutil.copyfile)
        # Preserve the executable status of subdir/sayhello though.
        os.chmod(os.path.join(testdir, 'subdir', 'sayhello'), 0o775)
        self.init(testdir)
        # Run the build under a 027 umask now.
        os.umask(0o027)
        self.build()
        # And keep umask 027 for the install step too.
        self.install()
        # Installed executables must be 0755 (umask 022) despite umask 027.
        for executable in [
                'bin/prog',
                'share/subdir/sayhello',
        ]:
            f = os.path.join(self.installdir, 'usr', *executable.split('/'))
            found_mode = stat.filemode(os.stat(f).st_mode)
            want_mode = '-rwxr-xr-x'
            self.assertEqual(want_mode, found_mode,
                             msg=('Expected file %s to have mode %s but found %s instead.' %
                                  (executable, want_mode, found_mode)))
        # Installed directories must be 0755 as well.
        for directory in [
                'usr',
                'usr/bin',
                'usr/include',
                'usr/share',
                'usr/share/man',
                'usr/share/man/man1',
                'usr/share/subdir',
        ]:
            f = os.path.join(self.installdir, *directory.split('/'))
            found_mode = stat.filemode(os.stat(f).st_mode)
            want_mode = 'drwxr-xr-x'
            self.assertEqual(want_mode, found_mode,
                             msg=('Expected directory %s to have mode %s but found %s instead.' %
                                  (directory, want_mode, found_mode)))
        # Plain data files must be 0644.
        for datafile in [
                'include/sample.h',
                'share/datafile.cat',
                'share/file.dat',
                'share/man/man1/prog.1',
                'share/subdir/datafile.dog',
        ]:
            f = os.path.join(self.installdir, 'usr', *datafile.split('/'))
            found_mode = stat.filemode(os.stat(f).st_mode)
            want_mode = '-rw-r--r--'
            self.assertEqual(want_mode, found_mode,
                             msg=('Expected file %s to have mode %s but found %s instead.' %
                                  (datafile, want_mode, found_mode)))
def test_cpp_std_override(self):
testdir = os.path.join(self.unit_test_dir, '6 std override')
self.init(testdir)
compdb = self.get_compdb()
# Don't try to use -std=c++03 as a check for the
# presence of a compiler flag, as ICC does not
# support it.
for i in compdb:
if 'prog98' in i['file']:
c98_comp = i['command']
if 'prog11' in i['file']:
c11_comp = i['command']
if 'progp' in i['file']:
plain_comp = i['command']
self.assertNotEqual(len(plain_comp), 0)
self.assertIn('-std=c++98', c98_comp)
self.assertNotIn('-std=c++11', c98_comp)
self.assertIn('-std=c++11', c11_comp)
self.assertNotIn('-std=c++98', c11_comp)
self.assertNotIn('-std=c++98', plain_comp)
self.assertNotIn('-std=c++11', plain_comp)
# Now werror
self.assertIn('-Werror', plain_comp)
self.assertNotIn('-Werror', c98_comp)
    def test_run_installed(self):
        if is_cygwin() or is_osx():
            raise unittest.SkipTest('LD_LIBRARY_PATH and RPATH not applicable')
        testdir = os.path.join(self.unit_test_dir, '7 run installed')
        self.init(testdir)
        self.build()
        self.install()
        installed_exe = os.path.join(self.installdir, 'usr/bin/prog')
        installed_libdir = os.path.join(self.installdir, 'usr/foo')
        installed_lib = os.path.join(installed_libdir, 'libfoo.so')
        self.assertTrue(os.path.isfile(installed_exe))
        self.assertTrue(os.path.isdir(installed_libdir))
        self.assertTrue(os.path.isfile(installed_lib))
        # Must fail when run without LD_LIBRARY_PATH to ensure that
        # rpath has been properly stripped rather than pointing to the builddir.
        self.assertNotEqual(subprocess.call(installed_exe, stderr=subprocess.DEVNULL), 0)
        # When LD_LIBRARY_PATH is set it should start working.
        # For some reason setting LD_LIBRARY_PATH in os.environ fails
        # when all tests are run (but works when only this test is run),
        # but doing this explicitly works.
        env = os.environ.copy()
        env['LD_LIBRARY_PATH'] = ':'.join([installed_libdir, env.get('LD_LIBRARY_PATH', '')])
        self.assertEqual(subprocess.call(installed_exe, env=env), 0)
        # Ensure that introspect --installed works
        installed = self.introspect('--installed')
        for v in installed.values():
            self.assertTrue('prog' in v or 'foo' in v)
    @skipIfNoPkgconfig
    def test_order_of_l_arguments(self):
        testdir = os.path.join(self.unit_test_dir, '8 -L -l order')
        self.init(testdir, override_envvars={'PKG_CONFIG_PATH': testdir})
        # NOTE: .pc file has -Lfoo -lfoo -Lbar -lbar but pkg-config reorders
        # the flags before returning them to -Lfoo -Lbar -lfoo -lbar
        # but pkgconf seems to not do that. Sigh. Support both.
        # Each pair means "the first flag must appear before the second"
        # on the link line; this holds for both orderings above.
        expected_order = [('-L/me/first', '-lfoo1'),
                          ('-L/me/second', '-lfoo2'),
                          ('-L/me/first', '-L/me/second'),
                          ('-lfoo1', '-lfoo2'),
                          ('-L/me/second', '-L/me/third'),
                          ('-L/me/third', '-L/me/fourth',),
                          ('-L/me/third', '-lfoo3'),
                          ('-L/me/fourth', '-lfoo4'),
                          ('-lfoo3', '-lfoo4'),
                          ]
        with open(os.path.join(self.builddir, 'build.ninja')) as ifile:
            for line in ifile:
                # Only the line containing the first flag is the link line.
                if expected_order[0][0] in line:
                    for first, second in expected_order:
                        self.assertLess(line.index(first), line.index(second))
                    return
        raise RuntimeError('Linker entries not found in the Ninja file.')
def test_introspect_dependencies(self):
'''
Tests that mesonintrospect --dependencies returns expected output.
'''
testdir = os.path.join(self.framework_test_dir, '7 gnome')
self.init(testdir)
glib_found = False
gobject_found = False
deps = self.introspect('--dependencies')
self.assertIsInstance(deps, list)
for dep in deps:
self.assertIsInstance(dep, dict)
self.assertIn('name', dep)
self.assertIn('compile_args', dep)
self.assertIn('link_args', dep)
if dep['name'] == 'glib-2.0':
glib_found = True
elif dep['name'] == 'gobject-2.0':
gobject_found = True
self.assertTrue(glib_found)
self.assertTrue(gobject_found)
if subprocess.call(['pkg-config', '--exists', 'glib-2.0 >= 2.56.2']) != 0:
raise unittest.SkipTest('glib >= 2.56.2 needed for the rest')
targets = self.introspect('--targets')
docbook_target = None
for t in targets:
if t['name'] == 'generated-gdbus-docbook':
docbook_target = t
break
self.assertIsInstance(docbook_target, dict)
self.assertEqual(os.path.basename(t['filename'][0]), 'generated-gdbus-doc-' + os.path.basename(t['target_sources'][0]['sources'][0]))
    def test_build_rpath(self):
        if is_cygwin():
            raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
        testdir = os.path.join(self.unit_test_dir, '10 build_rpath')
        self.init(testdir)
        self.build()
        # C program RPATH
        build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
        self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar')
        self.install()
        install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog'))
        self.assertEqual(install_rpath, '/baz')
        # C++ program RPATH
        build_rpath = get_rpath(os.path.join(self.builddir, 'progcxx'))
        self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar')
        self.install()
        install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx'))
        # NOTE(review): expects 'baz' here vs '/baz' for the C program —
        # presumably mirroring the install_rpath values in the test project's
        # meson.build; confirm against that file.
        self.assertEqual(install_rpath, 'baz')
@skip_if_not_base_option('b_sanitize')
def test_pch_with_address_sanitizer(self):
if is_cygwin():
raise unittest.SkipTest('asan not available on Cygwin')
if is_openbsd():
raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
testdir = os.path.join(self.common_test_dir, '13 pch')
self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
self.build()
compdb = self.get_compdb()
for i in compdb:
self.assertIn("-fsanitize=address", i["command"])
    def test_coverage(self):
        # Requires gcovr plus either genhtml or a gcovr new enough to
        # generate HTML with a custom root dir.
        gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
        if not gcovr_exe:
            raise unittest.SkipTest('gcovr not found')
        if not shutil.which('genhtml') and not gcovr_new_rootdir:
            raise unittest.SkipTest('genhtml not found and gcovr is too old')
        if 'clang' in os.environ.get('CC', ''):
            # We need to use llvm-cov instead of gcovr with clang
            raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!')
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir, extra_args=['-Db_coverage=true'])
        self.build()
        self.run_tests()
        self.run_target('coverage-html')
def test_cross_find_program(self):
testdir = os.path.join(self.unit_test_dir, '11 cross prog')
crossfile = tempfile.NamedTemporaryFile(mode='w')
print(os.path.join(testdir, 'some_cross_tool.py'))
crossfile.write(textwrap.dedent('''\
[binaries]
c = '/usr/bin/{1}'
ar = '/usr/bin/ar'
strip = '/usr/bin/ar'
sometool.py = ['{0}']
someothertool.py = '{0}'
[properties]
[host_machine]
system = 'linux'
cpu_family = 'arm'
cpu = 'armv7' # Not sure if correct.
endian = 'little'
''').format(os.path.join(testdir, 'some_cross_tool.py'),
'gcc' if is_sunos() else 'cc'))
crossfile.flush()
self.meson_cross_file = crossfile.name
self.init(testdir)
    def test_reconfigure(self):
        # Running the 'reconfigure' build target must succeed on a project
        # configured with extra options.
        testdir = os.path.join(self.unit_test_dir, '13 reconfigure')
        self.init(testdir, extra_args=['-Db_coverage=true'], default_args=False)
        self.build('reconfigure')
    def test_vala_generated_source_buildir_inside_source_tree(self):
        '''
        Test that valac outputs generated C files in the expected location when
        the builddir is a subdir of the source tree.
        '''
        if not shutil.which('valac'):
            raise unittest.SkipTest('valac not installed.')
        testdir = os.path.join(self.vala_test_dir, '8 generated sources')
        # Copy the sources into the scratch build dir so we can nest a
        # build dir inside them without touching the real source tree.
        newdir = os.path.join(self.builddir, 'srctree')
        shutil.copytree(testdir, newdir)
        testdir = newdir
        # New builddir
        builddir = os.path.join(testdir, 'subdir/_build')
        os.makedirs(builddir, exist_ok=True)
        self.change_builddir(builddir)
        self.init(testdir)
        self.build()
    def test_old_gnome_module_codepaths(self):
        '''
        A lot of code in the GNOME module is conditional on the version of the
        glib tools that are installed, and breakages in the old code can slip
        by once the CI has a newer glib version. So we force the GNOME module
        to pretend that it's running on an ancient glib so the fallback code is
        also tested.
        '''
        testdir = os.path.join(self.framework_test_dir, '7 gnome')
        mesonbuild.modules.gnome.native_glib_version = '2.20'
        env = {'MESON_UNIT_TEST_PRETEND_GLIB_OLD': "1"}
        # finally ensures the module-level override is undone even on failure.
        try:
            self.init(testdir,
                      inprocess=True,
                      override_envvars=env)
            self.build(override_envvars=env)
        finally:
            mesonbuild.modules.gnome.native_glib_version = None
    @skipIfNoPkgconfig
    def test_pkgconfig_usage(self):
        # Install a library with a generated .pc file, then build a dependee
        # project against it purely through pkg-config.
        testdir1 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependency')
        testdir2 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependee')
        if subprocess.call(['pkg-config', '--cflags', 'glib-2.0'],
                           stdout=subprocess.DEVNULL,
                           stderr=subprocess.DEVNULL) != 0:
            raise unittest.SkipTest('Glib 2.0 dependency not available.')
        with tempfile.TemporaryDirectory() as tempdirname:
            self.init(testdir1, extra_args=['--prefix=' + tempdirname, '--libdir=lib'], default_args=False)
            self.install(use_destdir=False)
            shutil.rmtree(self.builddir)
            os.mkdir(self.builddir)
            pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
            self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'libpkgdep.pc')))
            lib_dir = os.path.join(tempdirname, 'lib')
            myenv = os.environ.copy()
            myenv['PKG_CONFIG_PATH'] = pkg_dir
            # Private internal libraries must not leak out.
            pkg_out = subprocess.check_output(['pkg-config', '--static', '--libs', 'libpkgdep'], env=myenv)
            self.assertFalse(b'libpkgdep-int' in pkg_out, 'Internal library leaked out.')
            # Dependencies must not leak to cflags when building only a shared library.
            pkg_out = subprocess.check_output(['pkg-config', '--cflags', 'libpkgdep'], env=myenv)
            self.assertFalse(b'glib' in pkg_out, 'Internal dependency leaked to headers.')
            # Test that the result is usable.
            self.init(testdir2, override_envvars=myenv)
            self.build(override_envvars=myenv)
            myenv = os.environ.copy()
            myenv['LD_LIBRARY_PATH'] = ':'.join([lib_dir, myenv.get('LD_LIBRARY_PATH', '')])
            if is_cygwin():
                # Cygwin looks up DLLs on PATH, not LD_LIBRARY_PATH.
                bin_dir = os.path.join(tempdirname, 'bin')
                myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH']
            self.assertTrue(os.path.isdir(lib_dir))
            test_exe = os.path.join(self.builddir, 'pkguser')
            self.assertTrue(os.path.isfile(test_exe))
            subprocess.check_call(test_exe, env=myenv)
    @skipIfNoPkgconfig
    def test_pkgconfig_relative_paths(self):
        # A .pc file whose -L path is relative must be resolved and the
        # resulting link args returned properly quoted.
        testdir = os.path.join(self.unit_test_dir, '62 pkgconfig relative paths')
        pkg_dir = os.path.join(testdir, 'pkgconfig')
        self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'librelativepath.pc')))
        env = get_fake_env(testdir, self.builddir, self.prefix)
        env.coredata.set_options({'pkg_config_path': pkg_dir}, subproject='')
        kwargs = {'required': True, 'silent': True}
        relative_path_dep = PkgConfigDependency('librelativepath', env, kwargs)
        self.assertTrue(relative_path_dep.found())
        # Ensure link_args are properly quoted
        libpath = Path(self.builddir) / '../relativepath/lib'
        link_args = ['-L' + libpath.as_posix(), '-lrelativepath']
        self.assertEqual(relative_path_dep.get_link_args(), link_args)
    @skipIfNoPkgconfig
    def test_pkgconfig_internal_libraries(self):
        '''
        Install a static library together with its generated pkg-config file,
        then build an application against it using only PKG_CONFIG_PATH.
        '''
        with tempfile.TemporaryDirectory() as tempdirname:
            # build library
            testdirbase = os.path.join(self.unit_test_dir, '32 pkgconfig use libraries')
            testdirlib = os.path.join(testdirbase, 'lib')
            self.init(testdirlib, extra_args=['--prefix=' + tempdirname,
                                              '--libdir=lib',
                                              '--default-library=static'], default_args=False)
            self.build()
            self.install(use_destdir=False)

            # build user of library
            pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
            self.new_builddir()
            self.init(os.path.join(testdirbase, 'app'),
                      override_envvars={'PKG_CONFIG_PATH': pkg_dir})
            self.build()
@skipIfNoPkgconfig
def test_static_archive_stripping(self):
    '''
    Check that Meson produces valid static archives with --strip enabled
    '''
    with tempfile.TemporaryDirectory() as tempdirname:
        testdirbase = os.path.join(self.unit_test_dir, '68 static archive stripping')
        # build lib
        self.new_builddir()
        testdirlib = os.path.join(testdirbase, 'lib')
        testlibprefix = os.path.join(tempdirname, 'libprefix')
        self.init(testdirlib, extra_args=['--prefix=' + testlibprefix,
                                          '--libdir=lib',
                                          '--default-library=static',
                                          '--buildtype=debug',
                                          '--strip'], default_args=False)
        self.build()
        self.install(use_destdir=False)
        # build executable (uses lib, fails if static archive has been stripped incorrectly)
        pkg_dir = os.path.join(testlibprefix, 'lib/pkgconfig')
        self.new_builddir()
        self.init(os.path.join(testdirbase, 'app'),
                  override_envvars={'PKG_CONFIG_PATH': pkg_dir})
        self.build()
@skipIfNoPkgconfig
def test_pkgconfig_formatting(self):
    # Check that the generated pkg-config file lists exactly the expected
    # -l flags.
    testdir = os.path.join(self.unit_test_dir, '38 pkgconfig format')
    self.init(testdir)
    myenv = os.environ.copy()
    myenv['PKG_CONFIG_PATH'] = self.privatedir
    stdo = subprocess.check_output(['pkg-config', '--libs-only-l', 'libsomething'], env=myenv)
    deps = [b'-lgobject-2.0', b'-lgio-2.0', b'-lglib-2.0', b'-lsomething']
    if is_windows() or is_cygwin() or is_osx() or is_openbsd():
        # On these platforms libintl is a separate library that has to be
        # linked explicitly (on glibc systems it is part of libc).
        deps.append(b'-lintl')
    self.assertEqual(set(deps), set(stdo.split()))
@skipIfNoPkgconfig
@skip_if_not_language('cs')
def test_pkgconfig_csharp_library(self):
    # For a C# library the generated pkg-config file is expected to emit an
    # assembly reference flag (-r...) rather than a C-style -l flag.
    testdir = os.path.join(self.unit_test_dir, '50 pkgconfig csharp library')
    self.init(testdir)
    myenv = os.environ.copy()
    myenv['PKG_CONFIG_PATH'] = self.privatedir
    stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv)
    self.assertEqual("-r/usr/lib/libsomething.dll", str(stdo.decode('ascii')).strip())
@skipIfNoPkgconfig
def test_pkgconfig_link_order(self):
    '''
    Test that libraries are listed before their dependencies.
    '''
    testdir = os.path.join(self.unit_test_dir, '53 pkgconfig static link order')
    self.init(testdir)
    myenv = os.environ.copy()
    myenv['PKG_CONFIG_PATH'] = self.privatedir
    stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv)
    deps = stdo.split()
    # A static link line is resolved left to right, so the dependent
    # library must appear before the library it depends on.
    self.assertTrue(deps.index(b'-lsomething') < deps.index(b'-ldependency'))
def test_deterministic_dep_order(self):
    '''
    Test that the dependencies are always listed in a deterministic order.
    '''
    testdir = os.path.join(self.unit_test_dir, '43 dep order')
    self.init(testdir)
    with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
        for line in bfile:
            if 'build myexe:' in line or 'build myexe.exe:' in line:
                # Both static libs must be present on the rule in this
                # exact (deterministic) order.
                self.assertIn('liblib1.a liblib2.a', line)
                return
    # Fail loudly instead of passing vacuously when the rule is missing.
    raise RuntimeError('Could not find the build rule')
def test_deterministic_rpath_order(self):
    '''
    Test that the rpaths are always listed in a deterministic order.
    '''
    if is_cygwin():
        raise unittest.SkipTest('rpath are not used on Cygwin')
    testdir = os.path.join(self.unit_test_dir, '42 rpath order')
    self.init(testdir)
    if is_osx():
        rpathre = re.compile(r'-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2')
    else:
        # '$$' is ninja's escape for a literal '$' in $ORIGIN.
        rpathre = re.compile(r'-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2')
    with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
        for line in bfile:
            if '-rpath' in line:
                self.assertRegex(line, rpathre)
                return
    # Fail loudly instead of passing vacuously when no rpath is emitted.
    raise RuntimeError('Could not find the rpath')
def test_override_with_exe_dep(self):
    '''
    Test that we produce the correct dependencies when a program is
    overridden with an executable: every rule generating from main1.c /
    main2.c must carry an order-only dependency ('| ...') on the
    overriding executable.
    '''
    testdir = os.path.join(self.common_test_dir, '201 override with exe')
    self.init(testdir)
    found = 0
    with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
        for line in bfile:
            if 'main1.c:' in line or 'main2.c:' in line:
                self.assertIn('| subprojects/sub/foobar', line)
                found += 1
    # Consistent with the other build.ninja-scanning tests
    # (test_deterministic_dep_order etc.): fail loudly instead of passing
    # vacuously when no matching rule was found at all.
    if found == 0:
        raise RuntimeError('Could not find the build rules for main1.c/main2.c')
@skipIfNoPkgconfig
def test_usage_external_library(self):
    '''
    Test that uninstalled usage of an external library (from the system or
    PkgConfigDependency) works. On macOS, this workflow works out of the
    box. On Linux, BSDs, Windows, etc, you need to set extra arguments such
    as LD_LIBRARY_PATH, etc, so this test is skipped.
    The system library is found with cc.find_library() and pkg-config deps.
    '''
    oldprefix = self.prefix
    # Install external library so we can find it
    testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'external library')
    # install into installdir without using DESTDIR
    installdir = self.installdir
    # Temporarily point prefix at installdir so the library installs there;
    # restore the original prefix right after configuring.
    self.prefix = installdir
    self.init(testdir)
    self.prefix = oldprefix
    self.build()
    self.install(use_destdir=False)
    ## New builddir for the consumer
    self.new_builddir()
    env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir),
           'PKG_CONFIG_PATH': os.path.join(installdir, self.libdir, 'pkgconfig')}
    testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'built library')
    # install into installdir without using DESTDIR
    self.prefix = self.installdir
    self.init(testdir, override_envvars=env)
    self.prefix = oldprefix
    self.build(override_envvars=env)
    # test uninstalled
    self.run_tests(override_envvars=env)
    if not is_osx():
        # Rest of the workflow only works on macOS
        return
    # test running after installation
    self.install(use_destdir=False)
    prog = os.path.join(self.installdir, 'bin', 'prog')
    self._run([prog])
    out = self._run(['otool', '-L', prog])
    self.assertNotIn('@rpath', out)
    ## New builddir for testing that DESTDIR is not added to install_name
    self.new_builddir()
    # install into installdir with DESTDIR
    self.init(testdir, override_envvars=env)
    self.build(override_envvars=env)
    # test running after installation
    self.install(override_envvars=env)
    prog = self.installdir + os.path.join(self.prefix, 'bin', 'prog')
    lib = self.installdir + os.path.join(self.prefix, 'lib', 'libbar_built.dylib')
    for f in prog, lib:
        out = self._run(['otool', '-L', f])
        # Ensure that the otool output does not contain self.installdir
        self.assertNotRegex(out, self.installdir + '.*dylib ')
def install_subdir_invalid_symlinks(self, testdir, subdir_path):
    '''
    Test that installation of broken symlinks works fine.
    https://github.com/mesonbuild/meson/issues/3914

    testdir:     name of a directory under self.common_test_dir
    subdir_path: path, relative to testdir, in which to create the
                 broken symlink
    '''
    testdir = os.path.join(self.common_test_dir, testdir)
    subdir = os.path.join(testdir, subdir_path)
    curdir = os.getcwd()
    os.chdir(subdir)
    # Can't distribute broken symlinks in the source tree because it breaks
    # the creation of zipapps. Create it dynamically and run the test by
    # hand.
    src = '../../nonexistent.txt'  # target does not exist -> broken link
    os.symlink(src, 'invalid-symlink.txt')
    try:
        self.init(testdir)
        self.build()
        self.install()
        install_path = subdir_path.split(os.path.sep)[-1]
        link = os.path.join(self.installdir, 'usr', 'share', install_path, 'invalid-symlink.txt')
        # The link must be installed as a link, pointing at the same
        # (nonexistent) target, i.e. still broken after installation.
        self.assertTrue(os.path.islink(link), msg=link)
        self.assertEqual(src, os.readlink(link))
        self.assertFalse(os.path.isfile(link), msg=link)
    finally:
        # Always restore the source tree and the working directory.
        os.remove(os.path.join(subdir, 'invalid-symlink.txt'))
        os.chdir(curdir)
def test_install_subdir_symlinks(self):
    # Broken symlink inside an install_subdir()-installed directory.
    self.install_subdir_invalid_symlinks('62 install subdir', os.path.join('sub', 'sub1'))
def test_install_subdir_symlinks_with_default_umask(self):
    # Same check against the install_mode test project (default umask case).
    self.install_subdir_invalid_symlinks('195 install_mode', 'sub2')
def test_install_subdir_symlinks_with_default_umask_and_mode(self):
    # Same check against the install_mode test project (umask + explicit mode).
    self.install_subdir_invalid_symlinks('195 install_mode', 'sub1')
@skipIfNoPkgconfigDep('gmodule-2.0')
def test_ldflag_dedup(self):
    # -Wl,--export-dynamic must survive deduplication but appear at most
    # once on any single link line.
    testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup')
    if is_cygwin() or is_osx():
        raise unittest.SkipTest('Not applicable on Cygwin or OSX.')
    self.init(testdir)
    build_ninja = os.path.join(self.builddir, 'build.ninja')
    max_count = 0
    search_term = '-Wl,--export-dynamic'
    with open(build_ninja, 'r', encoding='utf-8') as f:
        for line in f:
            max_count = max(max_count, line.count(search_term))
    # max_count == 1 means the flag is present (not dropped) and never
    # repeated on the same line.
    self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.')
def test_compiler_libs_static_dedup(self):
    # Compiler-internal libraries (-ldl, -lm, ...) must not be repeated on
    # a single link line.
    testdir = os.path.join(self.unit_test_dir, '56 dedup compiler libs')
    self.init(testdir)
    build_ninja = os.path.join(self.builddir, 'build.ninja')
    with open(build_ninja, 'r', encoding='utf-8') as f:
        lines = f.readlines()
    for lib in ('-ldl', '-lm', '-lc', '-lrt'):
        for line in lines:
            if lib not in line:
                continue
            # Assert that the library occurs exactly once on this line:
            # splitting on it yields exactly two pieces only for a single
            # occurrence.
            self.assertEqual(len(line.split(lib)), 2, msg=(lib, line))
@skipIfNoPkgconfig
def test_noncross_options(self):
    # C_std defined in project options must be in effect also when native compiling.
    testdir = os.path.join(self.unit_test_dir, '51 noncross options')
    self.init(testdir, extra_args=['-Dpkg_config_path=' + testdir])
    compdb = self.get_compdb()
    # Exactly two sources, and both must be compiled with the project std.
    self.assertEqual(len(compdb), 2)
    self.assertRegex(compdb[0]['command'], '-std=c99')
    self.assertRegex(compdb[1]['command'], '-std=c99')
    self.build()
def test_identity_cross(self):
    # "Cross" compile where build and host machines are identical, but use
    # different compiler wrappers for each (CC for the build machine, the
    # cross file for the host machine).
    testdir = os.path.join(self.unit_test_dir, '61 identity cross')
    # NOTE(review): the NamedTemporaryFile is intentionally kept referenced
    # (never closed) so the cross file survives until init() reads it; this
    # relies on the object's lifetime and would not be re-openable on
    # Windows while still open — confirm this test never runs there.
    crossfile = tempfile.NamedTemporaryFile(mode='w')
    env = {'CC': '"' + os.path.join(testdir, 'build_wrapper.py') + '"'}
    crossfile.write('''[binaries]
c = ['{0}']
'''.format(os.path.join(testdir, 'host_wrapper.py')))
    crossfile.flush()
    self.meson_cross_file = crossfile.name
    # TODO should someday be explicit about build platform only here
    self.init(testdir, override_envvars=env)
@skipIfNoPkgconfig
def test_static_link(self):
    if is_cygwin():
        raise unittest.SkipTest("Cygwin doesn't support LD_LIBRARY_PATH.")
    # Build some libraries and install them
    testdir = os.path.join(self.unit_test_dir, '69 static link/lib')
    libdir = os.path.join(self.installdir, self.libdir)
    oldprefix = self.prefix
    # Point the prefix at installdir for the library build, then restore it
    # for the consumer build below.
    self.prefix = self.installdir
    self.init(testdir)
    self.install(use_destdir=False)
    # Test that installed libraries works
    self.new_builddir()
    self.prefix = oldprefix
    meson_args = ['-Dc_link_args=-L{}'.format(libdir),
                  '--fatal-meson-warnings']
    testdir = os.path.join(self.unit_test_dir, '69 static link')
    env = {'PKG_CONFIG_LIBDIR': os.path.join(libdir, 'pkgconfig')}
    self.init(testdir, extra_args=meson_args, override_envvars=env)
    self.build()
    self.run_tests()
def _check_ld(self, check: str, name: str, lang: str, expected: str) -> None:
    """Set the <LANG>_ld environment variable to *name* and verify that the
    detected linker's id equals *expected*.

    check:    executable that must exist on PATH for the test to make sense
    name:     value to put into the per-language linker env var
    lang:     language whose compiler/linker detection is exercised
    expected: linker id the detected compiler's linker must report
    """
    if is_sunos():
        raise unittest.SkipTest('Solaris currently cannot override the linker.')
    if shutil.which(check) is None:
        raise unittest.SkipTest('Could not find {}.'.format(check))
    envvar = mesonbuild.envconfig.BinaryTable.evarMap['{}_ld'.format(lang)]
    with mock.patch.dict(os.environ, {envvar: name}):
        env = get_fake_env()
        detector = getattr(env, 'detect_{}_compiler'.format(lang))
        comp = detector(MachineChoice.HOST)
        # Rust always accepts a linker choice; other languages may not.
        if lang != 'rust' and comp.use_linker_args('foo') == []:
            raise unittest.SkipTest(
                'Compiler {} does not support using alternative linkers'.format(comp.id))
        self.assertEqual(comp.linker.id, expected)
# The following tests exercise linker selection through the per-language
# <LANG>_ld environment variables; they all delegate to _check_ld().

def test_ld_environment_variable_bfd(self):
    self._check_ld('ld.bfd', 'bfd', 'c', 'ld.bfd')

def test_ld_environment_variable_gold(self):
    self._check_ld('ld.gold', 'gold', 'c', 'ld.gold')

def test_ld_environment_variable_lld(self):
    self._check_ld('ld.lld', 'lld', 'c', 'ld.lld')

@skipIfNoExecutable('rustc')
def test_ld_environment_variable_rust(self):
    self._check_ld('ld.gold', 'gold', 'rust', 'ld.gold')

def test_ld_environment_variable_cpp(self):
    self._check_ld('ld.gold', 'gold', 'cpp', 'ld.gold')

def test_ld_environment_variable_objc(self):
    self._check_ld('ld.gold', 'gold', 'objc', 'ld.gold')

def test_ld_environment_variable_objcpp(self):
    self._check_ld('ld.gold', 'gold', 'objcpp', 'ld.gold')

@skipIfNoExecutable('gfortran')
def test_ld_environment_variable_fortran(self):
    self._check_ld('ld.gold', 'gold', 'fortran', 'ld.gold')
def compute_sha256(self, filename):
    """Return the hex-encoded SHA-256 digest of *filename*'s contents."""
    digest = hashlib.sha256()
    # Feed the file in chunks instead of slurping it whole.
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()
def test_wrap_with_file_url(self):
    '''
    Test that a [wrap-file] whose source and patch URLs are file:// URLs is
    fetched and applied correctly. The .wrap file is generated on the fly
    so its hashes always match the checked-in tarballs.
    '''
    testdir = os.path.join(self.unit_test_dir, '73 wrap file url')
    source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz')
    patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz')
    wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap')
    source_hash = self.compute_sha256(source_filename)
    patch_hash = self.compute_sha256(patch_filename)
    wrap = textwrap.dedent("""\
        [wrap-file]
        directory = foo
        source_url = file://{}
        source_filename = foo.tar.xz
        source_hash = {}
        patch_url = file://{}
        patch_filename = foo-patch.tar.xz
        patch_hash = {}
        """.format(source_filename, source_hash, patch_filename, patch_hash))
    with open(wrap_filename, 'w') as f:
        f.write(wrap)
    try:
        self.init(testdir)
        self.build()
        self.run_tests()
    finally:
        # Clean up even when configure/build/tests fail, so the source tree
        # is not left dirty for subsequent runs (the original version only
        # cleaned up on success).
        windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'packagecache'))
        windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo'))
        os.unlink(wrap_filename)
def should_run_cross_arm_tests():
    """Whether ARM cross tests make sense here: the armhf cross compiler must
    be on PATH and the build machine must not itself be ARM (that would be
    native compilation)."""
    cross_cc = shutil.which('arm-linux-gnueabihf-gcc')
    if not cross_cc:
        # Preserve the falsy which() result for truthiness-only callers.
        return cross_cc
    return not platform.machine().lower().startswith('arm')
@unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM")
class LinuxCrossArmTests(BasePlatformTests):
    '''
    Tests that cross-compilation to Linux/ARM works
    '''

    def setUp(self):
        super().setUp()
        # Every test in this class uses the checked-in armhf cross file.
        src_root = os.path.dirname(__file__)
        self.meson_cross_file = os.path.join(src_root, 'cross', 'ubuntu-armhf.txt')

    def test_cflags_cross_environment_pollution(self):
        '''
        Test that the CFLAGS environment variable does not pollute the cross
        environment. This can't be an ordinary test case because we need to
        inspect the compiler database.
        '''
        testdir = os.path.join(self.common_test_dir, '3 static')
        self.init(testdir, override_envvars={'CFLAGS': '-DBUILD_ENVIRONMENT_ONLY'})
        compdb = self.get_compdb()
        self.assertNotIn('-DBUILD_ENVIRONMENT_ONLY', compdb[0]['command'])

    def test_cross_file_overrides_always_args(self):
        '''
        Test that $lang_args in cross files always override get_always_args().
        Needed for overriding the default -D_FILE_OFFSET_BITS=64 on some
        architectures such as some Android versions and Raspbian.
        https://github.com/mesonbuild/meson/issues/3049
        https://github.com/mesonbuild/meson/issues/3089
        '''
        testdir = os.path.join(self.unit_test_dir, '33 cross file overrides always args')
        self.meson_cross_file = os.path.join(testdir, 'ubuntu-armhf-overrides.txt')
        self.init(testdir)
        compdb = self.get_compdb()
        # The cross-file -U must come after the built-in -D so that it wins.
        self.assertRegex(compdb[0]['command'], '-D_FILE_OFFSET_BITS=64.*-U_FILE_OFFSET_BITS')
        self.build()

    def test_cross_libdir(self):
        # When cross compiling "libdir" should default to "lib"
        # rather than "lib/x86_64-linux-gnu" or something like that.
        testdir = os.path.join(self.common_test_dir, '1 trivial')
        self.init(testdir)
        for i in self.introspect('--buildoptions'):
            if i['name'] == 'libdir':
                self.assertEqual(i['value'], 'lib')
                return
        # self.fail() expresses "expected data not found" directly;
        # the previous assertTrue(False, msg) obscured the intent.
        self.fail('Option libdir not in introspect data.')

    def test_std_remains(self):
        # C_std defined in project options must be in effect also when cross compiling.
        testdir = os.path.join(self.unit_test_dir, '51 noncross options')
        self.init(testdir)
        compdb = self.get_compdb()
        self.assertRegex(compdb[0]['command'], '-std=c99')
        self.build()

    @skipIfNoPkgconfig
    def test_pkg_config_option(self):
        # Needs a pkg-config binary for the ARM host machine as well.
        if not shutil.which('arm-linux-gnueabihf-pkg-config'):
            raise unittest.SkipTest('Cross-pkgconfig not found.')
        testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option')
        self.init(testdir, extra_args=[
            '-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'),
            '-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'),
        ])
def should_run_cross_mingw_tests():
    """Whether MinGW cross tests make sense here: the mingw-w64 cross
    compiler must be on PATH and the build machine must not be Windows or
    Cygwin (there the build would effectively be native)."""
    cross_cc = shutil.which('x86_64-w64-mingw32-gcc')
    if not cross_cc:
        # Preserve the falsy which() result for truthiness-only callers.
        return cross_cc
    return not (is_windows() or is_cygwin())
@unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW")
class LinuxCrossMingwTests(BasePlatformTests):
    '''
    Tests that cross-compilation to Windows/MinGW works
    '''

    def setUp(self):
        super().setUp()
        # Every test in this class uses the checked-in mingw-w64 cross file.
        src_root = os.path.dirname(__file__)
        self.meson_cross_file = os.path.join(src_root, 'cross', 'linux-mingw-w64-64bit.txt')

    def test_exe_wrapper_behaviour(self):
        '''
        Test that an exe wrapper that isn't found doesn't cause compiler sanity
        checks and compiler checks to fail, but causes configure to fail if it
        requires running a cross-built executable (custom_target or run_target)
        and causes the tests to be skipped if they are run.
        '''
        testdir = os.path.join(self.unit_test_dir, '36 exe_wrapper behaviour')
        # Configures, builds, and tests fine by default
        self.init(testdir)
        self.build()
        self.run_tests()
        self.wipe()
        os.mkdir(self.builddir)
        # Change cross file to use a non-existing exe_wrapper and it should fail
        self.meson_cross_file = os.path.join(testdir, 'broken-cross.txt')
        # Force tracebacks so we can detect them properly
        env = {'MESON_FORCE_BACKTRACE': '1'}
        with self.assertRaisesRegex(MesonException, 'exe_wrapper.*target.*use-exe-wrapper'):
            # Must run in-process or we'll get a generic CalledProcessError
            self.init(testdir, extra_args='-Drun-target=false',
                      inprocess=True,
                      override_envvars=env)
        with self.assertRaisesRegex(MesonException, 'exe_wrapper.*run target.*run-prog'):
            # Must run in-process or we'll get a generic CalledProcessError
            self.init(testdir, extra_args='-Dcustom-target=false',
                      inprocess=True,
                      override_envvars=env)
        # With both the custom_target and the run_target disabled, configure
        # and build must succeed even without the exe wrapper.
        self.init(testdir, extra_args=['-Dcustom-target=false', '-Drun-target=false'],
                  override_envvars=env)
        self.build()
        with self.assertRaisesRegex(MesonException, 'exe_wrapper.*PATH'):
            # Must run in-process or we'll get a generic CalledProcessError
            self.run_tests(inprocess=True, override_envvars=env)

    @skipIfNoPkgconfig
    def test_cross_pkg_config_option(self):
        # Separate pkg_config_path options for the build and host machines.
        testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option')
        self.init(testdir, extra_args=[
            '-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'),
            '-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'),
        ])
class PythonTests(BasePlatformTests):
    '''
    Tests that verify compilation of python extension modules
    '''

    def test_versions(self):
        # Only implemented for the ninja backend.
        if self.backend is not Backend.ninja:
            raise unittest.SkipTest('Skipping python tests with {} backend'.format(self.backend.name))
        testdir = os.path.join(self.src_root, 'test cases', 'unit', '39 python extmodule')
        # No python version specified, this will use meson's python
        self.init(testdir)
        self.build()
        self.run_tests()
        self.wipe()
        # When specifying a known name, (python2 / python3) the module
        # will also try 'python' as a fallback and use it if the major
        # version matches
        try:
            self.init(testdir, extra_args=['-Dpython=python2'])
            self.build()
            self.run_tests()
        except unittest.SkipTest:
            # python2 is not necessarily installed on the test machine,
            # if it is not, or the python headers can't be found, the test
            # will raise MESON_SKIP_TEST, we could check beforehand what version
            # of python is available, but it's a bit of a chicken and egg situation,
            # as that is the job of the module, so we just ask for forgiveness rather
            # than permission.
            pass
        self.wipe()
        for py in ('pypy', 'pypy3'):
            try:
                self.init(testdir, extra_args=['-Dpython=%s' % py])
            except unittest.SkipTest:
                # Same as above, pypy2 and pypy3 are not expected to be present
                # on the test system, the test project only raises in these cases
                continue
            # We have a pypy, this is expected to work
            self.build()
            self.run_tests()
            self.wipe()
        # The test is configured to error out with MESON_SKIP_TEST
        # in case it could not find python
        with self.assertRaises(unittest.SkipTest):
            self.init(testdir, extra_args=['-Dpython=not-python'])
        self.wipe()
        # While dir is an external command on both Windows and Linux,
        # it certainly isn't python
        with self.assertRaises(unittest.SkipTest):
            self.init(testdir, extra_args=['-Dpython=dir'])
        self.wipe()
class RewriterTests(BasePlatformTests):
    '''
    Tests for the meson rewriter (driven through self.rewrite_command):
    each test copies a fixture project into the build dir, applies a JSON
    command file, and compares the rewriter's JSON output (on stderr)
    against the expected structure.
    '''

    def setUp(self):
        super().setUp()
        self.maxDiff = None

    def prime(self, dirname):
        # Copy the fixture project into the (scratch) build directory.
        # NOTE(review): copy_tree presumably comes from distutils.dir_util,
        # which is deprecated — confirm against the file's imports.
        copy_tree(os.path.join(self.rewrite_test_dir, dirname), self.builddir)

    def rewrite_raw(self, directory, args):
        # Run the rewriter; machine-readable results come back on stderr,
        # human-readable logging on stdout.
        if isinstance(args, str):
            args = [args]
        command = self.rewrite_command + ['--verbose', '--skip', '--sourcedir', directory] + args
        p = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                           universal_newlines=True, timeout=60)
        print('STDOUT:')
        print(p.stdout)
        print('STDERR:')
        print(p.stderr)
        if p.returncode != 0:
            if 'MESON_SKIP_TEST' in p.stdout:
                raise unittest.SkipTest('Project requested skipping.')
            raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
        if not p.stderr:
            return {}
        return json.loads(p.stderr)

    def rewrite(self, directory, args):
        # Convenience wrapper: prepend the 'command' subcommand.
        if isinstance(args, str):
            args = [args]
        return self.rewrite_raw(directory, ['command'] + args)

    def test_target_source_list(self):
        self.prime('1 basic')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

    def test_target_add_sources(self):
        self.prime('1 basic')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

        # Check the written file
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(out, expected)

    def test_target_add_sources_abs(self):
        self.prime('1 basic')
        abs_src = [os.path.join(self.builddir, x) for x in ['a1.cpp', 'a2.cpp', 'a6.cpp']]
        add = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "src_add", "sources": abs_src}])
        inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}])
        self.rewrite(self.builddir, add)
        out = self.rewrite(self.builddir, inf)
        # Absolute paths must be rewritten back to source-relative ones.
        expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}}}
        self.assertDictEqual(out, expected)

    def test_target_remove_sources(self):
        self.prime('1 basic')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

        # Check the written file
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(out, expected)

    def test_target_subdir(self):
        self.prime('2 subdirs')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
        expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c']}
        self.assertDictEqual(list(out['target'].values())[0], expected)

        # Check the written file
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(list(out['target'].values())[0], expected)

    def test_target_remove(self):
        self.prime('1 basic')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        # trivialprog0/1/9 were removed by rmTgt.json.
        expected = {
            'target': {
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

    # NOTE(review): method name has a typo ('tatrget' for 'target'); kept
    # as-is because renaming would change which tests run under name-based
    # selection (e.g. `run_unittests.py RewriterTests.test_tatrget_add`).
    def test_tatrget_add(self):
        self.prime('1 basic')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'target': {
                'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
                'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
                'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
                'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
                'trivialprog10@sha': {'name': 'trivialprog10', 'sources': ['new1.cpp', 'new2.cpp']},
            }
        }
        self.assertDictEqual(out, expected)

    def test_target_remove_subdir(self):
        self.prime('2 subdirs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        self.assertDictEqual(out, {})

    def test_target_add_subdir(self):
        self.prime('2 subdirs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {'name': 'something', 'sources': ['first.c', 'second.c']}
        self.assertDictEqual(out['target']['94b671c@@something@exe'], expected)

    def test_target_source_sorting(self):
        self.prime('5 sorting')
        add_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'src_add', 'sources': ['a666.c']}])
        inf_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'info'}])
        out = self.rewrite(self.builddir, add_json)
        out = self.rewrite(self.builddir, inf_json)
        # Sources must come back sorted: directories before plain files,
        # with natural (numeric-aware) ordering inside each group.
        expected = {
            'target': {
                'exe1@exe': {
                    'name': 'exe1',
                    'sources': [
                        'aaa/a/a1.c',
                        'aaa/b/b1.c',
                        'aaa/b/b2.c',
                        'aaa/f1.c',
                        'aaa/f2.c',
                        'aaa/f3.c',
                        'bbb/a/b1.c',
                        'bbb/b/b2.c',
                        'bbb/c1/b5.c',
                        'bbb/c2/b7.c',
                        'bbb/c10/b6.c',
                        'bbb/a4.c',
                        'bbb/b3.c',
                        'bbb/b4.c',
                        'bbb/b5.c',
                        'a1.c',
                        'a2.c',
                        'a3.c',
                        'a10.c',
                        'a20.c',
                        'a30.c',
                        'a100.c',
                        'a101.c',
                        'a110.c',
                        'a210.c',
                        'a666.c',
                        'b1.c',
                        'c2.c'
                    ]
                }
            }
        }
        self.assertDictEqual(out, expected)

    def test_target_same_name_skip(self):
        self.prime('4 same name targets')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        # With --skip, ambiguous same-name targets are left untouched.
        expected = {'name': 'myExe', 'sources': ['main.cpp']}
        self.assertEqual(len(out['target']), 2)
        for val in out['target'].values():
            self.assertDictEqual(expected, val)

    def test_kwargs_info(self):
        self.prime('3 kwargs')
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1'},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_set(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'set.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.2', 'meson_version': '0.50.0', 'license': ['GPL', 'MIT']},
                'target#tgt1': {'build_by_default': False, 'build_rpath': '/usr/local', 'dependencies': 'dep1'},
                'dependency#dep1': {'required': True, 'method': 'cmake'}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_add(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'add.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'license': ['GPL', 'MIT', 'BSD']},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_remove(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'remove.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'license': 'GPL'},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_remove_regex(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'remove_regex.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=true']},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_kwargs_delete(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'delete.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {},
                'target#tgt1': {},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_default_options_set(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_set.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=True', 'cpp_std=c++11']},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)

    def test_default_options_delete(self):
        self.prime('3 kwargs')
        self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_delete.json'))
        out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
        expected = {
            'kwargs': {
                'project#/': {'version': '0.0.1', 'default_options': ['cpp_std=c++14', 'debug=true']},
                'target#tgt1': {'build_by_default': True},
                'dependency#dep1': {'required': False}
            }
        }
        self.assertDictEqual(out, expected)
class NativeFileTests(BasePlatformTests):
    """Tests for native-file handling (overriding binaries, compilers and
    directories through ``--native-file``).

    Most tests work by generating a small wrapper script that impersonates a
    binary (overriding e.g. its reported version) and pointing a generated
    native file at that wrapper.
    """

    def setUp(self):
        super().setUp()
        self.testcase = os.path.join(self.unit_test_dir, '47 native file binary')
        # Counters used to give every generated config / wrapper a unique name.
        self.current_config = 0
        self.current_wrapper = 0

    def helper_create_native_file(self, values):
        """Create a config file as a temporary file.

        values should be a nested dictionary structure of {section: {key:
        value}}
        """
        filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config))
        self.current_config += 1
        with open(filename, 'wt') as f:
            for section, entries in values.items():
                f.write('[{}]\n'.format(section))
                for k, v in entries.items():
                    f.write("{}='{}'\n".format(k, v))
        return filename

    def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs):
        """Creates a wrapper around a binary that overrides specific values.

        The generated script answers ``--<name>`` flags (from extra_args and
        kwargs) with canned output and forwards everything else to the real
        binary.  Returns the path of the wrapper (a .bat shim on Windows).
        """
        filename = os.path.join(dir_ or self.builddir, 'binary_wrapper{}.py'.format(self.current_wrapper))
        extra_args = extra_args or {}
        self.current_wrapper += 1
        # Haiku keeps env in /bin instead of /usr/bin.
        if is_haiku():
            chbang = '#!/bin/env python3'
        else:
            chbang = '#!/usr/bin/env python3'

        with open(filename, 'wt') as f:
            f.write(textwrap.dedent('''\
                {}
                import argparse
                import subprocess
                import sys

                def main():
                    parser = argparse.ArgumentParser()
                '''.format(chbang)))
            # One store_true option per overridden flag name.
            for name in chain(extra_args, kwargs):
                f.write('    parser.add_argument("-{0}", "--{0}", action="store_true")\n'.format(name))
            f.write('    args, extra_args = parser.parse_known_args()\n')
            # When an overridden flag is passed, print the canned value
            # (to stdout unless outfile says otherwise) and exit.
            for name, value in chain(extra_args.items(), kwargs.items()):
                f.write('    if args.{}:\n'.format(name))
                f.write('        print("{}", file=sys.{})\n'.format(value, kwargs.get('outfile', 'stdout')))
                f.write('        sys.exit(0)\n')
            # Otherwise delegate to the wrapped binary, mirroring its output
            # and exit status.
            f.write(textwrap.dedent('''
                    ret = subprocess.run(
                        ["{}"] + extra_args,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE)
                    print(ret.stdout.decode('utf-8'))
                    print(ret.stderr.decode('utf-8'), file=sys.stderr)
                    sys.exit(ret.returncode)

                if __name__ == '__main__':
                    main()
                '''.format(binary)))
        if not is_windows():
            os.chmod(filename, 0o755)
            return filename

        # On windows we need yet another level of indirection, as cmd cannot
        # invoke python files itself, so instead we generate a .bat file, which
        # invokes our python wrapper
        batfile = os.path.join(self.builddir, 'binary_wrapper{}.bat'.format(self.current_wrapper))
        with open(batfile, 'wt') as f:
            f.write(r'@{} {} %*'.format(sys.executable, filename))
        return batfile

    def helper_for_compiler(self, lang, cb, for_machine = MachineChoice.HOST):
        """Helper for generating tests for overriding compilers for languages
        with more than one implementation, such as C, C++, ObjC, ObjC++, and D.

        cb receives the detected default compiler and returns the
        (binary, expected_id) pair to override with.
        """
        env = get_fake_env()
        getter = getattr(env, 'detect_{}_compiler'.format(lang))
        getter = functools.partial(getter, for_machine)
        cc = getter()
        binary, newid = cb(cc)
        env.binaries[for_machine].binaries[lang] = binary
        compiler = getter()
        self.assertEqual(compiler.id, newid)

    def test_multiple_native_files_override(self):
        # The later native file must win: the 12345 wrapper overrides 'foo'.
        wrapper = self.helper_create_binary_wrapper('bash', version='foo')
        config = self.helper_create_native_file({'binaries': {'bash': wrapper}})
        wrapper = self.helper_create_binary_wrapper('bash', version='12345')
        config2 = self.helper_create_native_file({'binaries': {'bash': wrapper}})
        self.init(self.testcase, extra_args=[
            '--native-file', config, '--native-file', config2,
            '-Dcase=find_program'])

    # This test hangs on cygwin.
    @unittest.skipIf(os.name != 'posix' or is_cygwin(), 'Uses fifos, which are not available on non Unix OSes.')
    def test_native_file_is_pipe(self):
        # Native files should be readable even when they are not seekable
        # (here: a fifo fed from a background thread).
        fifo = os.path.join(self.builddir, 'native.file')
        os.mkfifo(fifo)
        with tempfile.TemporaryDirectory() as d:
            wrapper = self.helper_create_binary_wrapper('bash', d, version='12345')
            def filler():
                with open(fifo, 'w') as f:
                    f.write('[binaries]\n')
                    f.write("bash = '{}'\n".format(wrapper))
            thread = threading.Thread(target=filler)
            thread.start()
            self.init(self.testcase, extra_args=['--native-file', fifo, '-Dcase=find_program'])
            thread.join()
            os.unlink(fifo)
            # Reconfiguring after the fifo is gone must still work (the
            # contents were cached).
            self.init(self.testcase, extra_args=['--wipe'])

    def test_multiple_native_files(self):
        # Overrides from several native files must be merged.
        wrapper = self.helper_create_binary_wrapper('bash', version='12345')
        config = self.helper_create_native_file({'binaries': {'bash': wrapper}})
        wrapper = self.helper_create_binary_wrapper('python')
        config2 = self.helper_create_native_file({'binaries': {'python': wrapper}})
        self.init(self.testcase, extra_args=[
            '--native-file', config, '--native-file', config2,
            '-Dcase=find_program'])

    def _simple_test(self, case, binary):
        # Common driver: wrap `binary`, declare it in a native file, then
        # configure the testcase with the given -Dcase value.
        wrapper = self.helper_create_binary_wrapper(binary, version='12345')
        config = self.helper_create_native_file({'binaries': {binary: wrapper}})
        self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)])

    def test_find_program(self):
        self._simple_test('find_program', 'bash')

    def test_config_tool_dep(self):
        # Do the skip at this level to avoid screwing up the cache
        if mesonbuild.environment.detect_msys2_arch():
            raise unittest.SkipTest('Skipped due to problems with LLVM on MSYS2')
        if not shutil.which('llvm-config'):
            raise unittest.SkipTest('No llvm-installed, cannot test')
        self._simple_test('config_dep', 'llvm-config')

    def test_python3_module(self):
        self._simple_test('python3', 'python3')

    def test_python_module(self):
        if is_windows():
            # Bat adds extra crap to stdout, so the version check logic in the
            # python module breaks. This is fine on other OSes because they
            # don't need the extra indirection.
            raise unittest.SkipTest('bat indirection breaks internal sanity checks.')
        if os.path.exists('/etc/debian_version'):
            rc = subprocess.call(['pkg-config', '--cflags', 'python2'],
                                 stdout=subprocess.DEVNULL,
                                 stderr=subprocess.DEVNULL)
            if rc != 0:
                # Python 2 will be removed in Debian Bullseye, thus we must
                # remove the build dependency on python2-dev. Keep the tests
                # but only run them if dev packages are available.
                raise unittest.SkipTest('Not running Python 2 tests because dev packages not installed.')
        self._simple_test('python', 'python')

    @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
    @skip_if_env_set('CC')
    def test_c_compiler(self):
        def cb(comp):
            # Override with whichever of gcc/clang is NOT the default.
            if comp.id == 'gcc':
                if not shutil.which('clang'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang', 'clang'
            if not is_real_gnu_compiler(shutil.which('gcc')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'gcc', 'gcc'
        self.helper_for_compiler('c', cb)

    @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
    @skip_if_env_set('CXX')
    def test_cpp_compiler(self):
        def cb(comp):
            if comp.id == 'gcc':
                if not shutil.which('clang++'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang++', 'clang'
            if not is_real_gnu_compiler(shutil.which('g++')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'g++', 'gcc'
        self.helper_for_compiler('cpp', cb)

    @skip_if_not_language('objc')
    @skip_if_env_set('OBJC')
    def test_objc_compiler(self):
        def cb(comp):
            if comp.id == 'gcc':
                if not shutil.which('clang'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang', 'clang'
            if not is_real_gnu_compiler(shutil.which('gcc')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'gcc', 'gcc'
        self.helper_for_compiler('objc', cb)

    @skip_if_not_language('objcpp')
    @skip_if_env_set('OBJCXX')
    def test_objcpp_compiler(self):
        def cb(comp):
            if comp.id == 'gcc':
                if not shutil.which('clang++'):
                    raise unittest.SkipTest('Only one compiler found, cannot test.')
                return 'clang++', 'clang'
            if not is_real_gnu_compiler(shutil.which('g++')):
                raise unittest.SkipTest('Only one compiler found, cannot test.')
            return 'g++', 'gcc'
        self.helper_for_compiler('objcpp', cb)

    @skip_if_not_language('d')
    @skip_if_env_set('DC')
    def test_d_compiler(self):
        def cb(comp):
            if comp.id == 'dmd':
                if shutil.which('ldc'):
                    return 'ldc', 'ldc'
                elif shutil.which('gdc'):
                    return 'gdc', 'gdc'
                else:
                    raise unittest.SkipTest('No alternative dlang compiler found.')
            if shutil.which('dmd'):
                return 'dmd', 'dmd'
            raise unittest.SkipTest('No alternative dlang compiler found.')
        self.helper_for_compiler('d', cb)

    @skip_if_not_language('cs')
    @skip_if_env_set('CSC')
    def test_cs_compiler(self):
        def cb(comp):
            if comp.id == 'csc':
                if not shutil.which('mcs'):
                    raise unittest.SkipTest('No alternate C# implementation.')
                return 'mcs', 'mcs'
            if not shutil.which('csc'):
                raise unittest.SkipTest('No alternate C# implementation.')
            return 'csc', 'csc'
        self.helper_for_compiler('cs', cb)

    @skip_if_not_language('fortran')
    @skip_if_env_set('FC')
    def test_fortran_compiler(self):
        def cb(comp):
            if comp.id == 'lcc':
                if shutil.which('lfortran'):
                    return 'lfortran', 'lcc'
                raise unittest.SkipTest('No alternate Fortran implementation.')
            elif comp.id == 'gcc':
                if shutil.which('ifort'):
                    # There is an ICC for windows (windows build, linux host),
                    # but we don't support that ATM so lets not worry about it.
                    if is_windows():
                        return 'ifort', 'intel-cl'
                    return 'ifort', 'intel'
                elif shutil.which('flang'):
                    return 'flang', 'flang'
                elif shutil.which('pgfortran'):
                    return 'pgfortran', 'pgi'
                # XXX: there are several other fortran compilers meson
                # supports, but I don't have any of them to test with
                raise unittest.SkipTest('No alternate Fortran implementation.')
            if not shutil.which('gfortran'):
                raise unittest.SkipTest('No alternate Fortran implementation.')
            return 'gfortran', 'gcc'
        self.helper_for_compiler('fortran', cb)

    def _single_implementation_compiler(self, lang, binary, version_str, version):
        """Helper for languages with a single (supported) implementation.

        Builds a wrapper around the compiler to override the version.
        """
        wrapper = self.helper_create_binary_wrapper(binary, version=version_str)
        env = get_fake_env()
        getter = getattr(env, 'detect_{}_compiler'.format(lang))
        getter = functools.partial(getter, MachineChoice.HOST)
        env.binaries.host.binaries[lang] = wrapper
        compiler = getter()
        self.assertEqual(compiler.version, version)

    @skip_if_not_language('vala')
    @skip_if_env_set('VALAC')
    def test_vala_compiler(self):
        self._single_implementation_compiler(
            'vala', 'valac', 'Vala 1.2345', '1.2345')

    @skip_if_not_language('rust')
    @skip_if_env_set('RUSTC')
    def test_rust_compiler(self):
        self._single_implementation_compiler(
            'rust', 'rustc', 'rustc 1.2345', '1.2345')

    @skip_if_not_language('java')
    def test_java_compiler(self):
        self._single_implementation_compiler(
            'java', 'javac', 'javac 9.99.77', '9.99.77')

    @skip_if_not_language('swift')
    def test_swift_compiler(self):
        # swiftc reports its version on stderr; the -Xlinker answer mimics
        # the linker probe output the detection code expects.
        wrapper = self.helper_create_binary_wrapper(
            'swiftc', version='Swift 1.2345', outfile='stderr',
            extra_args={'Xlinker': 'macosx_version. PROJECT:ld - 1.2.3'})
        env = get_fake_env()
        env.binaries.host.binaries['swift'] = wrapper
        compiler = env.detect_swift_compiler(MachineChoice.HOST)
        self.assertEqual(compiler.version, '1.2345')

    def test_native_file_dirs(self):
        testcase = os.path.join(self.unit_test_dir, '60 native file override')
        self.init(testcase, default_args=False,
                  extra_args=['--native-file', os.path.join(testcase, 'nativefile')])

    def test_native_file_dirs_overriden(self):
        # Command-line -D options must win over native-file directories.
        testcase = os.path.join(self.unit_test_dir, '60 native file override')
        self.init(testcase, default_args=False,
                  extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
                              '-Ddef_libdir=liblib', '-Dlibdir=liblib'])

    def test_compile_sys_path(self):
        """Compiling with a native file stored in a system path works.

        There was a bug which caused the paths to be stored incorrectly and
        would result in ninja invoking meson in an infinite loop. This tests
        for that by actually invoking ninja.
        """
        testcase = os.path.join(self.common_test_dir, '1 trivial')
        # It really doesn't matter what's in the native file, just that it exists
        config = self.helper_create_native_file({'binaries': {'bash': 'false'}})
        self.init(testcase, extra_args=['--native-file', config])
        self.build()
class CrossFileTests(BasePlatformTests):
    """Tests for cross file functionality not directly related to
    cross compiling.

    This is mainly aimed to testing overrides from cross files.
    """

    def test_cross_file_dirs(self):
        # Directory entries from the cross file must override those from the
        # native file; the -Ddef_* options carry the expected values.
        testcase = os.path.join(self.unit_test_dir, '60 native file override')
        self.init(testcase, default_args=False,
                  extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
                              '--cross-file', os.path.join(testcase, 'crossfile'),
                              '-Ddef_bindir=binbar',
                              '-Ddef_datadir=databar',
                              '-Ddef_includedir=includebar',
                              '-Ddef_infodir=infobar',
                              '-Ddef_libdir=libbar',
                              '-Ddef_libexecdir=libexecbar',
                              '-Ddef_localedir=localebar',
                              '-Ddef_localstatedir=localstatebar',
                              '-Ddef_mandir=manbar',
                              '-Ddef_sbindir=sbinbar',
                              '-Ddef_sharedstatedir=sharedstatebar',
                              '-Ddef_sysconfdir=sysconfbar'])

    def test_cross_file_dirs_overriden(self):
        # An explicit -Dlibdir on the command line beats the cross file value.
        testcase = os.path.join(self.unit_test_dir, '60 native file override')
        self.init(testcase, default_args=False,
                  extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
                              '--cross-file', os.path.join(testcase, 'crossfile'),
                              '-Ddef_libdir=liblib', '-Dlibdir=liblib',
                              '-Ddef_bindir=binbar',
                              '-Ddef_datadir=databar',
                              '-Ddef_includedir=includebar',
                              '-Ddef_infodir=infobar',
                              '-Ddef_libexecdir=libexecbar',
                              '-Ddef_localedir=localebar',
                              '-Ddef_localstatedir=localstatebar',
                              '-Ddef_mandir=manbar',
                              '-Ddef_sbindir=sbinbar',
                              '-Ddef_sharedstatedir=sharedstatebar',
                              '-Ddef_sysconfdir=sysconfbar'])

    def test_cross_file_dirs_chain(self):
        # crossfile2 overrides crossfile overrides nativefile
        testcase = os.path.join(self.unit_test_dir, '60 native file override')
        self.init(testcase, default_args=False,
                  extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
                              '--cross-file', os.path.join(testcase, 'crossfile'),
                              '--cross-file', os.path.join(testcase, 'crossfile2'),
                              '-Ddef_bindir=binbar2',
                              '-Ddef_datadir=databar',
                              '-Ddef_includedir=includebar',
                              '-Ddef_infodir=infobar',
                              '-Ddef_libdir=libbar',
                              '-Ddef_libexecdir=libexecbar',
                              '-Ddef_localedir=localebar',
                              '-Ddef_localstatedir=localstatebar',
                              '-Ddef_mandir=manbar',
                              '-Ddef_sbindir=sbinbar',
                              '-Ddef_sharedstatedir=sharedstatebar',
                              '-Ddef_sysconfdir=sysconfbar'])
class TAPParserTests(unittest.TestCase):
    """Unit tests for the TAP (Test Anything Protocol) stream parser.

    Each test feeds a TAP document to the parser and asserts the exact
    sequence of emitted events (Test, Plan, Version, Bailout, Error).
    """

    # --- assertion helpers over the event stream -------------------------

    def assert_test(self, events, **kwargs):
        if 'explanation' not in kwargs:
            kwargs['explanation'] = None
        self.assertEqual(next(events), TAPParser.Test(**kwargs))

    def assert_plan(self, events, **kwargs):
        if 'skipped' not in kwargs:
            kwargs['skipped'] = False
        if 'explanation' not in kwargs:
            kwargs['explanation'] = None
        self.assertEqual(next(events), TAPParser.Plan(**kwargs))

    def assert_version(self, events, **kwargs):
        self.assertEqual(next(events), TAPParser.Version(**kwargs))

    def assert_error(self, events):
        self.assertEqual(type(next(events)), TAPParser.Error)

    def assert_bailout(self, events, **kwargs):
        self.assertEqual(next(events), TAPParser.Bailout(**kwargs))

    def assert_last(self, events):
        # The event stream must be exhausted.
        with self.assertRaises(StopIteration):
            next(events)

    def parse_tap(self, s):
        parser = TAPParser(io.StringIO(s))
        return iter(parser.parse())

    def parse_tap_v13(self, s):
        # Prepend a version-13 header and consume the Version event.
        events = self.parse_tap('TAP version 13\n' + s)
        self.assert_version(events, version=13)
        return events

    # --- actual test cases ----------------------------------------------

    def test_empty(self):
        events = self.parse_tap('')
        self.assert_last(events)

    def test_empty_plan(self):
        events = self.parse_tap('1..0')
        self.assert_plan(events, count=0, late=False, skipped=True)
        self.assert_last(events)

    def test_plan_directive(self):
        events = self.parse_tap('1..0 # skipped for some reason')
        self.assert_plan(events, count=0, late=False, skipped=True,
                         explanation='for some reason')
        self.assert_last(events)

        # A "skip" directive on a non-empty plan is an error, but parsing
        # continues.
        events = self.parse_tap('1..1 # skipped for some reason\nok 1')
        self.assert_error(events)
        self.assert_plan(events, count=1, late=False, skipped=True,
                         explanation='for some reason')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

        events = self.parse_tap('1..1 # todo not supported here\nok 1')
        self.assert_error(events)
        self.assert_plan(events, count=1, late=False, skipped=False,
                         explanation='not supported here')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_ok(self):
        events = self.parse_tap('ok')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_with_number(self):
        events = self.parse_tap('ok 1')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_with_name(self):
        events = self.parse_tap('ok 1 abc')
        self.assert_test(events, number=1, name='abc', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_not_ok(self):
        events = self.parse_tap('not ok')
        self.assert_test(events, number=1, name='', result=TestResult.FAIL)
        self.assert_last(events)

    def test_one_test_todo(self):
        events = self.parse_tap('not ok 1 abc # TODO')
        self.assert_test(events, number=1, name='abc', result=TestResult.EXPECTEDFAIL)
        self.assert_last(events)

        events = self.parse_tap('ok 1 abc # TODO')
        self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
        self.assert_last(events)

    def test_one_test_skip(self):
        events = self.parse_tap('ok 1 abc # SKIP')
        self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
        self.assert_last(events)

    def test_one_test_skip_failure(self):
        # A failing test cannot be "skipped"; the failure wins.
        events = self.parse_tap('not ok 1 abc # SKIP')
        self.assert_test(events, number=1, name='abc', result=TestResult.FAIL)
        self.assert_last(events)

    def test_many_early_plan(self):
        events = self.parse_tap('1..4\nok 1\nnot ok 2\nok 3\nnot ok 4')
        self.assert_plan(events, count=4, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_test(events, number=3, name='', result=TestResult.OK)
        self.assert_test(events, number=4, name='', result=TestResult.FAIL)
        self.assert_last(events)

    def test_many_late_plan(self):
        events = self.parse_tap('ok 1\nnot ok 2\nok 3\nnot ok 4\n1..4')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_test(events, number=3, name='', result=TestResult.OK)
        self.assert_test(events, number=4, name='', result=TestResult.FAIL)
        self.assert_plan(events, count=4, late=True)
        self.assert_last(events)

    def test_directive_case(self):
        # Directives are case-insensitive.
        events = self.parse_tap('ok 1 abc # skip')
        self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
        self.assert_last(events)

        events = self.parse_tap('ok 1 abc # ToDo')
        self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
        self.assert_last(events)

    def test_directive_explanation(self):
        events = self.parse_tap('ok 1 abc # skip why')
        self.assert_test(events, number=1, name='abc', result=TestResult.SKIP,
                         explanation='why')
        self.assert_last(events)

        events = self.parse_tap('ok 1 abc # ToDo Because')
        self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS,
                         explanation='Because')
        self.assert_last(events)

    def test_one_test_early_plan(self):
        events = self.parse_tap('1..1\nok')
        self.assert_plan(events, count=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_one_test_late_plan(self):
        events = self.parse_tap('ok\n1..1')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_plan(events, count=1, late=True)
        self.assert_last(events)

    def test_out_of_order(self):
        # Test numbers must be sequential; an out-of-order number is an error.
        events = self.parse_tap('ok 2')
        self.assert_error(events)
        self.assert_test(events, number=2, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_middle_plan(self):
        events = self.parse_tap('ok 1\n1..2\nok 2')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_plan(events, count=2, late=True)
        self.assert_error(events)
        self.assert_test(events, number=2, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_too_many_plans(self):
        events = self.parse_tap('1..1\n1..2\nok 1')
        self.assert_plan(events, count=1, late=False)
        self.assert_error(events)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_too_many(self):
        # More tests than the plan promises -> error after the plan is seen.
        events = self.parse_tap('ok 1\nnot ok 2\n1..1')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_plan(events, count=1, late=True)
        self.assert_error(events)
        self.assert_last(events)

        events = self.parse_tap('1..1\nok 1\nnot ok 2')
        self.assert_plan(events, count=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_error(events)
        self.assert_last(events)

    def test_too_few(self):
        # Fewer tests than planned is likewise an error.
        events = self.parse_tap('ok 1\nnot ok 2\n1..3')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_plan(events, count=3, late=True)
        self.assert_error(events)
        self.assert_last(events)

        events = self.parse_tap('1..3\nok 1\nnot ok 2')
        self.assert_plan(events, count=3, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_error(events)
        self.assert_last(events)

    def test_too_few_bailout(self):
        # A bailout explains the missing tests; no extra error is emitted.
        events = self.parse_tap('1..3\nok 1\nnot ok 2\nBail out! no third test')
        self.assert_plan(events, count=3, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_bailout(events, message='no third test')
        self.assert_last(events)

    def test_diagnostics(self):
        # Lines starting with '#' are diagnostics and must be ignored.
        events = self.parse_tap('1..1\n# ignored\nok 1')
        self.assert_plan(events, count=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

        events = self.parse_tap('# ignored\n1..1\nok 1\n# ignored too')
        self.assert_plan(events, count=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

        events = self.parse_tap('# ignored\nok 1\n1..1\n# ignored too')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_plan(events, count=1, late=True)
        self.assert_last(events)

    def test_empty_line(self):
        events = self.parse_tap('1..1\n\nok 1')
        self.assert_plan(events, count=1, late=False)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_unexpected(self):
        events = self.parse_tap('1..1\ninvalid\nok 1')
        self.assert_plan(events, count=1, late=False)
        self.assert_error(events)
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_last(events)

    def test_version(self):
        events = self.parse_tap('TAP version 13\n')
        self.assert_version(events, version=13)
        self.assert_last(events)

        # Only version 13 is accepted, and only as the very first line.
        events = self.parse_tap('TAP version 12\n')
        self.assert_error(events)
        self.assert_last(events)

        events = self.parse_tap('1..0\nTAP version 13\n')
        self.assert_plan(events, count=0, late=False, skipped=True)
        self.assert_error(events)
        self.assert_last(events)

    def test_yaml(self):
        # YAML blocks attached to a test must be consumed silently...
        events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def\n ...\nok 2')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_test(events, number=2, name='', result=TestResult.OK)
        self.assert_last(events)

        # ...but an unterminated block (missing '...') is an error.
        events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_error(events)
        self.assert_last(events)

        events = self.parse_tap_v13('ok 1\n ---\n foo: abc\n bar: def\nnot ok 2')
        self.assert_test(events, number=1, name='', result=TestResult.OK)
        self.assert_error(events)
        self.assert_test(events, number=2, name='', result=TestResult.FAIL)
        self.assert_last(events)
def _clang_at_least(compiler, minver: str, apple_minver: str) -> bool:
    """
    check that Clang compiler is at least a specified version, whether AppleClang or regular Clang

    Parameters
    ----------
    compiler:
        Meson compiler object
    minver: str
        Clang minimum version
    apple_minver: str
        AppleCLang minimum version

    Returns
    -------
    at_least: bool
        Clang is at least the specified version
    """
    # AppleClang uses its own versioning scheme, so pick the matching minimum.
    apple_clang_classes = (mesonbuild.compilers.AppleClangCCompiler,
                           mesonbuild.compilers.AppleClangCPPCompiler)
    required = apple_minver if isinstance(compiler, apple_clang_classes) else minver
    return version_compare(compiler.version, required)
def unset_envs():
    # For unit tests we must fully control all command lines
    # so that there are no unexpected changes coming from the
    # environment, for example when doing a package build.
    names = ['CPPFLAGS', 'LDFLAGS'] + list(mesonbuild.compilers.compilers.cflags_mapping.values())
    for name in names:
        # pop() with a default deletes the variable only when present.
        os.environ.pop(name, None)
def convert_args(argv):
    """Translate our command-line test selectors into pytest arguments.

    '-v' is forwarded as-is; every non-flag argument becomes a pytest -k
    selector, with 'ClassName.test_name' rewritten as
    'ClassName and test_name'. All other flags are dropped.
    """
    selectors = [
        ' and '.join(arg.split('.')) if '.' in arg else arg
        for arg in argv
        if not arg.startswith('-')
    ]
    pytest_args = ['-v'] if '-v' in argv else []
    if selectors:
        pytest_args += ['-k', ' or '.join(selectors)]
    return pytest_args
def main():
    """Run the unit-test suite.

    Prefers pytest with pytest-xdist (parallel via '-n auto'); falls back to
    plain unittest when either is missing or the interpreter is too old.
    Returns the exit code of whichever runner was used.
    """
    unset_envs()
    try:
        import pytest # noqa: F401
        # Need pytest-xdist for `-n` arg
        import xdist # noqa: F401
        # Tuple comparison is the correct version gate: the old
        # `major <= 3 and minor <= 5` check wrongly accepted e.g. 2.7.
        if sys.version_info < (3, 6):
            raise ImportError('pytest with python <= 3.5 is causing issues on the CI')
        pytest_args = ['-n', 'auto', './run_unittests.py']
        pytest_args += convert_args(sys.argv[1:])
        return subprocess.run(python_command + ['-m', 'pytest'] + pytest_args).returncode
    except ImportError:
        print('pytest-xdist not found, using unittest instead')
    # All attempts at locating pytest failed, fall back to plain unittest.
    cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
             'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests',
             'TAPParserTests',

             'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests',
             'WindowsTests', 'DarwinTests']
    return unittest.main(defaultTest=cases, buffer=True)
if __name__ == '__main__':
    # Propagate the runner's exit status to the shell.
    raise SystemExit(main())
| 46.077663 | 199 | 0.583563 |
82d2bf864523d911226f35fa8aac37cf10e20580 | 790 | py | Python | markov_chain/fetch_data.py | anthonycomfort/markov_chain | 56dc89651a963b73411f47d66fbf5858189aa8b0 | [
"MIT"
] | null | null | null | markov_chain/fetch_data.py | anthonycomfort/markov_chain | 56dc89651a963b73411f47d66fbf5858189aa8b0 | [
"MIT"
] | 2 | 2016-08-29T18:22:06.000Z | 2016-08-29T18:23:01.000Z | markov_chain/fetch_data.py | anthonycomfort/markov_chain | 56dc89651a963b73411f47d66fbf5858189aa8b0 | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup
import urllib2
import re
def createSoup(URL):
    """Fetch an article page and return its body text with letters only.

    Downloads URL, locates the div whose class contains 'post-content',
    joins the text of its <p> paragraphs, and strips everything that is
    not an ASCII letter (runs collapse to a single space).
    """
    html = urllib2.urlopen(URL).read()
    page = BeautifulSoup(html, 'html.parser')
    # The article body lives in a div whose class name contains 'post-content'.
    content = page.find('div', {'class' : re.compile('post-content')})
    # Keep only text nodes that sit directly inside <p> tags (the paragraphs).
    paragraphs = ''.join(t for t in content.find_all(text=True) if t.parent.name == 'p')
    # Replace every run of non-letters (digits, punctuation) with one space.
    #Need to parse this into an array, find lengths of 1 and remove.
    return re.sub('[^A-Za-z]+', ' ', paragraphs)
| 46.470588 | 108 | 0.73038 |
ee5beacb409148cde4bd62e9a89659ded08c9140 | 667 | py | Python | minifold/singleton.py | vishalbelsare/minifold | f1d90999cef46715a556f93dbead6ed987c4fdd0 | [
"BSD-3-Clause"
] | 15 | 2018-09-03T09:40:59.000Z | 2021-07-16T16:14:46.000Z | src/singleton.py | Infinite-Blue-1042/minifold | cd0aa9207f9e1819ed2ecbb24373cdcfe27abd16 | [
"BSD-3-Clause"
] | null | null | null | src/singleton.py | Infinite-Blue-1042/minifold | cd0aa9207f9e1819ed2ecbb24373cdcfe27abd16 | [
"BSD-3-Clause"
] | 8 | 2019-01-25T07:18:59.000Z | 2021-04-07T17:54:54.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of the minifold project.
# https://github.com/nokia/minifold
__author__ = "Marc-Olivier Buob"
__maintainer__ = "Marc-Olivier Buob"
__email__ = "marc-olivier.buob@nokia-bell-labs.com"
__copyright__ = "Copyright (C) 2018, Nokia"
__license__ = "BSD-3"
# Based on https://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
class Singleton(type):
    """Metaclass caching at most one instance per class.

    The first instantiation of a class using this metaclass is stored in
    ``s_instances``; every later call returns the cached object unchanged.
    """
    s_instances = dict()

    def __call__(cls, *args, **kwargs):
        cache = Singleton.s_instances
        if cls not in cache:
            # Build the instance the normal way exactly once.
            cache[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cache[cls]
| 30.318182 | 85 | 0.68066 |
1609a1a1bc83eda5c52f46b8007388c80b7b293e | 19,307 | py | Python | photonpy/smlm/ui/app.py | qnano/photonpy | 9c03a1c9f4c2177c9c6fb3f2f16dfec2306006d4 | [
"MIT"
] | 5 | 2021-04-29T21:06:05.000Z | 2022-03-23T03:45:25.000Z | photonpy/smlm/ui/app.py | qnano/photonpy | 9c03a1c9f4c2177c9c6fb3f2f16dfec2306006d4 | [
"MIT"
] | null | null | null | photonpy/smlm/ui/app.py | qnano/photonpy | 9c03a1c9f4c2177c9c6fb3f2f16dfec2306006d4 | [
"MIT"
] | 1 | 2021-06-18T12:39:28.000Z | 2021-06-18T12:39:28.000Z |
import sys,os
import numpy as np
import tqdm
from PyQt5.QtWidgets import QApplication, QLineEdit, QFileDialog, QDialog,QVBoxLayout,QMessageBox,QCheckBox
from PyQt5 import QtGui
from PyQt5 import QtCore, QtWidgets
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
from PyQt5 import QtCore, QtWidgets
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg
from photonpy.smlm.dataset import Dataset
from photonpy.smlm.ui import main_ui, linklocs_ui
from photonpy.smlm.ui.progressbar import ProgressBar
from photonpy.smlm.ui.qtplot import PlotDialog
import threading
import json
import functools
import pyqtgraph as pg
import photonpy.smlm.process_movie as process_movie
import photonpy.smlm.extract_rois as extract_rois
from photonpy.smlm.util import imshow_hstack
from photonpy.smlm.ui.drift_correct_dlg import DriftCorrectionDialog
import matplotlib as mpl
#mpl.use('svg')
# Matplotlib rc overrides applied to every figure produced by this UI:
# slightly larger fonts, and SVG output keeps text as editable text.
new_rc_params = {
    # "font.family": 'Times',
    "font.size": 15,
    "font.serif": [],
    "svg.fonttype": 'none'} #to store text as text, not as path
mpl.rcParams.update(new_rc_params)
class MplCanvas(FigureCanvasQTAgg):
    """Qt canvas widget hosting a single-axes matplotlib figure."""

    def __init__(self, parent=None, width=5, height=4, dpi=100):
        # NOTE(review): `parent` is accepted but not forwarded anywhere —
        # presumably for API symmetry with other Qt widgets; confirm.
        figure = Figure(figsize=(width, height), dpi=dpi)
        # Expose the single subplot so callers can draw on it directly.
        self.axes = figure.add_subplot(111)
        super(MplCanvas, self).__init__(figure)
#import photonpy.simflux.locs_to_pattern as simflux_pattern
#ript(Run In Plotting Thread) decorator
def ript(function):
    """Decorator marshalling calls onto the plotting thread.

    When the caller already is the plotting thread, the wrapped function runs
    directly. Otherwise the call is queued to `send_queue` as a partial and
    the caller blocks on `return_queue` for the result.

    NOTE(review): `send_queue`, `return_queue` and `plot_thread` are module
    globals that must be initialized elsewhere before first use — confirm.
    """
    @functools.wraps(function)  # preserve name/docstring of the wrapped function
    def ript_this(*args, **kwargs):
        global send_queue, return_queue, plot_thread
        # current_thread() replaces the deprecated currentThread() alias.
        if threading.current_thread() == plot_thread: #if called from the plotting thread -> execute
            return function(*args, **kwargs)
        else: #if called from a diffrent thread -> send function to queue
            send_queue.put(functools.partial(function, *args, **kwargs))
            return_parameters = return_queue.get(True) # blocking (wait for return value)
            return return_parameters
    return ript_this
def showMessage(txt):
    """Show *txt* in a modal information message box and wait for dismissal."""
    box = QMessageBox()
    box.setIcon(QMessageBox.Information)
    box.setText(txt)
    box.exec_()
def createDatasetViewer(ds:Dataset):
    """Open a window rendering *ds* as a Gaussian-spot image.

    Returns the QDialog so the caller can keep a reference (otherwise the
    window would be garbage collected and closed immediately).
    """
    # Interpret image data as row-major instead of col-major
    pg.setConfigOptions(imageAxisOrder='row-major')
    # NOTE(review): arguments are presumably (zoom, sigma) — confirm against
    # Dataset.renderGaussianSpots.
    img = ds.renderGaussianSpots(10, 0.5)
    ## Create window with ImageView widget
    win = QtGui.QDialog()
    win.resize(800,800)
    layout = QVBoxLayout(win)
    imv = pg.ImageView()
    layout.addWidget(imv)
    #win.setCentralWidget(imv)
    win.show()
    name = ds['locs_path']
    win.setWindowTitle(f'Viewing {name}')
    ## Add time-varying signal
    # Dead example code kept from the pyqtgraph ImageView demo:
    """
    sig = np.zeros(data.shape[0])
    sig[30:] += np.exp(-np.linspace(1,10, 70))
    sig[40:] += np.exp(-np.linspace(1,10, 60))
    sig[70:] += np.exp(-np.linspace(1,10, 30))
    sig = sig[:,np.newaxis,np.newaxis] * 3
    data[:,50:60,30:40] += sig
    """
    imv.setImage(img)
    ## Display the data and assign each frame a time value from 1.0 to 3.0
    #imv.setImage(data, xvals=np.linspace(1., 3., data.shape[0]))
    ## Set a custom color map
    colors = [
        (0, 0, 0),
        (45, 5, 61),
        (84, 42, 55),
        (150, 87, 60),
        (208, 171, 141),
        (255, 255, 255)
    ]
    cmap = pg.ColorMap(pos=np.linspace(0.0, 1.0, 6), color=colors)
    imv.setColorMap(cmap)
    return win
class LinkLocsDialog(QDialog):
    """Dialog for linking localizations over consecutive frames.

    Wraps the Qt Designer form ``linklocs_ui`` and runs the on-time estimate,
    showing the resulting figure in a PlotDialog.
    """

    def __init__(self, parent):
        super().__init__(parent)
        self.ui = linklocs_ui.Ui_Dialog()
        self.ui.setupUi(self)
        self.ui.btnBrowse.clicked.connect(self._onBrowse)
        self.ui.btnEstimate.clicked.connect(self.estimate)

    def setLocsFile(self,fn):
        # Pre-fill the localization-file path field (called by the main window).
        self.ui.txtLocsFile.setText(fn)

    def _onBrowse(self):
        # Let the user pick an HDF5 localizations file.
        options = QFileDialog.Options()
        # options |= QFileDialog.DontUseNativeDialog
        fileName, _ = QFileDialog.getOpenFileName(self,"", "","All Files (*);;HDF5 Files (*.hdf5)", options=options)
        if fileName:
            self.ui.txtLocsFile.setText(fileName)

    def estimate(self):
        """Run the on-time estimate for the selected file and plot the result."""
        # Imports are local to avoid paying the cost unless the button is used.
        from utils.link_locs import estimate_on_time
        maxdist = self.ui.maxDistance.value()
        frameskip = self.ui.frameskip.value()
        fig,bins,framecounts = estimate_on_time(self.ui.txtLocsFile.text(),maxdist,frameskip)
        import photonpy.smlm.ui.qtplot as qtplot
        plotdlg=qtplot.PlotDialog(fig,self)
        plotdlg.setModal(True)
        plotdlg.show()
def getWidgetValues(widgets):
    """Collect the current values of the given Qt widgets.

    Returns a dict mapping each widget's objectName() to its current value
    (spin boxes -> number, line edits -> text, check boxes -> bool). Widgets
    of unsupported types are skipped. Counterpart of setWidgetValues().
    """
    d={}
    for w in widgets:
        # Use isinstance instead of exact type() comparison so widget
        # subclasses (e.g. promoted widgets from Qt Designer) are handled too.
        if isinstance(w, (QtWidgets.QDoubleSpinBox, QtWidgets.QSpinBox)):
            v = w.value()
        elif isinstance(w, QLineEdit):
            v = w.text()
        elif isinstance(w, QCheckBox):
            v = w.isChecked()
        else:
            continue
        d[w.objectName()] = v
    return d
def setWidgetValues(widgets,values):
    """Restore widget values previously captured by getWidgetValues().

    Widgets whose objectName() is missing from ``values`` are left untouched,
    so newly added widgets keep their designer defaults.
    """
    for w in widgets:
        if w.objectName() in values:
            v = values[w.objectName()]
            # isinstance (not exact type()) so widget subclasses work too,
            # mirroring the lookup logic in getWidgetValues().
            if isinstance(w, (QtWidgets.QDoubleSpinBox, QtWidgets.QSpinBox)):
                w.setValue(v)
            elif isinstance(w, QLineEdit):
                w.setText(v)
            elif isinstance(w, QCheckBox):
                w.setChecked(v)
class Window(QDialog):
localizeDone = QtCore.pyqtSignal()
localizeFailed = QtCore.pyqtSignal([str])
roiExtractionDone = QtCore.pyqtSignal()
datasets = []
def __init__(self):
super().__init__()
self.title = 'Photonpy localization microscopy analysis toolbox'
self.viewers = []
self.ui = main_ui.Ui_Dialog()
ui=self.ui
ui.setupUi(self)
ui.btnBrowseTiff.clicked.connect(self.onBrowseTiff)
ui.btnLocalize.clicked.connect(self.localize)
ui.btnLinkLocs.clicked.connect(self.linklocs)
ui.btnBrowseCameraDarkFrames.clicked.connect(self.onBrowseCameraDarkFrames)
ui.btnBrowseCameraLightFrames.clicked.connect(self.onBrowseCameraLightFrames)
ui.btnBrowseROIs.clicked.connect(self.onBrowseROIFile)
ui.btnRCC.clicked.connect(self.onDriftCorrectRCC)
ui.btnMinEntropyDrift.clicked.connect(self.onDriftCorrectMinEntropy)
ui.btnExtractROIs.clicked.connect(self.onExtractROIs)
ui.checkBoxPerPixelCamCalib.toggled.connect(self.onPerPixelCamCalibChanged)
self.onPerPixelCamCalibChanged()
ui.btnViewSelected.clicked.connect(self.onViewSelected)
ui.btnLoad.clicked.connect(self.onLoadLocs)
self.localizeFailed.connect(self.onLocalizeFailed)
self.localizeDone.connect(self.onLocalizeDone)
self.roiExtractionDone.connect(self.onROIExtractionDone)
self.cfgFile = os.path.dirname(__file__) + '/ui-cfg.json'
self.cfgWidgets = {
ui.roisize,
ui.gain,
ui.offset,
ui.detectionThreshold,
ui.pixelsize,
ui.spotDetectionPSFSigma,
ui.spinSigmaFitFramesPerBin,
ui.tiffPath,
ui.txtCameraDarkFrames,
ui.txtCameraLightFrames,
ui.startFrame,
ui.maxLinkDistance,
ui.maxLinkFrameskip,
ui.txtROIFile,
ui.roiExtractMinSpotFrames,
ui.roiExtractSpotFrames,
ui.roiExtractAppend,
ui.maxLinkDistanceIntensity,
ui.checkBoxPerPixelCamCalib,
ui.spinSpotDetectorUseMeanImage,
ui.spinNumFrames,
ui.chiSquareThreshold,
ui.spinSumFrames,
ui.rccFramesPerBin,
ui.minEntFramesPerBin,
ui.minEntMaxSpots
}
self.load()
@property
def selectedDataset(self):
idx = self.ui.listDatasets.currentIndex().row()
return self.datasets[idx]
def onViewSelected(self):
ds = self.selectedDataset
self.viewers.append(createDatasetViewer(ds))
def onDriftCorrectRCC(self):
fpb = self.ui.rccFramesPerBin.value()
ds = self.selectedDataset.copy()
drift = ds.estimateDriftRCC(framesPerBin=fpb, maxdrift=5)
ds.applyDrift(drift)
path = os.path.splitext( ds['imagefile'])[0]+"_undrifted_rcc.hdf5"
ds.save(path)
ds['locs_path'] = path
self.datasets.append(ds)
self.updateList()
def onDriftCorrectMinEntropy(self):
fpb = self.ui.minEntFramesPerBin.value()
maxspots = self.ui.minEntMaxSpots.value()
ds = self.selectedDataset.copy()
path_noext = os.path.splitext( ds['locs_path'])[0]
rcc_fpb = self.ui.rccFramesPerBin.value()
coarseFPB = self.ui.minEntCoarseFPB.value()
if coarseFPB==0:
coarseFPB=None
coarseSigmaM = self.ui.minEntCoarseSigmaMultiplier.value()
sigma = ds.data.crlb.pos.mean(0) * coarseSigmaM
drift, prec = ds.estimateDriftMinEntropy(framesPerBin=fpb,
pixelsize = self.ui.pixelsize.value(),
maxdrift = 5, maxspots = maxspots,
initializeWithRCC = ds.numFrames//rcc_fpb,
coarseFramesPerBin = coarseFPB,
coarseSigma = sigma,
outputfn = path_noext+"_drift_dme")
ds.applyDrift(drift)
path = path_noext+"_undrifted_dme.hdf5"
ds.save(path)
ds['locs_path'] = path
self.datasets.append(ds)
self.updateList()
def onPerPixelCamCalibChanged(self):
v = self.ui.checkBoxPerPixelCamCalib.checkState()
self.ui.offset.setEnabled(not v)
self.ui.gain.setEnabled(not v)
self.ui.txtCameraDarkFrames.setEnabled(v)
self.ui.txtCameraLightFrames.setEnabled(v)
def load(self):
path = os.path.abspath(self.cfgFile)
print(f"Loading UI state from {path}")
if os.path.exists(self.cfgFile):
with open(self.cfgFile,'r') as f:
d = json.load(f)
setWidgetValues(self.cfgWidgets,d)
def save(self):
d = getWidgetValues(self.cfgWidgets)
with open(self.cfgFile,'w') as f:
json.dump(d,f,indent=4)
def closeEvent(self,event):
self.save()
def linklocs(self):
dlg = LinkLocsDialog(self)
dlg.setLocsFile(self.ui.smlmLocsFile.text())
dlg.show()
def updatePaths(self):
tiff_path = self.ui.tiffPath.text()
def onBrowseCameraDarkFrames(self):
options = QFileDialog.Options()
fileName, _ = QFileDialog.getOpenFileName(self,"Browse movie containing dark calibration:", "","All Files (*);;TIFF File (*.tif)", options=options)
if fileName:
self.ui.txtCameraDarkFrames.setText(fileName)
def onBrowseCameraLightFrames(self):
options = QFileDialog.Options()
fileName, _ = QFileDialog.getOpenFileName(self,"Browse movie containing light frames for calibration:", "","All Files (*);;TIFF File (*.tif)", options=options)
if fileName:
self.ui.txtCameraLightFrames.setText(fileName)
def onBrowseROIFile(self):
options = QFileDialog.Options()
fileName, _ = QFileDialog.getOpenFileName(self,"Browse ROI file", "","All Files (*);;TIFF File (*.tif)", options=options)
if fileName:
self.ui.txtROIFile.setText(fileName)
def onBrowseTiff(self):
options = QFileDialog.Options()
fileName, _ = QFileDialog.getOpenFileName(self,"Browse TIFF", "","All Files (*);;TIFF File (*.tif)", options=options)
if fileName:
self.ui.tiffPath.setText(fileName)
self.updatePaths()
def onLoadLocs(self):
options = QFileDialog.Options()
filename, _ = QFileDialog.getOpenFileName(self,"Browse ROI file", "","Picasso compatible HDF5 (*.hdf5);;Thunderstorm CSV (*.csv)", options=options)
if filename:
try:
ds = Dataset.load(filename)
self.result = ds
self.datasets = [ds]
self.updateList()
except ValueError as e:
showMessage(f'Error: {str(e)}')
def onExtractROIs(self):
locs_fn = self.ui.smlmLocsFile.text()
tiff_path = self.ui.tiffPath.text()
rois_path = self.ui.txtROIFile.text()
pbar = ProgressBar("Extracting ROIs and estimating spot background and intensity")
def progress_update(msg,done):
if msg is not None:
pbar.setMsg.emit(msg)
if done is not None:
pbar.update.emit(done)
return not pbar.abortPressed
cfg = self.getConfig()
cfg = {**cfg,
'maxlinkdistXY': self.ui.maxLinkDistance.value(),
'maxlinkdistI': self.ui.maxLinkDistanceIntensity.value(),
'maxlinkframeskip': self.ui.maxLinkFrameskip.value()
}
maxroiframes = self.ui.roiExtractSpotFrames.value()
minroiframes = self.ui.roiExtractMinSpotFrames.value()
appendFrames = self.ui.roiExtractAppend.value()
def process_thread():
self.rois,self.roiframes = extract_rois.extract_rois(rois_path, tiff_path, cfg, minroiframes,
maxroiframes, appendFrames, locs_fn, progress_update)
if not pbar.abortPressed:
self.roiExtractionDone.emit()
t = threading.Thread(target=process_thread)
t.start()
pbar.show()
def onViewROIs(self):
rois_path = self.ui.txtROIFile.text()
roidata = extract_rois.ROIData.load(rois_path)
plt.figure()
for k in range(20):
imshow_hstack(roidata.frames[k])
def updateList(self):
model = QtGui.QStandardItemModel()
self.ui.listDatasets.setModel(model)
for d in self.datasets:
item = QtGui.QStandardItem(f"{d['locs_path']} - {d.info()}")
model.appendRow(item)
def getConfig(self):
offset = self.ui.offset.value()
gain = self.ui.gain.value()
if self.ui.checkBoxPerPixelCamCalib.isChecked():
offset = self.ui.txtCameraDarkFrames.text()
gain = self.ui.txtCameraLightFrames.text()
if len(offset) == 0:
showMessage('Need to provide movie with dark frames')
return
if len(gain) == 0:
showMessage('Need to provide movie with light frames')
return
cfg = {
'roisize': self.ui.roisize.value(),
'threshold': self.ui.detectionThreshold.value(),
'sigmaframesperbin': self.ui.spinSigmaFitFramesPerBin.value(),
'gain': gain,
'maxframes': self.ui.spinNumFrames.value(),
'offset': offset,
'startframe': self.ui.startFrame.value(),
'pixelsize': self.ui.pixelsize.value(),
'spotdetectsigma': self.ui.spotDetectionPSFSigma.value(),
'sumframes': self.ui.spinSumFrames.value()
}
chisq = self.ui.chiSquareThreshold.value()
if chisq > 0 :
cfg['maxchisq'] = chisq
return cfg
def localize(self):
tiff_path = self.ui.tiffPath.text()
if not os.path.exists(tiff_path):
return
cfg = self.getConfig()
if cfg is None:
return
locs_fn = os.path.splitext(tiff_path)[0]+".hdf5"
self.ui.labelLocsInfo.setText('')
pbar = ProgressBar("Running spot detection and 2D Gaussian localization...")
def progress_update(msg,done):
if msg is not None:
pbar.setMsg.emit(msg)
if done is not None:
pbar.update.emit(done)
return not pbar.abortPressed
def localize_thread():
print (f"Localize thread: {threading.get_ident()}")
try:
self.localizer = process_movie.Localizer2D()
self.localizer.process(tiff_path, cfg, locs_fn, progress_update)
self.tiff_path = tiff_path
if not pbar.abortPressed:
self.localizeDone.emit()
except ValueError as e:
self.localizeFailed.emit(str(e))
if True:
t = threading.Thread(target=localize_thread)
t.start()
else: #debug -- skip the threading
self.localizer = process_movie.Localizer2D()
self.localizer.process(tiff_path, cfg, locs_fn, progress_update)
self.localizeDone.emit()
pbar.show()
@QtCore.pyqtSlot(str)
def onLocalizeFailed(self, msg):
showMessage(f'Error: {msg}')
@QtCore.pyqtSlot()
def onLocalizeDone(self):
print("localize done")
self.localizer.plotChiSquare()
self.localizer.plotSigmaTimeSeries()
self.localizer.plotIntensityHistogram()
self.result = self.localizer.result
#img = self.result.renderGaussianSpots(20, 1)
#plt.figure()
#plt.imshow(img)
self.viewers.append (createDatasetViewer(self.result))
if 'sigma' in self.result.dtypeEstim.fields:
sx = self.result.data.estim.sigma[:,0]
sy = self.result.data.estim.sigma[:,1]
self.ui.psfSigmaX.setValue(np.median(sx))
self.ui.psfSigmaY.setValue(np.median(sy))
fig = plt.figure(figsize=(8,5))
plt.hist([sx,sy],label=['Sigma X','Sigma Y'],range=(1,3),bins=100)
plt.legend()
plt.xlabel('PSF Sigma [pixels]')
plt.show()
#PlotDialog(fig).show()
self.datasets = [ self.result ]
self.updateList()
#self.ui.labelLocsInfo.setText(self.datasets[0].info())
@QtCore.pyqtSlot()
def onROIExtractionDone(self):
print("roi extraction done")
def run_ui():
    """Create (or reuse) the QApplication, show the main Window, and run the event loop."""
    # QApplication.instance() is None when no app exists yet (e.g. plain script);
    # inside IPython an application may already be running.
    app = QApplication.instance() or QApplication(sys.argv)
    window = Window()
    window.show()
    window.activateWindow()
    app.exec_()
    window = None
    #del tqdm # prevent exception at exit about not being able to join thread
    # Drop the app reference to prevent the IPython+Qt teardown issue:
    # https://github.com/spyder-ide/spyder/issues/2970
    del app
if __name__ == '__main__':
    # Script entry point: launch the Qt analysis UI.
    print('Opening UI')
    run_ui()
87906f7865b973ee8c57d001dc04343448f48083 | 245 | py | Python | flow/core/kernel/vehicle/__init__.py | syuntoku14/flow | 3a1157cde31d0b7d6a3cc2f91eef0ec9ea53575e | [
"MIT"
] | null | null | null | flow/core/kernel/vehicle/__init__.py | syuntoku14/flow | 3a1157cde31d0b7d6a3cc2f91eef0ec9ea53575e | [
"MIT"
] | null | null | null | flow/core/kernel/vehicle/__init__.py | syuntoku14/flow | 3a1157cde31d0b7d6a3cc2f91eef0ec9ea53575e | [
"MIT"
] | null | null | null | from flow.core.kernel.vehicle.base import KernelVehicle
from flow.core.kernel.vehicle.traci import TraCIVehicle
from flow.core.kernel.vehicle.aimsun import AimsunKernelVehicle
__all__ = ['KernelVehicle', 'TraCIVehicle', 'AimsunKernelVehicle']
| 35 | 66 | 0.828571 |
46d25ec0a5b0380ed71f1616648f248db607d049 | 8,586 | py | Python | docs/conf.py | pyarnold/asyncio-redis | 2da0328e21855ddac2c97d664b4a2dc237d3f5a6 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2020-12-18T01:07:51.000Z | 2020-12-18T01:07:51.000Z | docs/conf.py | pyarnold/asyncio-redis | 2da0328e21855ddac2c97d664b4a2dc237d3f5a6 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | docs/conf.py | pyarnold/asyncio-redis | 2da0328e21855ddac2c97d664b4a2dc237d3f5a6 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | # -*- coding: utf-8 -*-
#
# asyncio_redis documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 31 08:50:13 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Take signatures from docstrings.
autodoc_docstring_signature = True
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'asyncio_redis'
copyright = u'2013, Jonathan Slenders'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
import os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
html_theme = 'default'
else:
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
html_theme = 'pyramid'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'asyncio_redisdoc'
# -- Options for LaTeX output --------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'asyncio_redis.tex', u'asyncio\\_redis Documentation',
u'Jonathan Slenders', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'asyncio_redis', u'asyncio_redis Documentation',
[u'Jonathan Slenders'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'asyncio_redis', u'asyncio_redis Documentation',
u'Jonathan Slenders', 'asyncio_redis', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| 31.8 | 80 | 0.717447 |
f7359e904faa026a802269f620739354e64f4bbc | 62 | py | Python | app/tests.py | nibinn/Rnd | e306b3da3af5e9da66d11436bc7abf3a77f50573 | [
"MIT"
] | 1 | 2022-03-30T15:28:33.000Z | 2022-03-30T15:28:33.000Z | authentication/tests.py | piyushjain-pj/pneumonia_and_covid_prediction_tool | b5f503b1672b1093c4bd6f9e053d6024e4f73f9d | [
"MIT"
] | 14 | 2020-06-05T18:37:13.000Z | 2022-03-11T23:26:12.000Z | authentication/tests.py | piyushjain-pj/pneumonia_and_covid_prediction_tool | b5f503b1672b1093c4bd6f9e053d6024e4f73f9d | [
"MIT"
] | null | null | null |
from django.test import TestCase
# Create your tests here.
| 10.333333 | 32 | 0.758065 |
cf2665144a589fd50b589a84367c9bda251884a7 | 957 | py | Python | src/Sanga/media/peoplenews.py | allenyummy/Sanga | ff4cc60e0fd05cac49bdf15ad8a57dfedcf75fd0 | [
"MIT"
] | 2 | 2021-09-01T13:39:58.000Z | 2021-09-01T13:41:49.000Z | src/Sanga/media/peoplenews.py | allenyummy/Sanga | ff4cc60e0fd05cac49bdf15ad8a57dfedcf75fd0 | [
"MIT"
] | 3 | 2021-08-31T06:00:23.000Z | 2021-09-01T08:52:41.000Z | src/Sanga/media/peoplenews.py | allenyummy/Sanga | ff4cc60e0fd05cac49bdf15ad8a57dfedcf75fd0 | [
"MIT"
] | null | null | null | # encoding=utf-8
# Author: Yu-Lun Chiang
# Description: Get news
import logging
from typing import Dict, List, Union
from bs4 import BeautifulSoup
from .base import BaseMediaNewsCrawler
from ..struct import NewsStruct
logger = logging.getLogger(__name__)
class PeopleNews(BaseMediaNewsCrawler):
    """Web Crawler for PeopleNews News"""

    def getInfo(self, link: str) -> NewsStruct:
        # Delegate to the generic crawling pipeline; the static hooks below
        # customize keyword and content extraction for this site.
        return super().getInfo(link)

    @staticmethod
    def _get_keywords(
        script_info: Dict[str, str],
        soup: BeautifulSoup,
    ) -> Union[List[str], None]:
        # PeopleNews pages expose no usable keyword metadata, so this hook
        # always yields None (parameters kept for the base-class interface).
        keywords = None
        logger.debug(f"KEYWORDS: {keywords}")
        return keywords

    @staticmethod
    def _get_content(
        soup: BeautifulSoup,
    ) -> str:
        # Article body paragraphs live under <div itemprop="articleBody">;
        # join their text with newlines to form the article content.
        content_list = soup.find("div", itemprop="articleBody").find_all("p")
        content = "\n".join([c.text for c in content_list])
        logger.debug(f"CONTENT:\n {content}")
        return content
| 23.341463 | 77 | 0.657262 |
c1594310272d009b4034dbd4ded7d50bda72aa7b | 10,852 | py | Python | tests/test_onnx_v2.py | Fcc-Roy/transformers | 0fe17f375a4f0fdd9aea260d0645ccfd4896e958 | [
"Apache-2.0"
] | null | null | null | tests/test_onnx_v2.py | Fcc-Roy/transformers | 0fe17f375a4f0fdd9aea260d0645ccfd4896e958 | [
"Apache-2.0"
] | null | null | null | tests/test_onnx_v2.py | Fcc-Roy/transformers | 0fe17f375a4f0fdd9aea260d0645ccfd4896e958 | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
from tempfile import NamedTemporaryFile
from unittest import TestCase
from unittest.mock import patch
from parameterized import parameterized
from transformers import AutoConfig, AutoTokenizer, is_torch_available
from transformers.onnx import (
EXTERNAL_DATA_FORMAT_SIZE_LIMIT,
OnnxConfig,
ParameterFormat,
export,
validate_model_outputs,
)
from transformers.onnx.config import OnnxConfigWithPast
if is_torch_available():
from transformers.onnx.features import FeaturesManager
from transformers.onnx.utils import compute_effective_axis_dimension, compute_serialized_parameters_size
from transformers.testing_utils import require_onnx, require_torch, slow
@require_onnx
class OnnxUtilsTestCaseV2(TestCase):
    """
    Cover all the utilities involved to export ONNX models
    """

    @require_torch
    @patch("transformers.onnx.convert.is_torch_onnx_dict_inputs_support_available", return_value=False)
    def test_ensure_pytorch_version_ge_1_8_0(self, mock_is_torch_onnx_dict_inputs_support_available):
        """
        Ensure we raise an Exception if the pytorch version is unsupported (< 1.8.0)
        """
        # export() must assert-fail on the version check before touching any
        # of its (None) arguments.
        self.assertRaises(AssertionError, export, None, None, None, None, None)
        mock_is_torch_onnx_dict_inputs_support_available.assert_called()

    def test_compute_effective_axis_dimension(self):
        """
        When exporting ONNX model with dynamic axis (batch or sequence) we set batch_size and/or sequence_length = -1.
        We cannot generate an effective tensor with axis dim == -1, so we trick by using some "fixed" values
        (> 1 to avoid ONNX squeezing the axis).
        This test ensure we are correctly replacing generated batch / sequence tensor with axis > 1
        """
        # Dynamic axis (batch, no token added by the tokenizer)
        self.assertEqual(compute_effective_axis_dimension(-1, fixed_dimension=2, num_token_to_add=0), 2)
        # Static axis (batch, no token added by the tokenizer)
        self.assertEqual(compute_effective_axis_dimension(0, fixed_dimension=2, num_token_to_add=0), 2)
        # Dynamic axis (sequence, token added by the tokenizer 2 (no pair))
        # NOTE(review): each assertion below appears twice -- presumably a
        # copy/paste leftover; harmless but redundant.
        self.assertEqual(compute_effective_axis_dimension(0, fixed_dimension=8, num_token_to_add=2), 6)
        self.assertEqual(compute_effective_axis_dimension(0, fixed_dimension=8, num_token_to_add=2), 6)
        # Dynamic axis (sequence, token added by the tokenizer 3 (pair))
        self.assertEqual(compute_effective_axis_dimension(0, fixed_dimension=8, num_token_to_add=3), 5)
        self.assertEqual(compute_effective_axis_dimension(0, fixed_dimension=8, num_token_to_add=3), 5)

    def test_compute_parameters_serialized_size(self):
        """
        This test ensures we compute a "correct" approximation of the underlying storage requirement (size) for all the
        parameters for the specified parameter's dtype.
        """
        self.assertEqual(compute_serialized_parameters_size(2, ParameterFormat.Float), 2 * ParameterFormat.Float.size)

    def test_flatten_output_collection_property(self):
        """
        This test ensures we correctly flatten nested collection such as the one we use when returning past_keys.
        past_keys = Tuple[Tuple]
        ONNX exporter will export nested collections as ${collection_name}.${level_idx_0}.${level_idx_1}...${idx_n}
        """
        self.assertEqual(
            OnnxConfig.flatten_output_collection_property("past_key", [[0], [1], [2]]),
            {
                "past_key.0": 0,
                "past_key.1": 1,
                "past_key.2": 2,
            },
        )
class OnnxConfigTestCaseV2(TestCase):
    """
    Cover the test for models default.
    Default means no specific features is being enabled on the model.
    """

    @patch.multiple(OnnxConfig, __abstractmethods__=set())
    def test_use_external_data_format(self):
        """
        External data format is required only if the serialized size of the parameters is bigger than 2Gb
        """
        TWO_GB_LIMIT = EXTERNAL_DATA_FORMAT_SIZE_LIMIT

        # No parameters
        self.assertFalse(OnnxConfig.use_external_data_format(0))
        # Some parameters
        self.assertFalse(OnnxConfig.use_external_data_format(1))
        # Almost 2Gb parameters
        self.assertFalse(OnnxConfig.use_external_data_format((TWO_GB_LIMIT - 1) // ParameterFormat.Float.size))
        # Exactly 2Gb parameters
        self.assertTrue(OnnxConfig.use_external_data_format(TWO_GB_LIMIT))
        # More than 2Gb parameters
        self.assertTrue(OnnxConfig.use_external_data_format((TWO_GB_LIMIT + 1) // ParameterFormat.Float.size))
class OnnxConfigWithPastTestCaseV2(TestCase):
    """
    Cover the tests for model which have use_cache feature (i.e. "with_past" for ONNX)
    """

    # Populated with (name, config class) pairs when with-past configs are
    # enabled; currently empty, so the loops below are effectively skipped.
    SUPPORTED_WITH_PAST_CONFIGS = {}
    # SUPPORTED_WITH_PAST_CONFIGS = {
    #     ("BART", BartConfig),
    #     ("GPT2", GPT2Config),
    #     # ("T5", T5Config)
    # }

    @patch.multiple(OnnxConfigWithPast, __abstractmethods__=set())
    def test_use_past(self):
        """
        Ensure the use_past variable is correctly being set
        """
        for name, config in OnnxConfigWithPastTestCaseV2.SUPPORTED_WITH_PAST_CONFIGS:
            with self.subTest(name):
                self.assertFalse(
                    OnnxConfigWithPast.from_model_config(config()).use_past,
                    "OnnxConfigWithPast.from_model_config() should not use_past",
                )
                self.assertTrue(
                    OnnxConfigWithPast.with_past(config()).use_past,
                    "OnnxConfigWithPast.from_model_config() should use_past",
                )

    @patch.multiple(OnnxConfigWithPast, __abstractmethods__=set())
    def test_values_override(self):
        """
        Ensure the use_past variable correctly set the `use_cache` value in model's configuration
        """
        for name, config in OnnxConfigWithPastTestCaseV2.SUPPORTED_WITH_PAST_CONFIGS:
            with self.subTest(name):
                # without past
                onnx_config_default = OnnxConfigWithPast.from_model_config(config())
                self.assertIsNotNone(onnx_config_default.values_override, "values_override should not be None")
                self.assertIn("use_cache", onnx_config_default.values_override, "use_cache should be present")
                self.assertFalse(
                    onnx_config_default.values_override["use_cache"], "use_cache should be False if not using past"
                )
                # with past
                onnx_config_default = OnnxConfigWithPast.with_past(config())
                self.assertIsNotNone(onnx_config_default.values_override, "values_override should not be None")
                self.assertIn("use_cache", onnx_config_default.values_override, "use_cache should be present")
                self.assertTrue(
                    onnx_config_default.values_override["use_cache"], "use_cache should be False if not using past"
                )
# (model type, checkpoint) pairs exported through the plain feature path.
PYTORCH_EXPORT_MODELS = {
    ("albert", "hf-internal-testing/tiny-albert"),
    ("bert", "bert-base-cased"),
    ("ibert", "kssteven/ibert-roberta-base"),
    ("camembert", "camembert-base"),
    ("distilbert", "distilbert-base-cased"),
    ("roberta", "roberta-base"),
    ("xlm-roberta", "xlm-roberta-base"),
    ("layoutlm", "microsoft/layoutlm-base-uncased"),
}

# Decoder-only models that additionally support the "with past" (use_cache) export.
PYTORCH_EXPORT_WITH_PAST_MODELS = {
    ("gpt2", "gpt2"),
    ("gpt-neo", "EleutherAI/gpt-neo-125M"),
}

# Encoder-decoder (seq2seq) models exported with past key/values.
PYTORCH_EXPORT_SEQ2SEQ_WITH_PAST_MODELS = {
    ("bart", "facebook/bart-base"),
    ("mbart", "sshleifer/tiny-mbart"),
    ("t5", "t5-small"),
    ("marian", "Helsinki-NLP/opus-mt-en-de"),
}
def _get_models_to_test(export_models_list):
    """Expand (model type, checkpoint) pairs into parameterized.expand test cases.

    Each resulting tuple is (test_name, model_type, checkpoint, feature,
    onnx_config_class_constructor), one per feature supported by the model type.
    """
    models_to_test = []
    if not is_torch_available():
        # Returning some dummy test that should not be ever called because of the @require_torch decorator.
        # The reason for not returning an empty list is because parameterized.expand complains when it's empty.
        return [("dummy", "dummy", "dummy", "dummy", OnnxConfig.from_model_config)]
    for (name, model) in export_models_list:
        for feature, onnx_config_class_constructor in FeaturesManager.get_supported_features_for_model_type(
            name
        ).items():
            models_to_test.append((f"{name}_{feature}", name, model, feature, onnx_config_class_constructor))
    # Sorted so the generated test order is deterministic.
    return sorted(models_to_test)
class OnnxExportTestCaseV2(TestCase):
    """
    Integration tests ensuring supported models are correctly exported
    """

    def _pytorch_export(self, test_name, name, model_name, feature, onnx_config_class_constructor):
        """Export one (model, feature) combination to ONNX and validate its outputs."""
        from transformers.onnx import export

        tokenizer = AutoTokenizer.from_pretrained(model_name)
        config = AutoConfig.from_pretrained(model_name)
        # Useful for causal lm models that do not use pad tokens.
        if not getattr(config, "pad_token_id", None):
            config.pad_token_id = tokenizer.eos_token_id
        model_class = FeaturesManager.get_model_class_for_feature(feature)
        # from_config: randomly initialized weights are enough for export testing.
        model = model_class.from_config(config)
        onnx_config = onnx_config_class_constructor(model.config)
        with NamedTemporaryFile("w") as output:
            try:
                onnx_inputs, onnx_outputs = export(
                    tokenizer, model, onnx_config, onnx_config.default_onnx_opset, Path(output.name)
                )
                # Compare ONNX outputs against the PyTorch model's outputs.
                validate_model_outputs(
                    onnx_config,
                    tokenizer,
                    model,
                    Path(output.name),
                    onnx_outputs,
                    onnx_config.atol_for_validation,
                )
            except (RuntimeError, ValueError) as e:
                self.fail(f"{name}, {feature} -> {e}")

    @parameterized.expand(_get_models_to_test(PYTORCH_EXPORT_MODELS))
    @slow
    @require_torch
    def test_pytorch_export(self, test_name, name, model_name, feature, onnx_config_class_constructor):
        self._pytorch_export(test_name, name, model_name, feature, onnx_config_class_constructor)

    @parameterized.expand(_get_models_to_test(PYTORCH_EXPORT_WITH_PAST_MODELS))
    @slow
    @require_torch
    def test_pytorch_export_with_past(self, test_name, name, model_name, feature, onnx_config_class_constructor):
        self._pytorch_export(test_name, name, model_name, feature, onnx_config_class_constructor)

    @parameterized.expand(_get_models_to_test(PYTORCH_EXPORT_SEQ2SEQ_WITH_PAST_MODELS))
    @slow
    @require_torch
    def test_pytorch_export_seq2seq_with_past(
        self, test_name, name, model_name, feature, onnx_config_class_constructor
    ):
        self._pytorch_export(test_name, name, model_name, feature, onnx_config_class_constructor)
| 41.262357 | 119 | 0.686694 |
c54b0cb56af0ff40c2ef523746b702e6ea2ab4d3 | 476 | py | Python | distributed_run.py | semantic-multimedia-caddy/ImageCaptioning.pytorch | 085402901dafe9b169783d7ad2b30f95284c9a98 | [
"MIT"
] | null | null | null | distributed_run.py | semantic-multimedia-caddy/ImageCaptioning.pytorch | 085402901dafe9b169783d7ad2b30f95284c9a98 | [
"MIT"
] | null | null | null | distributed_run.py | semantic-multimedia-caddy/ImageCaptioning.pytorch | 085402901dafe9b169783d7ad2b30f95284c9a98 | [
"MIT"
] | null | null | null | import argparse
import torch
import time
from torch import multiprocessing as mp
import captioning.utils.opts as opts
from distributed_utils import setup, cleanup
from train_distributed import train
def run():
    """Parse command-line options, run training, and report wall-clock time."""
    opt = opts.parse_opt()
    # mp.spawn(train, args=(opt, opt.world_size), nprocs=1, join=True)
    start = time.time()
    train(opt)
    duration = time.time() - start
    print(f"Duration: {duration} sec(s).")
# Script entry point.
if __name__ == "__main__":
    run()
| 20.695652 | 70 | 0.682773 |
d74f5c890d947990b760a9958b529d98e0c9191f | 998 | py | Python | gopatch/packages/urls.py | porala/python | 41213189a9b35b5b8c40c048f4d6cd3f8e5f25f4 | [
"DOC"
] | 1 | 2020-01-15T11:04:16.000Z | 2020-01-15T11:04:16.000Z | gopatch/packages/urls.py | porala/python | 41213189a9b35b5b8c40c048f4d6cd3f8e5f25f4 | [
"DOC"
] | 2 | 2021-03-31T19:36:19.000Z | 2021-06-10T22:29:26.000Z | gopatch/packages/urls.py | porala/python | 41213189a9b35b5b8c40c048f4d6cd3f8e5f25f4 | [
"DOC"
] | null | null | null | # Copyright 2012 VPAC, http://www.vpac.org
# Copyright 2013-2016 Marcus Furlong <furlongm@gmail.com>
#
# This file is part of Patchman.
#
# Patchman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 only.
#
# Patchman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Patchman. If not, see <http://www.gnu.org/licenses/>
from __future__ import unicode_literals
from django.conf.urls import url
from packages import views
# URL namespace used with reverse()/{% url %} lookups, e.g. "packages:package_list".
app_name = 'packages'
urlpatterns = [
    url(r'^$', views.package_list, name='package_list'),
    # NOTE(review): the character class [_+-.\w] relies on '+-.' being a
    # valid range ('+' through '.'); presumably the intent was the literal
    # characters "_", "+", "-", "." — behavior happens to coincide, verify.
    url(r'^(?P<packagename>[_+-.\w]+)/$', views.package_detail,
        name='package_detail'),
]
| 32.193548 | 70 | 0.741483 |
b3f8a077b8e6766b6f0d083703698f48f2e1e6f9 | 5,849 | py | Python | saminda/cipres-airavata/sdk/scripts/remote_resource/trestles/gordon_submit_v2.py | SciGaP/DEPRECATED-Cipres-Airavata-POC | a2f3dce75ce6c8e23c8cdde14ece708b5a0ad4ab | [
"Apache-2.0"
] | null | null | null | saminda/cipres-airavata/sdk/scripts/remote_resource/trestles/gordon_submit_v2.py | SciGaP/DEPRECATED-Cipres-Airavata-POC | a2f3dce75ce6c8e23c8cdde14ece708b5a0ad4ab | [
"Apache-2.0"
] | null | null | null | saminda/cipres-airavata/sdk/scripts/remote_resource/trestles/gordon_submit_v2.py | SciGaP/DEPRECATED-Cipres-Airavata-POC | a2f3dce75ce6c8e23c8cdde14ece708b5a0ad4ab | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import gordon_lib_v2 as lib
import sys
import os
def main(argv=None):
    """
    Usage is:
    submit.py [--account <chargecode>] [--url <url>] -- <commandline>
    Run from the working dir of the job which must contain (in addition
    to the job files) a file named scheduler.conf with scheduler properties for the job.
    <chargecode>, if present, gives the project to charge the job to.
    Url is the url of the submitting website including the taskid parameter.
    Returns 0 with "jobid=<jobid>" on stdout if job submitted ok
    Returns 1 with multiline error message on stdout if error.
    Returns 2 for the specific error of queue limit exceeded.

    NOTE: this file is Python 2 (print statements, 0744 octal literal).
    """
    #COMMAND LINE PARSING
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--account', metavar="ACCOUNT", type=str, default=lib.account,
        help="The account string to use when submitting jobs. Default is read from config files.")
    parser.add_argument('--url', metavar="URL", dest="URL", type=str,
        help="Notification URL")
    try:
        cmdline_options, cmdline = parser.parse_known_args(argv)
        # Everything after the "--" separator (or after argv[0] when no
        # separator is present) is the job's command line.
        cmdline = cmdline[1:] if not ('--' in cmdline) else cmdline[cmdline.index('--')+1:]
    except Exception as e:
        print "There was a problem submitting your job"
        print e
        sys.exit(1)
    account = cmdline_options.account
    url = cmdline_options.URL
    #cmdline as an array (and already set)
    tooltype = lib.getToolType(cmdline)
    # On gordon for the time being we need to ignore the chargecode
    account = lib.account
    scheduler_properties = lib.getProperties("scheduler.conf")
    # print scheduler_properties
    scheduler_info = lib.schedulerInfo(scheduler_properties, tooltype)
    # print scheduler_info
    # If this is a "direct" run type job we don't need to create a qsub script, we'll just run batch_ommand.cmdline.
    if scheduler_info["is_direct"]:
        return lib.submitDirectJob(account, url, lib.email, lib.jobname, cmdline)
    runtime = int(scheduler_info["runtime"])
    useLocalDisk = False
    """
    Workaround for problems with file io on oasis and longer mrbayes runs. Instead of running on
    oasis, we'll copy the working dir to the compute nodes local storage and copy the results back
    when the job completes. Since many mrbayes jobs timeout we need a special trick to copy results
    of jobs that timeout: Right before we launch mrbayes we launch a shell script in the background
    that sleeps a few min less than the job's runtime and then copies the results. If mrbayes terminates
    normally the background sleep is killed automatically.
    """
    if (tooltype == "mrbayes" and runtime > 60):
        useLocalDisk = True
    # I'm backing out the workaround by setting useLocalDisk to false.
    # NOTE(review): this deliberately disables the local-disk path above,
    # so the large if-blocks below are currently dead code.
    useLocalDisk = False
    # Write the command line to a file, batch_command.cmdline.
    rfile = open(lib.cmdfile, "w")
    # On Gordon we have to use bash, not sh
    rfile.write("#!/bin/bash\n")
    rfile.writelines((" ".join(cmdline), "\n"))
    rfile.close()
    os.chmod(lib.cmdfile, 0744);
    # Create the qsub script
    rfile = open(lib.runfile, "w")
    # On Gordon we have to use bash, not sh
    text = """#!/bin/bash
#PBS -q %s
#PBS -N %s
#PBS -l walltime=00:%d:00
#PBS -o scheduler_stdout.txt
#PBS -e scheduler_stderr.txt
#PBS -W umask=0007
##PBS -V
#PBS -v QOS=%d
#PBS -M %s
#PBS -m ae
#PBS -A %s
""" % (scheduler_info["queue"], lib.jobname, scheduler_info["runtime"], scheduler_info["qos"], lib.email, account)
    rfile.write(text)
    text = "#PBS -l nodes=%d:ppn=%d:native\n" % (scheduler_info["nodes"], scheduler_info["ppn"])
    rfile.write(text)
    # Tuple indexing by bool: jobdir when False, local_jobdir when True.
    rfile.write("cd %s\n" % (lib.jobdir, lib.local_jobdir)[useLocalDisk])
    if useLocalDisk == True:
        # Note that it's critical that newlines in the text string are all within the double
        # quotes; otherwise the echo command line would be split across lines and make no sense.
        text = """"Due to filesystem problems intermediate results for longer mrbayes runs
will not be available while the job is running. The result files will be
available when mrbayes finishes.
We're working to find a solution." """
        rfile.write("echo %s > %s/INTERMEDIATE_RESULTS_README.TXT\n" % (text, lib.jobdir))
        rfile.write("cp -r %s/* .\n" % lib.jobdir);
        # Background copy-back script fires shortly before the walltime limit.
        sleepTime = int(scheduler_info["runtime"]) - 10
        rfile.write("sleep_cp.sh %s %s &\n" % (sleepTime, lib.jobdir))
    text = """
source /etc/profile.d/modules.sh
export MODULEPATH=/home/diag/jpg/modulefiles/gordon/applications:$MODULEPATH
echo Job starting at `date` > start.txt
curl %s\&status=START
export CIPRES_THREADSPP=%d
export CIPRES_NP=%d
%s 1>stdout.txt 2>stderr.txt
echo Job finished at `date` > done.txt
qstat -f $PBS_JOBID | grep Job
qstat -f $PBS_JOBID | grep resources
""" % (url,
       int(scheduler_info["threads_per_process"]),
       int(scheduler_info["mpi_processes"]),
       lib.cmdfile)
    rfile.write(text)
    if (useLocalDisk):
        # Copy the node-local working directory back to the shared jobdir,
        # swapping directories so partial results from sleep_cp.sh are replaced.
        text = """
echo "Job completed, starting to copy working directory."
echo "mkdir %s.complete"
mkdir %s.complete
echo "cp -r * %s.complete"
cp -r * %s.complete
echo "mv %s %s.sleep"
mv %s %s.sleep
echo "mv %s.complete %s"
mv %s.complete %s
echo "rm -rf %s.sleep"
rm -rf %s.sleep
echo "Finished copying working directory."
""" % (lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir)
        rfile.write(text)
    rfile.write("curl %s\&status=DONE\n" % url)
    rfile.close()
    return lib.submitJob()
    # NOTE(review): unreachable — the preceding return always exits.
    return 0
# Script entry point: exit with main()'s status code (0 ok, 1 error, 2 queue limit).
if __name__ == "__main__":
    sys.exit(main())
| 32.675978 | 174 | 0.672252 |
e9a7c425c6d96042020e87b132d5731f717cffe6 | 6,532 | py | Python | spinup/my_env/rtd/grid_mdp_v1.py | muzi2018/DRL_RTD | 518100c4c48f66fd38ef9877f3e4903c9cc2a237 | [
"MIT"
] | null | null | null | spinup/my_env/rtd/grid_mdp_v1.py | muzi2018/DRL_RTD | 518100c4c48f66fd38ef9877f3e4903c9cc2a237 | [
"MIT"
] | null | null | null | spinup/my_env/rtd/grid_mdp_v1.py | muzi2018/DRL_RTD | 518100c4c48f66fd38ef9877f3e4903c9cc2a237 | [
"MIT"
] | null | null | null | import logging
import random
import gym
logger = logging.getLogger(__name__)
class GridEnv1(gym.Env):
    """A small grid-world MDP rendered on a 600x600 canvas.

    Layout (state ids; P = impassable pillar cell, which has no state):

        row 1 (y=450):   1   2   3   4
        row 2 (y=350):   5   P   6   7
        row 3 (y=250):   8   9  10  11
        row 4 (y=150):  12  13  14  15

    Terminal states: 11 and 12 (fire pits, entering them yields -1) and
    15 (the diamond, entering it yields +1).
    """
    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second': 2
    }

    def __init__(self):
        # State space: cells 1..15.
        # BUG FIX: was range(1, 17); state 16 does not exist in the
        # transition table, so an episode reset there could never end.
        self.states = range(1, 16)
        # Screen coordinates of each state's center, indexed by state - 1.
        # BUG FIX: the original tables ([150,250,350,450]*4 plus a
        # "[250] * 40" typo) ignored the pillar gap in row 2 and misplaced
        # every state from 6 onward relative to the drawn fires/diamond.
        self.x = ([150, 250, 350, 450]
                  + [150, 350, 450]
                  + [150, 250, 350, 450]
                  + [150, 250, 350, 450])
        self.y = [450] * 4 + [350] * 3 + [250] * 4 + [150] * 4
        self.terminate_states = dict()  # terminal states, keyed by state id
        self.terminate_states[11] = 1
        self.terminate_states[12] = 1
        self.terminate_states[15] = 1
        self.actions = ['n', 'e', 's', 'w']
        self.rewards = dict()  # rewards keyed by "<state>_<action>"
        self.rewards['8_s'] = -1.0   # into fire pit 12
        self.rewards['13_w'] = -1.0  # into fire pit 12
        self.rewards['7_s'] = -1.0   # into fire pit 11
        self.rewards['10_e'] = -1.0  # into fire pit 11
        # BUG FIX: was '14_4', an unreachable key (actions are letters);
        # moving east from 14 reaches the diamond at state 15.
        self.rewards['14_e'] = 1.0
        self.t = dict()  # transition table keyed by "<state>_<action>"
        self.t['1_s'] = 5
        self.t['1_e'] = 2
        self.t['2_w'] = 1
        self.t['2_e'] = 3
        self.t['3_s'] = 6
        self.t['3_w'] = 2
        self.t['3_e'] = 4
        self.t['4_w'] = 3
        self.t['4_s'] = 7
        self.t['5_n'] = 1  # BUG FIX: the reverse of 1_s was missing
        self.t['5_s'] = 8
        self.t['6_n'] = 3
        self.t['6_s'] = 10
        self.t['6_e'] = 7
        self.t['7_w'] = 6
        self.t['7_n'] = 4
        self.t['7_s'] = 11
        self.t['8_n'] = 5
        self.t['8_e'] = 9
        self.t['8_s'] = 12
        self.t['9_w'] = 8
        self.t['9_e'] = 10
        self.t['9_s'] = 13
        self.t['10_w'] = 9  # (duplicate assignment in the original removed)
        self.t['10_n'] = 6
        self.t['10_e'] = 11
        self.t['10_s'] = 14
        self.t['13_n'] = 9
        self.t['13_e'] = 14
        self.t['13_w'] = 12
        self.t['14_n'] = 10
        self.t['14_e'] = 15
        self.t['14_w'] = 13
        self.gamma = 0.8  # discount factor
        self.viewer = None
        self.state = None

    def _seed(self, seed=None):
        """Seed this environment's RNG; returns the seed list per the gym API."""
        # BUG FIX: the original called random.seeding.np_random, which does
        # not exist on the stdlib random module; use gym's seeding helper.
        from gym.utils import seeding
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def getTerminal(self):
        """Return the terminal-state dict."""
        return self.terminate_states

    def getGamma(self):
        """Return the discount factor."""
        return self.gamma

    def getStates(self):
        """Return the state space."""
        return self.states

    def getAction(self):
        """Return the action space."""
        return self.actions

    def getTerminate_states(self):
        """Return the terminal-state dict (alias of getTerminal)."""
        return self.terminate_states

    def setAction(self, s):
        """Force the current state to ``s`` (name kept for compatibility)."""
        self.state = s

    def step(self, action):
        """Apply ``action``; returns (next_state, reward, done, info).

        Unknown (state, action) pairs — walls and the pillar — leave the
        state unchanged with zero reward.
        """
        state = self.state
        if state in self.terminate_states:
            return state, 0, True, {}
        key = "%d_%s" % (state, action)  # dictionary key for state + action
        # State transition: stay in place when the move is not in the table.
        if key in self.t:
            next_state = self.t[key]
        else:
            next_state = state
        self.state = next_state
        is_terminal = next_state in self.terminate_states
        r = self.rewards.get(key, 0.0)
        return next_state, r, is_terminal, {}

    def reset(self):
        """Restart in a uniformly random state (terminal states included)."""
        self.state = self.states[int(random.random() * len(self.states))]
        return self.state

    def render(self, mode='human'):
        """Draw the grid, obstacles, goal and the robot at the current state."""
        from gym.envs.classic_control import rendering
        screen_width = 600
        screen_height = 600
        if self.viewer is None:
            self.viewer = rendering.Viewer(screen_width, screen_height)
            # Grid lines of the 4x4 world.
            self.line1 = rendering.Line((100, 100), (500, 100))
            self.line2 = rendering.Line((100, 200), (500, 200))
            self.line3 = rendering.Line((100, 300), (500, 300))
            self.line4 = rendering.Line((100, 400), (500, 400))
            self.line5 = rendering.Line((100, 500), (500, 500))
            self.line6 = rendering.Line((100, 100), (100, 500))
            self.line7 = rendering.Line((200, 100), (200, 500))
            self.line8 = rendering.Line((300, 100), (300, 500))
            self.line9 = rendering.Line((400, 100), (400, 500))
            self.line10 = rendering.Line((500, 100), (500, 500))
            # The impassable pillar (row 2, column 2).
            self.shizhu = rendering.make_circle(40)
            self.circletrans = rendering.Transform(translation=(250, 350))
            self.shizhu.add_attr(self.circletrans)
            self.shizhu.set_color(0.8, 0.6, 0.4)
            # First fire pit (state 11).
            self.fire1 = rendering.make_circle(40)
            self.circletrans = rendering.Transform(translation=(450, 250))
            self.fire1.add_attr(self.circletrans)
            self.fire1.set_color(1, 0, 0)
            # Second fire pit (state 12).
            self.fire2 = rendering.make_circle(40)
            self.circletrans = rendering.Transform(translation=(150, 150))
            self.fire2.add_attr(self.circletrans)
            self.fire2.set_color(1, 0, 0)
            # The diamond / goal (state 15).
            self.diamond = rendering.make_circle(40)
            self.circletrans = rendering.Transform(translation=(450, 150))
            self.diamond.add_attr(self.circletrans)
            self.diamond.set_color(0, 0, 1)
            # The robot marker.
            self.robot = rendering.make_circle(30)
            self.robotrans = rendering.Transform()
            self.robot.add_attr(self.robotrans)
            self.robot.set_color(0, 1, 0)
            self.line1.set_color(0, 0, 0)
            self.line2.set_color(0, 0, 0)
            self.line3.set_color(0, 0, 0)
            self.line4.set_color(0, 0, 0)
            self.line5.set_color(0, 0, 0)
            self.line6.set_color(0, 0, 0)
            self.line7.set_color(0, 0, 0)
            self.line8.set_color(0, 0, 0)
            self.line9.set_color(0, 0, 0)
            self.line10.set_color(0, 0, 0)
            self.viewer.add_geom(self.line1)
            self.viewer.add_geom(self.line2)
            self.viewer.add_geom(self.line3)
            self.viewer.add_geom(self.line4)
            self.viewer.add_geom(self.line5)
            self.viewer.add_geom(self.line6)
            self.viewer.add_geom(self.line7)
            self.viewer.add_geom(self.line8)
            self.viewer.add_geom(self.line9)
            self.viewer.add_geom(self.line10)
            self.viewer.add_geom(self.shizhu)
            self.viewer.add_geom(self.fire1)
            self.viewer.add_geom(self.fire2)
            self.viewer.add_geom(self.diamond)
            self.viewer.add_geom(self.robot)
        if self.state is None:
            return None
        self.robotrans.set_translation(self.x[self.state - 1], self.y[self.state - 1])
        return self.viewer.render(return_rgb_array=mode == 'rgb_array')

    def close(self):
        """Close the rendering window, if any."""
        if self.viewer:
            self.viewer.close()
| 30.523364 | 83 | 0.535977 |
2f9d794d55aee981a0585b3d8653978dfe907f2e | 3,296 | py | Python | lifelib/projects/solvency2/model/PV/__init__.py | fumitoh/lifelib | 01b6fec4453b309808c1c7ca6867c7dce50668dc | [
"MIT"
] | 77 | 2018-03-02T05:21:43.000Z | 2022-03-26T20:29:59.000Z | lifelib/projects/solvency2/model/PV/__init__.py | dayeoni-1376/lifelib | e65ba42843e8ae5f00ea795a8bb29ccd6e99ba54 | [
"MIT"
] | 10 | 2018-02-17T03:07:20.000Z | 2021-11-15T13:40:15.000Z | lifelib/projects/solvency2/model/PV/__init__.py | dayeoni-1376/lifelib | e65ba42843e8ae5f00ea795a8bb29ccd6e99ba54 | [
"MIT"
] | 24 | 2018-03-12T20:01:06.000Z | 2022-03-07T06:06:18.000Z | """Present Value mix-in Space
This Space serves as a base Space for :mod:`~model.simplelife.Projection`
Space, and it contains Cells to take the present value of projected cashflows.
.. blockdiag::
blockdiag {
default_node_color="#D5E8D4";
default_linecolor="#628E47";
BaseProj[style=dotted]
BaseProj <- OuterProj [hstyle=generalization]
PresentValue[style=dotted]
PresentValue <- OuterProj [hstyle=generalization];
}
"""
from modelx.serialize.jsonvalues import *
# modelx serialization values for this Space: no Space formula, no base
# Spaces, default None-return policy, and no child Spaces.
_formula = None
_bases = []
_allow_none = None
_spaces = []
# ---------------------------------------------------------------------------
# Cells
# NOTE: these are modelx Cells. Names such as last_t, DiscRate, PremIncome,
# ExpsTotal, BenefitTotal, InsurIF_Beg1 are resolved at run time within the
# enclosing Space, not imported here. Each recursion terminates at t > last_t.


def InterestNetCF(t):
    """Interest accreted on PV of net cashflows"""
    if t > last_t:
        return 0
    else:
        return (PV_NetCashflow(t)
                - PremIncome(t)
                + ExpsTotal(t)) * DiscRate(t)


def PV_BenefitDeath(t):
    """Present value of death benefits (paid at end of period)"""
    if t > last_t:
        return 0
    else:
        return (-BenefitDeath(t) + PV_BenefitDeath(t+1)) / (1 + DiscRate(t))


def PV_BenefitMat(t):
    """Present value of maturity benefits"""
    if t > last_t:
        return 0
    else:
        return (-BenefitMat(t) + PV_BenefitMat(t+1)) / (1 + DiscRate(t))


def PV_BenefitSurr(t):
    """Present value of surrender benefits"""
    if t > last_t:
        return 0
    else:
        return (-BenefitSurr(t) + PV_BenefitSurr(t+1)) / (1 + DiscRate(t))


def PV_BenefitTotal(t):
    """Present value of total benefits"""
    if t > last_t:
        return 0
    else:
        return (-BenefitTotal(t) + PV_BenefitTotal(t+1)) / (1 + DiscRate(t))


def PV_Check(t):
    """Consistency check: should be zero at every t."""
    return PV_NetCashflow(t) - PV_NetCashflowForCheck(t)


def PV_ExpsAcq(t):
    """Present value of acquisition expenses (incurred at start of period)"""
    if t > last_t:
        return 0
    else:
        return - ExpsAcq(t) + PV_ExpsAcq(t+1) / (1 + DiscRate(t))


def PV_ExpsCommTotal(t):
    """Present value of commission expenses"""
    if t > last_t:
        return 0
    else:
        return - ExpsCommTotal(t) + PV_ExpsCommTotal(t+1) / (1 + DiscRate(t))


def PV_ExpsMaint(t):
    """Present value of maintenance expenses"""
    if t > last_t:
        return 0
    else:
        return - ExpsMaint(t) + PV_ExpsMaint(t+1) / (1 + DiscRate(t))


def PV_ExpsTotal(t):
    """Present value of total expenses (negative by construction)"""
    if t > last_t:
        return 0
    else:
        return - ExpsTotal(t) + PV_ExpsTotal(t+1) / (1 + DiscRate(t))


def PV_NetCashflow(t):
    """Present value of net cashflow (expenses/benefits carry negative signs)"""
    return (PV_PremIncome(t)
            + PV_ExpsTotal(t)
            + PV_BenefitTotal(t))


def PV_NetCashflowForCheck(t):
    """Present value of net cashflow, computed directly for cross-checking"""
    if t > last_t:
        return 0
    else:
        return (PremIncome(t)
                - ExpsTotal(t)
                - BenefitTotal(t) / (1 + DiscRate(t))
                + PV_NetCashflow(t+1) / (1 + DiscRate(t)))


def PV_PremIncome(t):
    """Present value of premium income (received at start of period)"""
    if t > last_t:
        return 0
    else:
        return PremIncome(t) + PV_PremIncome(t+1) / (1 + DiscRate(t))


def PV_SumInsurIF(t):
    """Present value of insurance in-force"""
    if t > last_t:
        return 0
    else:
        return InsurIF_Beg1(t) + PV_SumInsurIF(t+1) / (1 + DiscRate(t))
0f8ed425472f6cd48fcd9945072e112ebb52c762 | 1,001 | py | Python | d2lbook/rst_test.py | AnirudhDagar/d2l-book | c168ec5b4a1a8b440f8bf79bb2a588d3c28c40f5 | [
"Apache-2.0"
] | 115 | 2019-04-20T01:10:40.000Z | 2022-03-23T21:19:53.000Z | d2lbook/rst_test.py | AnirudhDagar/d2l-book | c168ec5b4a1a8b440f8bf79bb2a588d3c28c40f5 | [
"Apache-2.0"
] | 21 | 2019-07-26T12:35:48.000Z | 2022-03-31T08:39:32.000Z | d2lbook/rst_test.py | AnirudhDagar/d2l-book | c168ec5b4a1a8b440f8bf79bb2a588d3c28c40f5 | [
"Apache-2.0"
] | 63 | 2019-04-20T17:12:03.000Z | 2022-03-14T15:01:36.000Z | from d2lbook import notebook
from d2lbook import rst
import unittest
import nbconvert
_markdown_src = r'''
# Test
:label:`test`
first para
python is good
another para
This is :eqref:`sec_1`
```python2
1+2+3
```
python3 is better
- here
- haha
```{.input .python}
1+2+3
```
```{.input .python}
#@tab python2
1+2+3
```
```bash
````
aa
````
```
## Section 2
:label:`sec_2`
```eval_rst
.. only:: html
Table of Contents
-----------------
```
```toc
:numbered:
:maxdepth: 2
install
user/index
develop/index
```

:width:`400px`
$x=1$, :numref:`sec_2`
'''
class TestRst(unittest.TestCase):
    """Smoke test for converting a parsed markdown notebook into reStructuredText."""
    # TODO(mli) add some asserts
    def test_convert_notebook(self):
        """Convert the fixture and check one known line survives round-trip."""
        nb = notebook.read_markdown(_markdown_src)
        body, _ = rst.convert_notebook(nb, {})
        lines = body.split('\n')
        for l in lines:
            if l.startswith(':math:`x=1`'):
                # The inline math plus cross-reference must be preserved verbatim.
                self.assertEqual(l, ':math:`x=1`, :numref:`sec_2`')
| 12.5125 | 67 | 0.6004 |
a21f5d252f714a953a47e269efe996a05bd36d5e | 23,404 | py | Python | typeclasses/accounts.py | dvoraen/arxcode | a89f2004fca10e0b1f1995c2420dd3ffbf08774b | [
"MIT"
] | 42 | 2018-08-12T00:55:24.000Z | 2021-12-24T15:16:08.000Z | typeclasses/accounts.py | dvoraen/arxcode | a89f2004fca10e0b1f1995c2420dd3ffbf08774b | [
"MIT"
] | 312 | 2018-10-22T23:03:27.000Z | 2022-02-06T13:02:58.000Z | typeclasses/accounts.py | dvoraen/arxcode | a89f2004fca10e0b1f1995c2420dd3ffbf08774b | [
"MIT"
] | 42 | 2018-08-12T00:41:48.000Z | 2022-01-27T14:03:16.000Z | """
Player
The Player represents the game "account" and each login has only one
Player object. A Player is what chats on default channels but has no
other in-game-world existance. Rather the Player puppets Objects (such
as Characters) in order to actually participate in the game world.
Guest
Guest players are simple low-level accounts that are created/deleted
on the fly and allows users to test the game without the committment
of a full registration. Guest accounts are deactivated by default; to
activate them, add the following line to your settings file:
GUEST_ENABLED = True
You will also need to modify the connection screen to reflect the
possibility to connect with a guest account. The setting file accepts
several more options for customizing the Guest account system.
"""
from evennia import DefaultAccount
from typeclasses.mixins import MsgMixins, InformMixin
from web.character.models import PlayerSiteEntry
_MUDINFO_CHANNEL = None
class Account(InformMixin, MsgMixins, DefaultAccount):
"""
This class describes the actual OOC player (i.e. the user connecting
to the MUD). It does NOT have visual appearance in the game world (that
is handled by the character which is connected to this). Comm channels
are attended/joined using this object.
It can be useful e.g. for storing configuration options for your game, but
should generally not hold any character-related info (that's best handled
on the character level).
Can be set using BASE_PLAYER_TYPECLASS.
* available properties
key (string) - name of player
name (string)- wrapper for user.username
aliases (list of strings) - aliases to the object. Will be saved to database as AliasDB entries
but returned as strings.
dbref (int, read-only) - unique #id-number. Also "id" can be used.
date_created (string) - time stamp of object creation
permissions (list of strings) - list of permission strings
user (User, read-only) - django User authorization object
obj (Object) - game object controlled by player. 'character' can also be used.
sessions (list of Sessions) - sessions connected to this player
is_superuser (bool, read-only) - if the connected user is a superuser
* Handlers
locks - lock-handler: use locks.add() to add new lock strings
db - attribute-handler: store/retrieve database attributes on this self.db.myattr=val, val=self.db.myattr
ndb - non-persistent attribute handler: same as db but does not create a database entry when storing data
scripts - script-handler. Add new scripts to object with scripts.add()
cmdset - cmdset-handler. Use cmdset.add() to add new cmdsets to object
nicks - nick-handler. New nicks with nicks.add().
* Helper methods
msg(text=None, **kwargs)
swap_character(new_character, delete_old_character=False)
execute_cmd(raw_string, session=None)
search(ostring, global_search=False, attribute_name=None, use_nicks=False, location=None, ignore_errors=False,
player=False)
is_typeclass(typeclass, exact=False)
swap_typeclass(new_typeclass, clean_attributes=False, no_default=True)
access(accessing_obj, access_type='read', default=False)
check_permstring(permstring)
* Hook methods (when re-implementation, remember methods need to have self as first arg)
basetype_setup()
at_player_creation()
- note that the following hooks are also found on Objects and are
usually handled on the character level:
at_init()
at_cmdset_get(**kwargs)
at_first_login()
at_post_login(session=None)
at_disconnect()
at_message_receive()
at_message_send()
at_server_reload()
at_server_shutdown()
"""
    def __str__(self):
        """String form is the (capitalized) account name via the `name` property."""
        return self.name

    def at_account_creation(self):
        """
        This is called once, the very first time
        the player is created (i.e. first time they
        register with the game). It's a good place
        to store attributes all players should have,
        like configuration values etc.
        """
        # set an (empty) attribute holding the characters this player has
        lockstring = (
            "attrread:perm(Wizards);attredit:perm(Wizards);attrcreate:perm(Wizards)"
        )
        self.attributes.add("_playable_characters", [], lockstring=lockstring)
        # In-game mail storage: list of mail tuples and the set of read indices.
        self.db.mails = []
        self.db.readmails = set()
    # noinspection PyBroadException
    def at_post_login(self, session=None):
        """
        Called at the end of the login process, just before letting
        them loose. This is called before an eventual Character's
        at_post_login hook. Announces mail/informs/tickets, flushes any
        queued messages, records the connecting IP, reactivates a frozen
        or inactive roster entry, and notifies watchers.
        :type self: AccountDB
        :type session: Session
        """
        self.db._last_puppet = self.char_ob or self.db._last_puppet
        super(Account, self).at_post_login(session)
        if self.tags.get("new_mail"):
            self.msg("{y*** You have new mail. ***{n")
        self.announce_informs()
        # Deliver messages queued by send_or_queue_msg while we were offline.
        pending = self.db.pending_messages or []
        for msg in pending:
            self.msg(msg, options={"box": True})
        self.attributes.remove("pending_messages")
        if self.assigned_to.filter(status=1, priority__lte=5):
            self.msg(
                "{yYou have unresolved tickets assigned to you. Use @job/mine to view them.{n"
            )
        self.check_motd()
        self.check_petitions()
        # in this mode we should have only one character available. We
        # try to auto-connect to it by calling the @ic command
        # (this relies on player.db._last_puppet being set)
        self.execute_cmd("@bbsub/quiet story updates")
        address = self.sessions.all()[-1].address
        if isinstance(address, tuple):
            address = address[0]
        PlayerSiteEntry.add_site_for_player(self.char_ob, address)
        try:
            from commands.base_commands.bboards import get_unread_posts
            get_unread_posts(self)
        except Exception:
            pass
        try:
            # Un-freeze and reactivate the roster entry on login.
            if self.roster.frozen:
                self.roster.frozen = False
                self.roster.save()
            if self.roster.roster.name == "Inactive":
                from web.character.models import Roster
                try:
                    active = Roster.objects.active
                    self.roster.roster = active
                    self.roster.save()
                except Roster.DoesNotExist:
                    pass
            # Tell watchers we connected, on the first session only.
            watched_by = self.char_ob.db.watched_by or []
            if self.sessions.count() == 1:
                if not self.db.hide_from_watch:
                    for watcher in watched_by:
                        watcher.msg(
                            "{wA player you are watching, {c%s{w, has connected.{n"
                            % self
                        )
            self.db.afk = ""
        except AttributeError:
            pass
    # noinspection PyBroadException
    def announce_informs(self):
        """Lets us know if we have unread informs, both personal and per-org."""
        msg = ""
        try:
            unread = self.informs.filter(read_by__isnull=True).count()
            if unread:
                msg += (
                    "{w*** You have %s unread informs. Use @informs to read them. ***{n\n"
                    % unread
                )
            for org in self.current_orgs:
                # Only mention orgs whose informs we are allowed to read.
                if not org.access(self, "informs"):
                    continue
                unread = org.informs.exclude(read_by=self).count()
                if unread:
                    msg += "{w*** You have %s unread informs for %s. ***{n\n" % (
                        unread,
                        org,
                    )
        except Exception:
            # Best-effort announcement: never let this break login.
            pass
        if msg:
            self.msg(msg)
    def is_guest(self):
        """
        Overload in guest object to return True
        """
        return False

    def at_first_login(self):
        """
        Only called once, the very first
        time the user logs in. Subscribes them to the public channel.
        """
        self.execute_cmd("addcom pub=public")
        pass
def mail(self, message, subject=None, sender=None, receivers=None):
"""
Sends a mail message to player.
"""
from django.utils import timezone
sentdate = timezone.now().strftime("%x %X")
mail = (sender, subject, message, sentdate, receivers)
if not self.db.mails:
self.db.mails = []
self.db.mails.append(mail)
if sender:
from_str = " from {c%s{y" % sender.capitalize()
else:
from_str = ""
self.msg(
"{yYou have new mail%s. Use {w'mail %s' {yto read it.{n"
% (from_str, len(self.db.mails))
)
self.tags.add("new_mail")
    def get_fancy_name(self):
        """Ensures that our name is capitalized"""
        return self.key.capitalize()

    # noinspection PyAttributeOutsideInit
    def set_name(self, value):
        # Setter side of the `name` property: writes straight to key.
        self.key = value

    # `name` reads as capitalized key; assigning updates key directly.
    name = property(get_fancy_name, set_name)

    def send_or_queue_msg(self, message):
        """Sends a message to us if we're online or queues it for later delivery
        by at_post_login."""
        if self.is_connected:
            self.msg(message, options={"box": True})
            return
        pending = self.db.pending_messages or []
        pending.append(message)
        self.db.pending_messages = pending

    def get_all_sessions(self):
        """Retrieves our connected sessions"""
        return self.sessions.all()
    @property
    def public_orgs(self):
        """
        Return public organizations we're in (empty list if no Dominion record).
        """
        try:
            return self.Dominion.public_orgs
        except AttributeError:
            return []

    @property
    def current_orgs(self):
        """Returns our current organizations we're a member of"""
        try:
            return self.Dominion.current_orgs
        except AttributeError:
            return []

    @property
    def secret_orgs(self):
        """Returns any secret orgs we're a member of"""
        try:
            return self.Dominion.secret_orgs
        except AttributeError:
            return []

    @property
    def active_memberships(self):
        """Returns our active memberships (not deguilded)"""
        try:
            return self.Dominion.memberships.filter(deguilded=False)
        except AttributeError:
            return []

    @property
    def assets(self):
        """Returns the holder for all our assets/prestige/etc.
        NOTE(review): unlike the properties above this does not guard
        against a missing Dominion record."""
        return self.Dominion.assets

    def get_resource_amt(self, rtype) -> int:
        """Retrieves how much of a given resource this player has (0 if unknown)."""
        try:
            amt = getattr(self.assets, rtype)
        except AttributeError:
            return 0
        else:
            return amt
    def pay_resources(self, rtype, amt):
        """
        Attempt to pay resources. If we don't have enough,
        return False. A negative amt credits resources instead.
        """
        try:
            assets = self.assets
            current = getattr(assets, rtype)
            if current < amt:
                return False
            setattr(assets, rtype, current - amt)
            assets.save()
            return True
        except AttributeError:
            # No asset holder or unknown resource name.
            return False

    def gain_resources(self, rtype, amt):
        """
        Attempt to gain resources. If something goes wrong, we return 0. We call pay_resources with a negative
        amount, and if returns true, we return the amount to show what we gained.
        """
        if self.pay_resources(rtype, -amt):
            return amt
        return 0

    def get_material_amt(self, material_type) -> int:
        """Retrieves how much of a given material this player has (0 if none owned)."""
        from django.core.exceptions import ObjectDoesNotExist
        try:
            material = self.assets.owned_materials.get(type=material_type)
        except ObjectDoesNotExist:
            return 0
        else:
            return material.amount
    def pay_materials(self, material_type, amount):
        """
        Attempts to pay materials of the given type and amount.
        A negative amount credits materials (creating the record if needed).
        Args:
            material_type (CraftingMaterialType): Material type we're paying with
            amount: amount we're spending
        Returns:
            True if the payment succeeded, False otherwise
            (original docstring had this inverted).
        """
        from django.core.exceptions import ObjectDoesNotExist
        assets = self.assets
        try:
            if amount < 0:
                # Gaining: make sure a record exists to credit into.
                material, _ = assets.owned_materials.get_or_create(type=material_type)
            else:
                material = assets.owned_materials.get(type=material_type)
                if material.amount < amount:
                    return False
            material.amount -= amount
            material.save()
            return True
        except ObjectDoesNotExist:
            return False

    def gain_materials(self, material_type, amount):
        """Similar to gain_resources, call pay_materials with negative amount to gain it"""
        return self.pay_materials(material_type, -amount)
    def pay_action_points(self, amt, can_go_over_cap=False):
        """
        Attempt to pay action points. If we don't have enough,
        return False. A negative amt grants points; unless
        can_go_over_cap is set the total is clamped to the weekly cap.
        """
        try:
            # Re-sync from the database if our cached copy diverged from
            # the character's roster record.
            if self.roster.action_points != self.char_ob.roster.action_points:
                self.roster.refresh_from_db(fields=("action_points",))
                self.char_ob.roster.refresh_from_db(fields=("action_points",))
            if self.roster.action_points < amt:
                return False
            self.roster.action_points -= amt
            if (
                self.roster.action_points > self.roster.max_action_points
                and not can_go_over_cap
            ):
                self.roster.action_points = self.roster.max_action_points
            self.roster.save()
            if amt > 0:
                verb = "use"
            else:
                verb = "gain"
            amt = abs(amt)
            self.msg(
                "{wYou %s %s action points and have %s remaining this week.{n"
                % (verb, amt, self.roster.action_points)
            )
            return True
        except AttributeError:
            # No roster/character attached to this account.
            return False
    @property
    def retainers(self):
        """Returns queryset of retainer agents (unique ones)"""
        try:
            return self.assets.agents.filter(unique=True)
        except AttributeError:
            return []

    @property
    def agents(self):
        """Returns queryset of any agents we own"""
        try:
            return self.assets.agents.all()
        except AttributeError:
            return []

    def get_absolute_url(self):
        """Returns our absolute URL for the webpage for our character,
        or None when no character is attached."""
        try:
            return self.char_ob.get_absolute_url()
        except AttributeError:
            pass
    def at_post_disconnect(self):
        """After disconnection is complete, delete NAttributes."""
        # Only run the cleanup once the LAST session has gone away.
        if not self.sessions.all():
            # Old-style conditional expression: watched_by list, or [] when
            # there is no character or no watchers.
            watched_by = self.char_ob and self.char_ob.db.watched_by or []
            if watched_by and not self.db.hide_from_watch:
                for watcher in watched_by:
                    watcher.msg(
                        "{wA player you are watching, {c%s{w, has disconnected.{n"
                        % self.key.capitalize()
                    )
            # Archive this session's message log and reset transient flags.
            self.previous_log = self.current_log
            self.current_log = []
            self.db.lookingforrp = False
            # Undo any temporary channel mutes that were set for this session.
            temp_muted = self.db.temp_mute_list or []
            for channel in temp_muted:
                channel.unmute(self)
            self.attributes.remove("temp_mute_list")
            # Clear non-persistent attributes on both character and account.
            try:
                self.char_ob.nattributes.clear()
            except AttributeError:
                pass
            self.nattributes.clear()
def log_message(self, from_obj, text):
"""Logs messages if we're not in private for this session"""
from evennia.utils.utils import make_iter
if not self.tags.get("private_mode"):
text = text.strip()
from_obj = make_iter(from_obj)[0]
tup = (from_obj, text)
if (
tup not in self.current_log
and from_obj != self
and from_obj != self.char_ob
):
self.current_log.append((from_obj, text))
@property
def current_log(self):
"""Temporary messages for this session"""
if self.ndb.current_log is None:
self.ndb.current_log = []
return self.ndb.current_log
@current_log.setter
def current_log(self, val):
self.ndb.current_log = val
    @property
    def previous_log(self):
        """Log of our past session"""
        # Persistent Attribute, lazily initialized. We deliberately return the
        # db-backed value (not a local list) so in-place edits are saved.
        if self.db.previous_log is None:
            self.db.previous_log = []
        return self.db.previous_log

    @previous_log.setter
    def previous_log(self, val):
        self.db.previous_log = val

    @property
    def flagged_log(self):
        """Messages flagged for GM notice"""
        # Same lazy persistent-Attribute pattern as previous_log.
        if self.db.flagged_log is None:
            self.db.flagged_log = []
        return self.db.flagged_log

    @flagged_log.setter
    def flagged_log(self, val):
        self.db.flagged_log = val
def report_player(self, player):
"""Reports a player for GM attention"""
charob = player.char_ob
log = []
for line in list(self.previous_log) + list(self.current_log):
if line[0] == charob or line[0] == player:
log.append(line)
self.flagged_log = log
    @property
    def allow_list(self):
        """List of players allowed to interact with us"""
        # Persistent Attribute, lazily initialized; return the db-backed list
        # so in-place mutations persist.
        if self.db.allow_list is None:
            self.db.allow_list = []
        return self.db.allow_list

    @property
    def block_list(self):
        """List of players who should not be allowed to interact with us"""
        if self.db.block_list is None:
            self.db.block_list = []
        return self.db.block_list
@property
def clues_shared_modifier_seed(self):
"""Seed value for clue sharing costs"""
from world.traits.models import Trait
seed = 0
pc = self.char_ob
for stat in Trait.get_valid_stat_names(Trait.SOCIAL):
seed += pc.traits.get_stat_value(stat)
# do not be nervous. I love you. <3
seed += sum(
[
pc.traits.get_skill_value(ob)
for ob in Trait.get_valid_skill_names(Trait.SOCIAL)
]
)
seed += pc.traits.get_skill_value("investigation") * 3
return seed
@property
def clue_cost(self):
"""Total cost for clues"""
return int(100.0 / float(self.clues_shared_modifier_seed + 1)) + 1
@property
def participated_actions(self):
"""Actions we participated in"""
from world.dominion.models import PlotAction
from django.db.models import Q
dompc = self.Dominion
return PlotAction.objects.filter(
Q(assistants=dompc) | Q(dompc=dompc)
).distinct()
@property
def past_participated_actions(self):
"""Actions we participated in previously"""
from world.dominion.models import PlotAction
return self.participated_actions.filter(status=PlotAction.PUBLISHED).distinct()
def show_online(self, caller, check_puppet=False):
"""
Checks if we're online and caller has privileges to see that
Args:
caller: Player checking if we're online
check_puppet: Whether to check if we're currently puppeting our character object
Returns:
True if they see us as online, False otherwise.
"""
if not self.char_ob:
return True
return self.char_ob.show_online(caller, check_puppet)
    @property
    def player_ob(self):
        """Maybe this should return self? Will need to think about that. Inherited from mixins"""
        # On an account this identity hook simply returns ourselves.
        return self

    @property
    def char_ob(self):
        """Returns our character object if any"""
        try:
            return self.roster.character
        except AttributeError:
            # No roster entry; fall through and implicitly return None.
            pass
@property
def editable_theories(self):
"""Theories we have permission to edit"""
ids = [ob.theory.id for ob in self.theory_permissions.filter(can_edit=True)]
return self.known_theories.filter(id__in=ids)
    @property
    def past_actions(self):
        """Actions we created that have been finished in the past"""
        # The next three properties are thin delegations to our Dominion
        # (player-character) record.
        return self.Dominion.past_actions

    @property
    def recent_actions(self):
        """Actions we created that have submitted recently"""
        return self.Dominion.recent_actions

    @property
    def recent_assists(self):
        """Actions we assisted recently"""
        return self.Dominion.recent_assists
def get_current_praises_and_condemns(self):
"""Current praises given by this player character"""
from server.utils.arx_utils import get_week
return self.Dominion.praises_given.filter(week=get_week())
def check_motd(self):
"""Checks for a message of the day and sends it to us."""
from evennia.server.models import ServerConfig
motd = ServerConfig.objects.conf(key="MESSAGE_OF_THE_DAY")
msg = ""
if motd:
msg += "|yServer Message of the Day:|n %s\n\n" % motd
for membership in self.active_memberships:
org = membership.organization
if not membership.has_seen_motd and org.motd:
msg += "|wMessage of the Day for %s:|n %s\n" % (org, org.motd)
membership.has_seen_motd = True
membership.save()
self.msg(msg)
def check_petitions(self):
"""Checks if we have any unread petition posts"""
try:
unread = self.Dominion.petitionparticipation_set.filter(unread_posts=True)
if unread:
unread_ids = [str(ob.petition.id) for ob in unread]
self.msg(
"{wThe following petitions have unread messages:{n %s"
% ", ".join(unread_ids)
)
except AttributeError:
pass
    def _send_to_connect_channel(self, message):
        """
        Helper method for loading and sending to the comm channel
        dedicated to connection messages.

        Args:
            message (str): A message to send to the connect channel.
        """
        from django.conf import settings
        from evennia.utils import logger
        from evennia.comms.models import ChannelDB
        from django.utils import timezone
        global _MUDINFO_CHANNEL
        # Cache the channel in a module-level global so we only hit the DB
        # on first use (or again after a failed lookup left it unset).
        if not _MUDINFO_CHANNEL:
            try:
                _MUDINFO_CHANNEL = ChannelDB.objects.filter(
                    db_key=settings.CHANNEL_MUDINFO["key"]
                )[0]
            except Exception:
                logger.log_trace()
        now = timezone.now()
        # Format the timestamp as YY-MM-DD(HH:MM) for display.
        now = "%02i-%02i-%02i(%02i:%02i)" % (
            now.year,
            now.month,
            now.day,
            now.hour,
            now.minute,
        )
        if _MUDINFO_CHANNEL:
            # tempmsg broadcasts without persisting the message.
            _MUDINFO_CHANNEL.tempmsg(f"[{_MUDINFO_CHANNEL.key}, {now}]: {message}")
        else:
            # Channel missing or misconfigured: fall back to the server log.
            logger.log_info(f"[{now}]: {message}")
| 33.820809 | 115 | 0.596009 |
2da3a022d1cb591baa7975a8c0ce369b245c0eed | 2,338 | py | Python | app/consumer/consumer.py | guhuajun/rabbitmq-ha-demo | 4fd31d9eaf769c7179360c268c3888af28801219 | [
"MIT"
] | null | null | null | app/consumer/consumer.py | guhuajun/rabbitmq-ha-demo | 4fd31d9eaf769c7179360c268c3888af28801219 | [
"MIT"
] | null | null | null | app/consumer/consumer.py | guhuajun/rabbitmq-ha-demo | 4fd31d9eaf769c7179360c268c3888af28801219 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# pylint: disable=
# Ref: https://pika.readthedocs.io/en/stable/examples/blocking_consume_recover_multiple_hosts.html
import re
import random
import os
import logging
import time
from datetime import datetime
import pika
from retry import retry
if __name__ == "__main__":
    # Configure root logging once for the consumer process.
    logging.basicConfig(level=logging.INFO,
                        format='[%(asctime)s.%(msecs)03d][%(filename)s:%(lineno)d][%(levelname)s]%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    logger = logging.getLogger(__file__)

    # Assuming there are two hosts: rabbitmq2, and rabbitmq3
    node2 = pika.URLParameters('amqp://172.24.0.11')
    node3 = pika.URLParameters('amqp://172.24.0.12')
    all_endpoints = [node2, node3]

    def on_message(ch, method, properties, body):
        """Acks each delivery; logs a sample (every 500th by sequence number)."""
        # FIX: use a raw string for the pattern. '\d' in a non-raw string is
        # an invalid escape sequence (SyntaxWarning on modern Python and
        # slated to become an error).
        seq_num = int(re.findall(r'\d+', str(body))[0])
        if seq_num % 500 == 0:
            logger.info('Consumed %s', str(body))

        # insert a randon delay when acking messages
        # https://stackoverflow.com/questions/22061082/getting-pika-exceptions-connectionclosed-error-while-using-rabbitmq-in-python
        # delay_seconds = random.randint(1, 3) * 0.1
        # connection.sleep(delay_seconds)

        ch.basic_ack(delivery_tag=method.delivery_tag)

    @retry(pika.exceptions.AMQPConnectionError, delay=10, jitter=(1, 3))
    def consume():
        """Connects to a random broker endpoint and consumes until interrupted."""
        # Shuffle so reconnects spread load across the cluster nodes.
        random.shuffle(all_endpoints)
        connection = pika.BlockingConnection(all_endpoints)
        channel = connection.channel()
        channel.basic_qos(prefetch_count=1)

        # This queue is intentionally non-durable. See http://www.rabbitmq.com/ha.html#non-mirrored-queue-behavior-on-node-failure
        # to learn more.
        channel.queue_declare('test')
        channel.basic_consume('test', on_message)
        try:
            channel.start_consuming()
        except KeyboardInterrupt:
            channel.stop_consuming()
            connection.close()
        except pika.exceptions.ConnectionClosedByBroker:
            # Uncomment this to make the example not attempt recovery
            # from server-initiated connection closure, including
            # when the node is stopped cleanly
            # except pika.exceptions.ConnectionClosedByBroker:
            #     pass
            pass

    consume()
| 34.382353 | 132 | 0.652267 |
a6019e9806260fce7b6c50516d7de9e96e238718 | 4,568 | py | Python | patchsim_experiment/new_exp/91_exp/pylib/tracer.py | poracle100/poracle-experiments | 2582e7a0b0380bac810d49a75eb33f7a0626d6d8 | [
"Apache-2.0"
] | null | null | null | patchsim_experiment/new_exp/91_exp/pylib/tracer.py | poracle100/poracle-experiments | 2582e7a0b0380bac810d49a75eb33f7a0626d6d8 | [
"Apache-2.0"
] | null | null | null | patchsim_experiment/new_exp/91_exp/pylib/tracer.py | poracle100/poracle-experiments | 2582e7a0b0380bac810d49a75eb33f7a0626d6d8 | [
"Apache-2.0"
] | 2 | 2021-03-29T08:13:34.000Z | 2021-04-23T08:51:18.000Z | import time
import os
from unidiff import PatchSet
btrace_home=os.path.abspath("./lib/btrace")
def extract_trace(src, tgt, start, end):
    """Copies trace lines from src whose line number lies within [start, end].

    Trace lines have the form ``---:<line_no>``; any line not starting with
    ``---`` is ignored. The matching lines are written to tgt, replacing any
    previous contents (an empty file if nothing matched).

    Args:
        src: path of the raw btrace output file to read.
        tgt: path of the filtered trace file to write.
        start: first source line number to keep (inclusive).
        end: last source line number to keep (inclusive).
    """
    kept = []
    # 'with' guarantees both handles are closed even if parsing raises,
    # unlike the previous bare open()/close() pairs.
    with open(src) as f:
        for line in f:
            if line.startswith('---'):
                cur = int(line.strip().split(':')[1])
                if start <= cur <= end:
                    kept.append(line)
    with open(tgt, 'w') as f:
        f.write(''.join(kept))
def run(project,bugid,patch_no,tests,randoop_tests=[],tmp_tracefile='tmp_c'):
    """Collects btrace line traces of buggy vs patched Defects4J checkouts.

    Runs each given test (and optional Randoop test) against the buggy
    working dir (<project><bugid>b) and the patched one (..._<patch_no>),
    recording which patched-region lines executed into ../traces/.

    NOTE(review): the mutable default for randoop_tests is shared across
    calls; it is never mutated here, so it is safe but fragile.
    """
    # Make the temp trace file unique per (project, bug, patch) run.
    tmp_tracefile+=project+bugid+patch_no+'run_print_trace'
    tmp_tracefile=os.path.join(os.getcwd(),tmp_tracefile)
    w_buggy=project+str(bugid)+'b'
    w_patched=w_buggy+'_'+patch_no
    patchfile=os.path.join('../patches',patch_no)
    patch = PatchSet.from_filename(patchfile)
    source_file=patch[0].source_file
    # NOTE(review): target_file is never used below.
    target_file=patch[0].target_file
    line_no_list=[]
    # For each hunk, record the source line right after the last context line
    # preceding the first change - i.e. where the patch starts touching code.
    for hunki in range(len(patch[0])):
        for i in range(len(patch[0][hunki])):
            if not patch[0][hunki][i].is_context:
                line_no_list.append(str(patch[0][hunki][i-1].source_line_no+1))
                break
    dir_path='../traces/'+w_patched
    if(os.path.exists(tmp_tracefile)):
        os.system('rm '+tmp_tracefile)
    # Output layout: full traces in buggy/patched, extracted (patched-region
    # only) traces in buggy_e/patched_e.
    os.system('mkdir '+dir_path)
    os.system('mkdir '+os.path.join(dir_path,'buggy'))
    os.system('mkdir '+os.path.join(dir_path,'patched'))
    os.system('mkdir '+os.path.join(dir_path,'buggy_e'))
    os.system('mkdir '+os.path.join(dir_path,'patched_e'))
    patch_info_file="fdsa.txt"
    os.system("rm -rf "+patch_info_file)
    # The PatchInfo tool resolves the patched class/method and its line span.
    os.system('make PatchInfo ARGS="'+os.path.join('../source/',source_file)+' '+patch_info_file+' '+','.join(line_no_list)+'" >/dev/null')
    f=open(patch_info_file)
    lines=f.readlines()
    patched_class=lines[-1].strip()
    patched_method,method_signature,start_line,end_line=lines[0].strip().split('\t')
    f.close()
    start_line=int(start_line)
    end_line=int(end_line)
    os.system('defects4j compile -w '+w_buggy)
    os.system('defects4j compile -w '+w_patched)
    # Specialize the btrace script template for the patched class and build it.
    f=open("%s/AllLines_pattern.java"%(btrace_home))
    s=f.read()
    f.close()
    s=s.replace('__CLASS__NAME__',patched_class)
    f=open("%s/AllLines.java"%(btrace_home),'w')
    f.write(s)
    f.close()
    os.system("cd %s && ./btracec AllLines.java"%(btrace_home))
    # Attach the btrace agent to the test JVM; it writes to tmp_tracefile.
    jvmargs=" -a -Djvmargs=\-javaagent:%s/btrace\-agent.jar=noserver,debug=true,scriptOutputFile=%s,script=%s/AllLines.class" % (btrace_home, tmp_tracefile, btrace_home)
    for test in tests:
        test=test.strip()
        os.system('timeout 90 defects4j test -n -t '+test+' -w '+w_buggy+jvmargs)
        if os.path.exists(tmp_tracefile):
            extract_trace(tmp_tracefile,os.path.join(dir_path,'buggy_e','__'.join(test.split('::'))),start_line,end_line)
            os.system('mv '+tmp_tracefile+' '+os.path.join(dir_path,'buggy','__'.join(test.split('::'))))
        os.system('timeout 90 defects4j test -n -t '+test+' -w '+w_patched+jvmargs)
        if os.path.exists(tmp_tracefile):
            extract_trace(tmp_tracefile,os.path.join(dir_path,'patched_e','__'.join(test.split('::'))),start_line,end_line)
            os.system('mv '+tmp_tracefile+' '+os.path.join(dir_path,'patched','__'.join(test.split('::'))))
    cmpl_flag=True
    testfile='../test_gen_randoop/'+project+'/randoop/'+bugid+'/'+project+'-'+bugid+'b-randoop.'+bugid+'.tar.bz2'
    for Test_Case in randoop_tests:
        test='Randoop.'+Test_Case.strip()
        # The first Randoop test compiles the suite (no -n); later runs skip
        # recompilation by passing -n.
        if(cmpl_flag):
            os.system('timeout 90 defects4j test -s '+testfile+' -t '+Test_Case.strip()+' -w '+w_buggy+jvmargs)
        else:
            os.system('timeout 90 defects4j test -n -s '+testfile+' -t '+Test_Case.strip()+' -w '+w_buggy+jvmargs)
        if os.path.exists(tmp_tracefile):
            extract_trace(tmp_tracefile,os.path.join(dir_path,'buggy_e','__'.join(test.split('::'))),start_line,end_line)
            os.system('mv '+tmp_tracefile+' '+os.path.join(dir_path,'buggy','__'.join(test.split('::'))))
        if(cmpl_flag):
            os.system('timeout 90 defects4j test -s '+testfile+' -t '+Test_Case.strip()+' -w '+w_patched+jvmargs)
        else:
            os.system('timeout 90 defects4j test -n -s '+testfile+' -t '+Test_Case.strip()+' -w '+w_patched+jvmargs)
        if os.path.exists(tmp_tracefile):
            extract_trace(tmp_tracefile,os.path.join(dir_path,'patched_e','__'.join(test.split('::'))),start_line,end_line)
            os.system('mv '+tmp_tracefile+' '+os.path.join(dir_path,'patched','__'.join(test.split('::'))))
        cmpl_flag=False
| 41.908257 | 169 | 0.635508 |
e566b81eb31b83ccd21721f17d0cf94440b9df6f | 6,831 | py | Python | utils.py | duxinkang/WS-DAN.PyTorch | 87779124f619ceeb445ddfb0246c8a22ff324db4 | [
"MIT"
] | 1 | 2021-12-14T01:05:19.000Z | 2021-12-14T01:05:19.000Z | utils.py | duxinkang/WS-DAN.PyTorch | 87779124f619ceeb445ddfb0246c8a22ff324db4 | [
"MIT"
] | null | null | null | utils.py | duxinkang/WS-DAN.PyTorch | 87779124f619ceeb445ddfb0246c8a22ff324db4 | [
"MIT"
] | null | null | null | """Utils
Created: Nov 11,2019 - Yuchong Gu
Revised: Dec 03,2019 - Yuchong Gu
"""
import torch
import random
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
##############################################
# Center Loss for Attention Regularization
##############################################
class CenterLoss(nn.Module):
    """Sum-of-squared-errors between features and their centers, averaged
    over the batch dimension (attention regularization)."""
    def __init__(self):
        super(CenterLoss, self).__init__()
        self.l2_loss = nn.MSELoss(reduction='sum')
    def forward(self, outputs, targets):
        batch_size = outputs.size(0)
        total = self.l2_loss(outputs, targets)
        return total / batch_size
##################################
# Metric
##################################
class Metric(object):
    """Marker base class for running metrics; subclasses implement reset() and __call__()."""
    pass
class AverageMeter(Metric):
    """Keeps a running average: accumulated scores divided by samples seen."""
    def __init__(self, name='loss'):
        self.name = name
        self.reset()
    def reset(self):
        # Accumulated score total and total number of samples seen so far.
        self.scores = 0.
        self.total_num = 0.
    def __call__(self, batch_score, sample_num=1):
        """Adds batch_score (over sample_num samples) and returns the running mean."""
        self.scores += batch_score
        self.total_num += sample_num
        return self.scores / self.total_num
class TopKAccuracyMetric(Metric):
    """Running top-k accuracy (in percent) over all samples seen so far.

    Args:
        topk (tuple of int): the k values to report, e.g. (1, 5).
    """
    def __init__(self, topk=(1,)):
        self.name = 'topk_accuracy'
        self.topk = topk
        self.maxk = max(topk)
        self.reset()
    def reset(self):
        # One running correct-count per requested k, plus the sample counter.
        self.corrects = np.zeros(len(self.topk))
        self.num_samples = 0.
    def __call__(self, output, target):
        """Computes the precision@k for the specified values of k

        Args:
            output: (batch, num_classes) class scores.
            target: (batch,) ground-truth class indices.
        Returns:
            np.ndarray of running accuracies in percent, one per k.
        """
        self.num_samples += target.size(0)
        _, pred = output.topk(self.maxk, 1, True, True)
        pred = pred.t()
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        for i, k in enumerate(self.topk):
            # FIX: reshape(-1) instead of view(-1). The sliced comparison
            # tensor can be non-contiguous (pred was transposed), and view()
            # raises a RuntimeError on non-contiguous input in recent PyTorch;
            # reshape() views when possible and copies otherwise.
            correct_k = correct[:k].reshape(-1).float().sum(0)
            self.corrects[i] += correct_k.item()
        return self.corrects * 100. / self.num_samples
##################################
# Callback
##################################
class Callback(object):
    """Training-loop hook interface; subclasses override the epoch hooks."""
    def __init__(self):
        pass
    def on_epoch_begin(self):
        """Called before each epoch; no-op by default."""
        pass
    def on_epoch_end(self, *args):
        """Called after each epoch; no-op by default."""
        pass
class ModelCheckpoint(Callback):
    """Saves the network whenever the monitored metric improves."""
    def __init__(self, savepath, monitor='val_topk_accuracy', mode='max'):
        self.savepath = savepath
        self.monitor = monitor
        # 'max' means larger metric values are better; 'min' the opposite.
        self.mode = mode
        self.reset()
        super(ModelCheckpoint, self).__init__()
    def reset(self):
        # Start from the worst possible score so the first epoch always saves.
        if self.mode == 'max':
            self.best_score = float('-inf')
        else:
            self.best_score = float('inf')
    def set_best_score(self, score):
        # Accepts either a scalar or an ndarray (uses its first element).
        if isinstance(score, np.ndarray):
            self.best_score = score[0]
        else:
            self.best_score = score
    def on_epoch_begin(self):
        pass
    def on_epoch_end(self, logs, net, **kwargs):
        """Saves a checkpoint if logs[self.monitor] beats the best score so far."""
        current_score = logs[self.monitor]
        if isinstance(current_score, np.ndarray):
            current_score = current_score[0]
        if (self.mode == 'max' and current_score > self.best_score) or \
                (self.mode == 'min' and current_score < self.best_score):
            self.best_score = current_score
            # Unwrap DataParallel so the checkpoint loads without it.
            if isinstance(net, torch.nn.DataParallel):
                state_dict = net.module.state_dict()
            else:
                state_dict = net.state_dict()
            # Move all tensors to CPU so the checkpoint is device-agnostic.
            for key in state_dict.keys():
                state_dict[key] = state_dict[key].cpu()
            # Optionally persist the feature-center tensor alongside the weights.
            if 'feature_center' in kwargs:
                feature_center = kwargs['feature_center']
                feature_center = feature_center.cpu()
                torch.save({
                    'logs': logs,
                    'state_dict': state_dict,
                    'feature_center': feature_center}, self.savepath)
            else:
                torch.save({
                    'logs': logs,
                    'state_dict': state_dict}, self.savepath)
##################################
# augment function
##################################
def batch_augment(images, attention_map, mode='crop', theta=0.5, padding_ratio=0.1):
    """Attention-guided batch augmentation.

    mode='crop': zooms each image in on the region whose attention exceeds
    theta * max (with padding). mode='drop': zeroes that region out instead.
    theta may be a (low, high) tuple, in which case the threshold fraction is
    drawn uniformly at random per sample.
    """
    batches, _, imgH, imgW = images.size()
    if mode == 'crop':
        crop_images = []
        for batch_index in range(batches):
            # Keep a leading batch dim of 1 so upsampling sees a 4-D tensor.
            atten_map = attention_map[batch_index:batch_index + 1]
            if isinstance(theta, tuple):
                theta_c = random.uniform(*theta) * atten_map.max()
            else:
                theta_c = theta * atten_map.max()
            # Upsample attention to image size, threshold, then take the
            # bounding box of surviving pixels (padded, clamped to the image).
            crop_mask = F.upsample_bilinear(atten_map, size=(imgH, imgW)) >= theta_c
            nonzero_indices = torch.nonzero(crop_mask[0, 0, ...])
            height_min = max(int(nonzero_indices[:, 0].min().item() - padding_ratio * imgH), 0)
            height_max = min(int(nonzero_indices[:, 0].max().item() + padding_ratio * imgH), imgH)
            width_min = max(int(nonzero_indices[:, 1].min().item() - padding_ratio * imgW), 0)
            width_max = min(int(nonzero_indices[:, 1].max().item() + padding_ratio * imgW), imgW)
            # Resize the crop back up to the original resolution.
            crop_images.append(
                F.upsample_bilinear(images[batch_index:batch_index + 1, :, height_min:height_max, width_min:width_max],
                                    size=(imgH, imgW)))
        crop_images = torch.cat(crop_images, dim=0)
        return crop_images
    elif mode == 'drop':
        drop_masks = []
        for batch_index in range(batches):
            atten_map = attention_map[batch_index:batch_index + 1]
            if isinstance(theta, tuple):
                theta_d = random.uniform(*theta) * atten_map.max()
            else:
                theta_d = theta * atten_map.max()
            # Keep only pixels whose attention is BELOW the threshold.
            drop_masks.append(F.upsample_bilinear(atten_map, size=(imgH, imgW)) < theta_d)
        drop_masks = torch.cat(drop_masks, dim=0)
        drop_images = images * drop_masks.float()
        return drop_images
    else:
        raise ValueError('Expected mode in [\'crop\', \'drop\'], but received unsupported augmentation method %s' % mode)
##################################
# transform in dataset
##################################
def get_transform(resize, phase='train'):
    """Builds the image pipeline: resize to size/0.875, then (train) random
    crop + flip + color jitter or (eval) center crop, tensor + ImageNet norm."""
    enlarged = (int(resize[0] / 0.875), int(resize[1] / 0.875))
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    if phase == 'train':
        steps = [
            transforms.Resize(size=enlarged),
            transforms.RandomCrop(resize),
            transforms.RandomHorizontalFlip(0.5),
            transforms.ColorJitter(brightness=0.126, saturation=0.5),
            transforms.ToTensor(),
            normalize,
        ]
    else:
        steps = [
            transforms.Resize(size=enlarged),
            transforms.CenterCrop(resize),
            transforms.ToTensor(),
            normalize,
        ]
    return transforms.Compose(steps)
| 33.160194 | 121 | 0.556141 |
b4679889ff7ee1000ac255d7caaccac85ff1f4b0 | 442 | py | Python | flask_app.py | hbradleyiii/spa-base | ac79535d63778dc0793b1e2d7ecac5547927798d | [
"MIT"
] | null | null | null | flask_app.py | hbradleyiii/spa-base | ac79535d63778dc0793b1e2d7ecac5547927798d | [
"MIT"
] | null | null | null | flask_app.py | hbradleyiii/spa-base | ac79535d63778dc0793b1e2d7ecac5547927798d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
flask_app.py
~~~~~~~~~~~~~~
The app entry point for `flask` commands.
"""
from app import cli, create_app
from app.models import db, Email, User
# Build the application and register its CLI commands at import time, so
# `flask` commands (run, shell, custom cli) all share this instance.
app = create_app()
cli.init_app(app)
@app.shell_context_processor
def make_shell_context():
    """Names pre-loaded into `flask shell` for interactive debugging."""
    return dict(db=db, session=db.session, Email=Email, User=User)
| 17 | 57 | 0.604072 |
3ed8975aaf940fecd400cc9ead705172473b0467 | 7,565 | py | Python | tests/infer/mcmc/test_nuts.py | mashrikt/pyro | 144771db63d5f72e2fec6f5a6d866f1f153e7b80 | [
"MIT"
] | null | null | null | tests/infer/mcmc/test_nuts.py | mashrikt/pyro | 144771db63d5f72e2fec6f5a6d866f1f153e7b80 | [
"MIT"
] | null | null | null | tests/infer/mcmc/test_nuts.py | mashrikt/pyro | 144771db63d5f72e2fec6f5a6d866f1f153e7b80 | [
"MIT"
] | 1 | 2018-10-02T18:50:33.000Z | 2018-10-02T18:50:33.000Z | from __future__ import absolute_import, division, print_function
import logging
import os
import pytest
import torch
import pyro
import pyro.distributions as dist
from pyro.infer import EmpiricalMarginal
from pyro.infer.mcmc.mcmc import MCMC
from pyro.infer.mcmc.nuts import NUTS
from tests.common import assert_equal
from .test_hmc import TEST_CASES, TEST_IDS, T, rmse
logger = logging.getLogger(__name__)
# Rebuild shared HMC cases 2 and 3 with more samples/warmup for NUTS, and
# mark them skipped on CI because they are slow.
T2 = T(*TEST_CASES[2].values)._replace(num_samples=800, warmup_steps=200)
TEST_CASES[2] = pytest.param(*T2, marks=pytest.mark.skipif(
    'CI' in os.environ and os.environ['CI'] == 'true', reason='Slow test - skip on CI'))
T3 = T(*TEST_CASES[3].values)._replace(num_samples=1000, warmup_steps=200)
TEST_CASES[3] = pytest.param(*T3, marks=[
    pytest.mark.skipif('CI' in os.environ and os.environ['CI'] == 'true',
                       reason='Slow test - skip on CI')]
)
@pytest.mark.parametrize(
    'fixture, num_samples, warmup_steps, hmc_params, expected_means, expected_precs, mean_tol, std_tol',
    TEST_CASES,
    ids=TEST_IDS)
@pytest.mark.init(rng_seed=34)
@pytest.mark.disable_validation()
def test_nuts_conjugate_gaussian(fixture,
                                 num_samples,
                                 warmup_steps,
                                 hmc_params,
                                 expected_means,
                                 expected_precs,
                                 mean_tol,
                                 std_tol):
    """NUTS recovers the analytic posterior of a conjugate Gaussian chain."""
    pyro.get_param_store().clear()
    nuts_kernel = NUTS(fixture.model, hmc_params['step_size'])
    mcmc_run = MCMC(nuts_kernel, num_samples, warmup_steps).run(fixture.data)
    # Compare each latent loc_i against its closed-form posterior.
    for i in range(1, fixture.chain_len + 1):
        param_name = 'loc_' + str(i)
        marginal = EmpiricalMarginal(mcmc_run, sites=param_name)
        latent_loc = marginal.mean
        latent_std = marginal.variance.sqrt()
        expected_mean = torch.ones(fixture.dim) * expected_means[i - 1]
        expected_std = 1 / torch.sqrt(torch.ones(fixture.dim) * expected_precs[i - 1])
        # Actual vs expected posterior means for the latents
        logger.info('Posterior mean (actual) - {}'.format(param_name))
        logger.info(latent_loc)
        logger.info('Posterior mean (expected) - {}'.format(param_name))
        logger.info(expected_mean)
        assert_equal(rmse(latent_loc, expected_mean).item(), 0.0, prec=mean_tol)
        # Actual vs expected posterior precisions for the latents
        logger.info('Posterior std (actual) - {}'.format(param_name))
        logger.info(latent_std)
        logger.info('Posterior std (expected) - {}'.format(param_name))
        logger.info(expected_std)
        assert_equal(rmse(latent_std, expected_std).item(), 0.0, prec=std_tol)
def test_logistic_regression():
    """NUTS recovers known logistic-regression coefficients."""
    num_features = 3
    target_coefs = torch.arange(1, num_features + 1)
    covariates = torch.randn(2000, num_features)
    observed = dist.Bernoulli(logits=(target_coefs * covariates).sum(-1)).sample()
    def model(data):
        # Standard-normal prior over the regression coefficients.
        prior_loc = torch.zeros(num_features)
        prior_scale = torch.ones(num_features)
        coefs = pyro.sample('beta', dist.Normal(prior_loc, prior_scale))
        logits = (coefs * data).sum(-1)
        return pyro.sample('y', dist.Bernoulli(logits=logits), obs=observed)
    kernel = NUTS(model, step_size=0.0855)
    run = MCMC(kernel, num_samples=500, warmup_steps=100).run(covariates)
    posterior = EmpiricalMarginal(run, sites='beta')
    assert_equal(rmse(target_coefs, posterior.mean).item(), 0.0, prec=0.1)
def test_beta_bernoulli():
    """NUTS recovers Bernoulli success probabilities under a Beta prior."""
    def model(data):
        alpha = torch.tensor([1.1, 1.1])
        beta = torch.tensor([1.1, 1.1])
        p_latent = pyro.sample("p_latent", dist.Beta(alpha, beta))
        pyro.sample("obs", dist.Bernoulli(p_latent), obs=data)
        return p_latent
    true_probs = torch.tensor([0.9, 0.1])
    data = dist.Bernoulli(true_probs).sample(sample_shape=(torch.Size((1000,))))
    nuts_kernel = NUTS(model, step_size=0.02)
    mcmc_run = MCMC(nuts_kernel, num_samples=500, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(mcmc_run, sites='p_latent')
    assert_equal(posterior.mean, true_probs, prec=0.02)
def test_gamma_normal():
    """NUTS infers a Normal scale under a Gamma prior."""
    def model(data):
        # Gamma(1, 1) prior on each component of the scale. (Both parameter
        # tensors are 1.0, so positional order is immaterial here.)
        prior_a = torch.tensor([1.0, 1.0])
        prior_b = torch.tensor([1.0, 1.0])
        scale = pyro.sample('p_latent', dist.Gamma(prior_a, prior_b))
        pyro.sample("obs", dist.Normal(3, scale), obs=data)
        return scale
    true_std = torch.tensor([0.5, 2])
    data = dist.Normal(3, true_std).sample(sample_shape=(torch.Size((2000,))))
    kernel = NUTS(model, step_size=0.01)
    run = MCMC(kernel, num_samples=200, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(run, sites='p_latent')
    assert_equal(posterior.mean, true_std, prec=0.05)
def test_logistic_regression_with_dual_averaging():
    """Same logistic-regression recovery, with step-size adaptation enabled."""
    dim = 3
    true_coefs = torch.arange(1, dim+1)
    data = torch.randn(2000, dim)
    labels = dist.Bernoulli(logits=(true_coefs * data).sum(-1)).sample()
    def model(data):
        coefs_mean = torch.zeros(dim)
        coefs = pyro.sample('beta', dist.Normal(coefs_mean, torch.ones(dim)))
        y = pyro.sample('y', dist.Bernoulli(logits=(coefs * data).sum(-1)), obs=labels)
        return y
    nuts_kernel = NUTS(model, adapt_step_size=True)
    mcmc_run = MCMC(nuts_kernel, num_samples=500, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(mcmc_run, sites='beta')
    assert_equal(rmse(true_coefs, posterior.mean).item(), 0.0, prec=0.1)
def test_beta_bernoulli_with_dual_averaging():
    """Beta-Bernoulli posterior recovery with step-size adaptation enabled."""
    def model(data):
        concentration1 = torch.tensor([1.1, 1.1])
        concentration0 = torch.tensor([1.1, 1.1])
        probs = pyro.sample("p_latent", dist.Beta(concentration1, concentration0))
        pyro.sample("obs", dist.Bernoulli(probs), obs=data)
        return probs
    true_probs = torch.tensor([0.9, 0.1])
    data = dist.Bernoulli(true_probs).sample(sample_shape=(torch.Size((1000,))))
    kernel = NUTS(model, adapt_step_size=True)
    run = MCMC(kernel, num_samples=500, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(run, sites="p_latent")
    assert_equal(posterior.mean, true_probs, prec=0.03)
def test_dirichlet_categorical():
    """NUTS recovers categorical probabilities under a flat Dirichlet prior."""
    def model(data):
        concentration = torch.tensor([1.0, 1.0, 1.0])
        p_latent = pyro.sample('p_latent', dist.Dirichlet(concentration))
        pyro.sample("obs", dist.Categorical(p_latent), obs=data)
        return p_latent
    true_probs = torch.tensor([0.1, 0.6, 0.3])
    data = dist.Categorical(true_probs).sample(sample_shape=(torch.Size((2000,))))
    nuts_kernel = NUTS(model, adapt_step_size=True)
    mcmc_run = MCMC(nuts_kernel, num_samples=200, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(mcmc_run, sites='p_latent')
    assert_equal(posterior.mean, true_probs, prec=0.02)
def test_gamma_beta():
    """NUTS recovers Beta shape parameters under Gamma(1, 1) priors."""
    def model(data):
        shape_a = pyro.sample('alpha', dist.Gamma(concentration=1., rate=1.))
        shape_b = pyro.sample('beta', dist.Gamma(concentration=1., rate=1.))
        pyro.sample(
            'x',
            dist.Beta(concentration1=shape_a, concentration0=shape_b),
            obs=data,
        )
    expected_alpha = torch.tensor(5.)
    expected_beta = torch.tensor(1.)
    data = dist.Beta(
        concentration1=expected_alpha, concentration0=expected_beta
    ).sample(torch.Size((5000,)))
    kernel = NUTS(model, adapt_step_size=True)
    run = MCMC(kernel, num_samples=500, warmup_steps=200).run(data)
    posterior = EmpiricalMarginal(run, sites=['alpha', 'beta'])
    assert_equal(posterior.mean, torch.stack([expected_alpha, expected_beta]), prec=0.05)
| 41.565934 | 104 | 0.669795 |
7d8fecaa80a91efc168185ee6eca487ee376086c | 291 | py | Python | bookContent/bookContent/pipelines.py | hello-chenchen/ebook_scrapy | c64e845c8054e60ecfedeb097b4024bcd575cd1b | [
"MIT"
] | null | null | null | bookContent/bookContent/pipelines.py | hello-chenchen/ebook_scrapy | c64e845c8054e60ecfedeb097b4024bcd575cd1b | [
"MIT"
] | null | null | null | bookContent/bookContent/pipelines.py | hello-chenchen/ebook_scrapy | c64e845c8054e60ecfedeb097b4024bcd575cd1b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class BookcontentPipeline(object):
    """No-op Scrapy item pipeline.

    process_item must return the item for it to continue through any later
    pipeline stages; this stage simply forwards every item unchanged.
    """
    def process_item(self, item, spider):
        """Return `item` untouched so downstream pipelines receive it."""
        return item
| 24.25 | 65 | 0.714777 |
3921a3534e66b4c61a3f9b63a505d607bf74c412 | 6,028 | py | Python | changes/api/project_commit_index.py | bowlofstew/changes | ebd393520e0fdb07c240a8d4e8747281b6186e28 | [
"Apache-2.0"
] | null | null | null | changes/api/project_commit_index.py | bowlofstew/changes | ebd393520e0fdb07c240a8d4e8747281b6186e28 | [
"Apache-2.0"
] | null | null | null | changes/api/project_commit_index.py | bowlofstew/changes | ebd393520e0fdb07c240a8d4e8747281b6186e28 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import, division, unicode_literals
import itertools
from collections import defaultdict
from flask.ext.restful import reqparse
from sqlalchemy.orm import joinedload, contains_eager
from changes.api.base import APIView, error
from changes.config import db
from changes.constants import Cause, Status
from changes.models import Build, Project, Revision, Source, ProjectOption
class ProjectCommitIndexAPIView(APIView):
get_parser = reqparse.RequestParser()
get_parser.add_argument('page', type=int, location='args',
default=1)
get_parser.add_argument('per_page', type=int, location='args',
default=50)
get_parser.add_argument('parent', location='args')
get_parser.add_argument('branch', location='args')
get_parser.add_argument('every_commit', location='args', default=0)
get_parser.add_argument('all_builds', location='args', default=0)
def get(self, project_id):
project = Project.get(project_id)
if not project:
return error('project not found', http_code=404)
args = self.get_parser.parse_args()
# we want to only return commits in the repo that are within the
# project's whitelist
paths = None
if not args.every_commit:
paths = self.get_whitelisted_paths(project)
repo = project.repository
offset = (args.page - 1) * args.per_page
limit = args.per_page + 1 # +1 to tell if there are more revs to get
vcs = repo.get_vcs()
if vcs:
try:
commits = self.get_commits_from_vcs(
repo, vcs, offset, limit, paths, args.parent, args.branch)
except ValueError as err:
return error(err.message)
else:
if args.parent or args.branch:
param = 'Branches' if args.branch else 'Parents'
return error(
'{0} not supported for projects with no repository.'.format(param),
http_code=422)
# TODO: right now this fallback returns every commit for projects
# with whitelisted paths. At the very least, we need to tell the
# frontend about this (perhaps using a response header)
commits = self.get_commits_from_db(repo, offset, limit)
page_links = self.make_links(
current_page=args.page,
has_next_page=len(commits) > args.per_page,
)
# we fetched one extra commit so that we'd know whether to create a
# next link. Delete it
commits = commits[:args.per_page]
builds_map = {}
if commits:
builds_map = self.get_builds_for_commits(
commits, project, args.all_builds)
results = []
for result in commits:
if args.all_builds:
result['builds'] = builds_map.get(result['id'], [])
else:
result['build'] = builds_map.get(result['id'])
results.append(result)
return self.respond(results, serialize=False, links=page_links)
def get_whitelisted_paths(self, project):
whitelist = db.session.query(
ProjectOption.project_id, ProjectOption.name, ProjectOption.value
).filter(
ProjectOption.project_id.in_([project.id]),
ProjectOption.name.in_(['build.file-whitelist'])
).first()
if whitelist:
return whitelist.value.strip().splitlines()
return None
def get_commits_from_vcs(self, repo, vcs, offset, limit, paths, parent, branch):
vcs_log = list(vcs.log(
offset=offset,
limit=limit,
parent=parent,
branch=branch,
paths=paths
))
if not vcs_log:
return []
revisions_qs = list(Revision.query.options(
joinedload('author'),
).filter(
Revision.repository_id == repo.id,
Revision.sha.in_(c.id for c in vcs_log)
))
revisions_map = dict(
(c.sha, d)
for c, d in itertools.izip(revisions_qs, self.serialize(revisions_qs))
)
commits = []
for commit in vcs_log:
if commit.id in revisions_map:
result = revisions_map[commit.id]
else:
result = self.serialize(commit)
commits.append(result)
return commits
def get_commits_from_db(self, repo, offset, limit):
return self.serialize(list(
Revision.query.options(
joinedload('author'),
).filter(
Revision.repository_id == repo.id,
).order_by(Revision.date_created.desc())[offset:offset + limit]
))
    def get_builds_for_commits(self, commits, project, all_builds):
        """Map revision sha -> serialized build(s) for the given commits.

        When ``all_builds`` is False the result maps each sha to its single
        most recent build; otherwise each sha maps to a list of all builds
        in ascending creation order. Only unpatched sources and non-snapshot
        builds in finished/in-progress/queued state are considered.
        """
        builds_qs = list(Build.query.options(
            joinedload('author'),
            contains_eager('source'),
        ).join(
            Source, Source.id == Build.source_id,
        ).filter(
            Build.source_id == Source.id,
            Build.project_id == project.id,
            Build.status.in_([Status.finished, Status.in_progress, Status.queued]),
            Build.cause != Cause.snapshot,
            Source.repository_id == project.repository_id,
            Source.revision_sha.in_(c['id'] for c in commits),
            Source.patch == None,  # NOQA
        ).order_by(Build.date_created.asc()))
        if not all_builds:
            # this implicitly only keeps the last build for a revision,
            # because ascending order means later builds overwrite earlier
            # dict entries for the same sha
            return dict(
                (b.source.revision_sha, d)
                for b, d in itertools.izip(builds_qs, self.serialize(builds_qs))
            )
        else:
            builds_map = defaultdict(list)
            for b, d in itertools.izip(builds_qs, self.serialize(builds_qs)):
                builds_map[b.source.revision_sha].append(d)
            return dict(builds_map)
| 36.533333 | 87 | 0.593729 |
6378ed0590c564f04b180a319cdd0a520944cbf9 | 10,069 | py | Python | modules/tools/navigation/simulator/navigation_view_backend.py | BaiduXLab/apollo | 2764e934b6d0da1342be781447348288ac84c5e9 | [
"Apache-2.0"
] | 22 | 2018-10-10T14:46:32.000Z | 2022-02-28T12:43:43.000Z | modules/tools/navigation/simulator/navigation_view_backend.py | BaiduXLab/apollo | 2764e934b6d0da1342be781447348288ac84c5e9 | [
"Apache-2.0"
] | 5 | 2020-06-13T00:36:33.000Z | 2022-02-10T17:50:43.000Z | modules/tools/navigation/simulator/navigation_view_backend.py | BaiduXLab/apollo | 2764e934b6d0da1342be781447348288ac84c5e9 | [
"Apache-2.0"
] | 12 | 2018-12-24T02:17:19.000Z | 2021-12-06T01:54:09.000Z | #!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import rospy
import math
import thread
import requests
import json
import pyproj
from std_msgs.msg import String
from flask import jsonify
from flask import Flask
from flask import request
from flask_cors import CORS
from numpy.polynomial.polynomial import polyval
from modules.localization.proto import localization_pb2
from modules.planning.proto import planning_pb2
from modules.drivers.proto import mobileye_pb2
from modules.perception.proto import perception_obstacle_pb2
# pip install -U flask-cors
# is currently required in docker
app = Flask(__name__)
CORS(app)
# Last known vehicle position (updated by localization_callback); the
# initial values are a default location used before any pose arrives.
lat = 37.415889
lon = -122.014505
# Google Maps Directions API key, loaded from disk in __main__.
API_KEY = ""
# ROS publisher and latest cached messages, filled in by the subscribers.
routing_pub = None
mobileye_pb = None
perception_pb = None
planning_pb = None
heading = None
# UTM zone 10 projector used to convert between lon/lat and UTM x/y.
projector = pyproj.Proj(proj='utm', zone=10, ellps='WGS84')
def mobileye_callback(p_mobileye_pb):
    # ROS subscriber callback: cache the latest mobileye message for the
    # HTTP handlers to read.
    global mobileye_pb
    mobileye_pb = p_mobileye_pb
def planning_callback(p_planning_pb):
    # ROS subscriber callback: cache the latest planning trajectory.
    global planning_pb
    planning_pb = p_planning_pb
def perception_callback(p_perception_pb):
    # ROS subscriber callback: cache the latest perception obstacles message.
    global perception_pb
    perception_pb = p_perception_pb
def localization_callback(localization_pb):
    """Cache the vehicle pose: update module-level lat/lon and heading.

    The pose position arrives in UTM coordinates; ``projector`` (configured
    for UTM zone 10) converts it back to lon/lat for the map frontend.
    """
    global lat, lon, heading
    x = localization_pb.pose.position.x
    y = localization_pb.pose.position.y
    heading = localization_pb.pose.heading
    # NOTE: the unused local `zone = 10` was removed; the zone is already
    # baked into the module-level `projector`.
    lon, lat = projector(x, y, inverse=True)
def add_listener():
    """Initialize the ROS node, subscribe to all input topics, and create
    the routing publisher used by the /routing HTTP handler."""
    global routing_pub
    rospy.init_node("map_server", anonymous=True)
    rospy.Subscriber('/apollo/localization/pose',
                     localization_pb2.LocalizationEstimate,
                     localization_callback)
    routing_pub = rospy.Publisher('/apollo/navigation/routing',
                                  String, queue_size=1)
    rospy.Subscriber('/apollo/sensor/mobileye',
                     mobileye_pb2.Mobileye,
                     mobileye_callback)
    rospy.Subscriber('/apollo/planning',
                     planning_pb2.ADCTrajectory,
                     planning_callback)
    rospy.Subscriber('/apollo/perception/obstacles',
                     perception_obstacle_pb2.PerceptionObstacles,
                     perception_callback)
@app.route('/', methods=["POST", "GET"])
def current_latlon():
    """Return [vehicle point, right lane, left lane, planned path] as JSON.

    Lane markers come from perception when available, otherwise mobileye.
    All local x/y offsets are rotated by the vehicle heading and projected
    back to lat/lon.
    """
    point = {}
    point['lat'] = lat
    point['lon'] = lon
    points = [point]
    utm_vehicle_x, utm_vehicle_y = projector(lon, lat)
    right_lane = []
    left_lane = []
    # Prefer perception lane markers; fall back to mobileye.
    if perception_pb is not None:
        right_lane, left_lane = get_lane_markers_from_perception(
            utm_vehicle_x, utm_vehicle_y)
    else:
        if mobileye_pb is not None:
            right_lane, left_lane = get_lane_markers_from_mobileye(
                utm_vehicle_x, utm_vehicle_y)
    points.append(right_lane)
    points.append(left_lane)
    planned_path = []
    # NOTE(review): if planning arrives before localization, `heading` is
    # still None here and math.cos(heading) would raise — verify startup order.
    if planning_pb is not None:
        for traj_point in planning_pb.trajectory_point:
            x = traj_point.path_point.x
            y = traj_point.path_point.y
            newx = x * math.cos(heading) - y * math.sin(heading)
            newy = y * math.cos(heading) + x * math.sin(heading)
            plon, plat = projector(utm_vehicle_x + newx, utm_vehicle_y + newy,
                                   inverse=True)
            planned_path.append({'lat': plat, 'lng': plon})
    points.append(planned_path)
    return jsonify(points)
def get_lane_markers_from_perception(utm_vehicle_x, utm_vehicle_y):
    """Sample the perception lane-marker polynomials into lat/lng points.

    The right/left marker handling was duplicated line-for-line; it is now
    factored into one helper evaluated per marker. Behavior is unchanged.

    Args:
        utm_vehicle_x, utm_vehicle_y: vehicle position in UTM coordinates.

    Returns:
        (right_lane, left_lane): lists of {'lat': .., 'lng': ..} dicts.
    """
    def lane_points(marker):
        # Cubic polynomial y(x) in the vehicle frame; rotate by heading and
        # translate into UTM before projecting back to lat/lng.
        coef = [marker.c0_position, marker.c1_heading_angle,
                marker.c2_curvature, marker.c3_curvature_derivative]
        points = []
        for x in range(int(marker.view_range)):
            y = -1 * polyval(x, coef)
            newx = x * math.cos(heading) - y * math.sin(heading)
            newy = y * math.cos(heading) + x * math.sin(heading)
            plon, plat = projector(utm_vehicle_x + newx, utm_vehicle_y + newy,
                                   inverse=True)
            points.append({'lat': plat, 'lng': plon})
        return points

    right_lane = lane_points(perception_pb.lane_marker.right_lane_marker)
    left_lane = lane_points(perception_pb.lane_marker.left_lane_marker)
    return right_lane, left_lane
def get_lane_markers_from_mobileye(utm_vehicle_x, utm_vehicle_y):
    """Sample the mobileye lane-marker polynomials into lat/lng points.

    The right/left marker handling was duplicated line-for-line; it is now
    factored into one helper taking the polynomial coefficients and view
    range. Behavior is unchanged.

    Args:
        utm_vehicle_x, utm_vehicle_y: vehicle position in UTM coordinates.

    Returns:
        (right_lane, left_lane): lists of {'lat': .., 'lng': ..} dicts.
    """
    def lane_points(coef, view_range):
        points = []
        for x in range(int(view_range)):
            y = -1 * polyval(x, coef)
            newx = x * math.cos(heading) - y * math.sin(heading)
            newy = y * math.cos(heading) + x * math.sin(heading)
            plon, plat = projector(utm_vehicle_x + newx, utm_vehicle_y + newy,
                                   inverse=True)
            points.append({'lat': plat, 'lng': plon})
        return points

    # Right marker: position/curvature from lka_768, heading/range from lka_769.
    right_lane = lane_points(
        [mobileye_pb.lka_768.position,
         mobileye_pb.lka_769.heading_angle,
         mobileye_pb.lka_768.curvature,
         mobileye_pb.lka_768.curvature_derivative],
        mobileye_pb.lka_769.view_range)
    # Left marker: position/curvature from lka_766, heading/range from lka_767.
    left_lane = lane_points(
        [mobileye_pb.lka_766.position,
         mobileye_pb.lka_767.heading_angle,
         mobileye_pb.lka_766.curvature,
         mobileye_pb.lka_766.curvature_derivative],
        mobileye_pb.lka_767.view_range)
    return right_lane, left_lane
@app.route('/routing', methods=["POST", "GET"])
def routing():
    """Query Google Directions for a route, publish the steps (with polylines
    converted to UTM) on the ROS routing topic, and return the lat/lng path."""
    content = request.json
    start_latlon = str(content["start_lat"]) + "," + str(content["start_lon"])
    end_latlon = str(content["end_lat"]) + "," + str(content["end_lon"])
    url = "https://maps.googleapis.com/maps/api/directions/json?origin=" + \
          start_latlon + "&destination=" + end_latlon + \
          "&key=" + API_KEY
    res = requests.get(url)
    path = []
    if res.status_code != 200:
        return jsonify(path)
    response = json.loads(res.content)
    if len(response['routes']) < 1:
        return jsonify(path)
    steps = response['routes'][0]['legs'][0]['steps']
    for step in steps:
        start_loc = step['start_location']
        end_loc = step['end_location']
        path.append({'lat': start_loc['lat'], 'lng': start_loc['lng']})
        points = decode_polyline(step['polyline']['points'])
        utm_points = []
        for point in points:
            path.append({'lat': point[0], 'lng': point[1]})
            # projector expects (lon, lat); polyline points are (lat, lng).
            x, y = projector(point[1], point[0])
            utm_points.append([x, y])
        # Replace the encoded polyline with UTM points before publishing.
        step['polyline']['points'] = utm_points
    # NOTE(review): end_loc here is the last step's end; if `steps` were
    # empty this would raise NameError — presumably Google never returns
    # a route with zero steps, but worth confirming.
    path.append({'lat': end_loc['lat'], 'lng': end_loc['lng']})
    routing_pub.publish(json.dumps(steps))
    return jsonify(path)
def decode_polyline(polyline_str):
    """Decode a Google Maps encoded polyline into (lat, lng) tuples.

    Each coordinate delta is a zigzag-encoded varint in base-64-ish ASCII
    (offset by 63, 5 bits per character); values are cumulative and scaled
    by 1e5.
    """
    coordinates = []
    lat = 0
    lng = 0
    pos = 0

    def read_delta(start):
        # Decode one zigzag varint starting at `start`; return (value, next_pos).
        value = 0
        shift = 0
        idx = start
        while True:
            chunk = ord(polyline_str[idx]) - 63
            idx += 1
            value |= (chunk & 0x1f) << shift
            shift += 5
            if chunk < 0x20:
                break
        if value & 1:
            return ~(value >> 1), idx
        return value >> 1, idx

    while pos < len(polyline_str):
        d_lat, pos = read_delta(pos)
        d_lng, pos = read_delta(pos)
        lat += d_lat
        lng += d_lng
        coordinates.append((lat / 100000.0, lng / 100000.0))
    return coordinates
def run_flask():
    # Serve the HTTP API on all interfaces; runs in a background thread.
    app.run(debug=False, port=5001, host='0.0.0.0')
if __name__ == "__main__":
    # Load the Google Maps API key from disk (first line of the key file).
    key_file_name = os.path.dirname(os.path.abspath(__file__)) + \
                    "/map_api_key/api_key"
    try:
        f = open(key_file_name, 'r')
        with f:
            for line in f:
                API_KEY = line.replace('\n', "")
                break
    except IOError:
        print "Could not read file:", key_file_name
    # Start ROS subscribers, run Flask in a background thread, then block
    # on the ROS event loop.
    add_listener()
    thread.start_new_thread(run_flask, ())
    # app.run(debug=False, port=5001, host='localhost')
    rospy.spin()
e3326ca8d7b7f34946af23656578298378330c65 | 1,627 | py | Python | intro/part03-08_string_multiplied/test/test_string_multiplied.py | Hannah-Abi/python-pro-21 | 2ce32c4bf118054329d19afdf83c50561be1ada8 | [
"MIT"
] | null | null | null | intro/part03-08_string_multiplied/test/test_string_multiplied.py | Hannah-Abi/python-pro-21 | 2ce32c4bf118054329d19afdf83c50561be1ada8 | [
"MIT"
] | null | null | null | intro/part03-08_string_multiplied/test/test_string_multiplied.py | Hannah-Abi/python-pro-21 | 2ce32c4bf118054329d19afdf83c50561be1ada8 | [
"MIT"
] | null | null | null | import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load_module, reload_module, get_stdout
from functools import reduce
from random import randint
exercise = 'src.string_multiplied'
def format_tuple(d : tuple):
    # Render the tuple but strip the quote marks around its string elements.
    rendered = str(d)
    return rendered.replace("'", "")
@points('2.string_multiplied')
class StringMultipliedTest(unittest.TestCase):
    # Grades the student's string_multiplied exercise by re-running their
    # module with patched input() and inspecting captured stdout.
    @classmethod
    def setUpClass(cls):
        # Load the student's module once with dummy inputs.
        with patch('builtins.input', side_effect =['a', '1']):
            cls.module = load_module(exercise, 'en')
    def test_strings(self):
        # Each case is (string, multiplier); expected output is string * int(multiplier).
        values = [("hiya","1"),("abc",4),("xyx",7),("hello",2),("test",6)]
        for test_case in values:
            with patch('builtins.input', side_effect = test_case):
                try:
                    reload_module(self.module)
                # NOTE(review): bare except — presumably intentional so even
                # SystemExit from student code is reported as a failure; confirm.
                except:
                    self.assertTrue(False, f"Make sure that your program works correctly with the input {test_case}")
                out = get_stdout()
                output = out.split("\n")
                corr = test_case[0] * int(test_case[1])
                self.assertTrue(len(out) > 0, "Your program does not print out anything with the inputs {}".format(test_case))
                self.assertTrue(len(output) == 1, f"Instead of printing out only one row in addition to asking for the inputs from the user, your program's print out is now in {len(output)} rows.")
                self.assertEqual(out.strip(), corr, f"The print out is incorrect with the inputs {test_case}: your program's print out is\n{out}\nwhen correct print out is\n{corr}")
# Allow running this grader directly.
if __name__ == '__main__':
    unittest.main()
813468b63734c3dcd1e63f0b36eab73e3210827f | 1,251 | py | Python | .mywaflib/waflib/Tools/gdc.py | nkuhlen/log-transform-kernel-density | 377e9196b95cfdc2d53db50796a030eb5d0f019a | [
"BSD-3-Clause"
] | null | null | null | .mywaflib/waflib/Tools/gdc.py | nkuhlen/log-transform-kernel-density | 377e9196b95cfdc2d53db50796a030eb5d0f019a | [
"BSD-3-Clause"
] | null | null | null | .mywaflib/waflib/Tools/gdc.py | nkuhlen/log-transform-kernel-density | 377e9196b95cfdc2d53db50796a030eb5d0f019a | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
from waflib.Tools import ar, d
from waflib.Configure import conf
@conf
def find_gdc(conf):
	"""
	Locate the gdc compiler, store it in ``conf.env.D`` and verify
	that the detected program really is gdc.
	"""
	conf.find_program('gdc', var='D')
	version_banner = conf.cmd_and_log(conf.env.D + ['--version'])
	if "gdc" not in version_banner:
		conf.fatal("detected compiler is not gdc")
@conf
def common_flags_gdc(conf):
	"""
	Set the flags required by *gdc*
	"""
	v = conf.env
	# _DFLAGS _DIMPORTFLAGS
	# for more info about the meaning of this dict see dmd.py
	v['DFLAGS'] = []
	# compile step
	v['D_SRC_F'] = ['-c']
	v['D_TGT_F'] = '-o%s'
	# linker
	v['D_LINKER'] = v['D']
	v['DLNK_SRC_F'] = ''
	v['DLNK_TGT_F'] = '-o%s'
	v['DINC_ST'] = '-I%s'
	# library linking uses gcc-style -l/-L flags (unlike dmd)
	v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
	v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-l%s'
	v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L%s'
	v['LINKFLAGS_dshlib'] = ['-shared']
	# D interface (header) file generation
	v['DHEADER_ext'] = '.di'
	v.DFLAGS_d_with_header = '-fintfc'
	v['D_HDR_F'] = '-fintfc-file=%s'
def configure(conf):
	"""
	Configuration for gdc
	"""
	# Find the compiler, load the archiver and generic D support,
	# then apply gdc-specific and platform-specific flags.
	conf.find_gdc()
	conf.load('ar')
	conf.load('d')
	conf.common_flags_gdc()
	conf.d_platform_flags()
| 20.85 | 58 | 0.580336 |
ca4e4807abed2c0d84900a691560c8d54eaa62e7 | 7,309 | py | Python | examples/pwr_run/checkpointing/throughput/random/job38.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | examples/pwr_run/checkpointing/throughput/random/job38.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | examples/pwr_run/checkpointing/throughput/random/job38.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | """
#Trains a ResNet on the CIFAR10 dataset.
"""
from __future__ import print_function
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau, TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras import backend as K
from keras.models import Model
from keras.datasets import cifar10
from keras.applications.mobilenet_v2 import MobileNetV2
from keras import models, layers, optimizers
from datetime import datetime
import tensorflow as tf
import numpy as np
import os
import pdb
import sys
import argparse
import time
import signal
import glob
import json
import send_signal
# Command-line interface for the scheduler-driven training job.
parser = argparse.ArgumentParser(description='Tensorflow Cifar10 Training')
parser.add_argument('--tc', metavar='TESTCASE', type=str, help='specific testcase name')
parser.add_argument('--resume', dest='resume', action='store_true', help='if True, resume training from a checkpoint')
parser.add_argument('--gpu_num', metavar='GPU_NUMBER', type=str, help='select which gpu to use')
parser.add_argument('--node', metavar='HOST_NODE', type=str, help='node of the host (scheduler)')
parser.set_defaults(resume=False)
args = parser.parse_args()
# Pin this process to the GPU chosen by the scheduler.
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=args.gpu_num
# Training parameters
batch_size = 64
args_lr = 0.0015
epoch_begin_time = 0
# Job name is derived from the script filename (e.g. "job38").
job_name = sys.argv[0].split('.')[0]
save_files = '/scratch/li.baol/checkpoint_random/' + job_name + '*'
total_epochs = 83
starting_epoch = 0
# first step is to update the PID
pid = os.getpid()
message = job_name + ' pid ' + str(pid) # 'job50 pid 3333'
send_signal.send(args.node, 10002, message)
if args.resume:
    # Checkpoint filenames end in "_<epoch>.h5"; recover the epoch to resume at.
    save_file = glob.glob(save_files)[0]
    # epochs = int(save_file.split('/')[4].split('_')[1].split('.')[0])
    starting_epoch = int(save_file.split('/')[4].split('.')[0].split('_')[-1])
data_augmentation = True
num_classes = 10
# Subtracting pixel mean improves accuracy
subtract_pixel_mean = True
n = 3
# Model name, depth and version
model_type = args.tc #'P100_resnet50_he_256_1'
# Load the CIFAR10 data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Normalize data.
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# If subtract pixel mean is enabled
if subtract_pixel_mean:
    x_train_mean = np.mean(x_train, axis=0)
    x_train -= x_train_mean
    x_test -= x_train_mean
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# Either reload the checkpointed model or build a fresh MobileNetV2 head.
# The b_end/c_end messages bracket the checkpoint-restore time for the scheduler.
if args.resume:
    print('resume from checkpoint')
    message = job_name + ' b_end'
    send_signal.send(args.node, 10002, message)
    model = keras.models.load_model(save_file)
    message = job_name + ' c_end'
    send_signal.send(args.node, 10002, message)
else:
    print('train from start')
    model = models.Sequential()
    base_model = MobileNetV2(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
    #base_model.summary()
    #pdb.set_trace()
    model.add(base_model)
    model.add(layers.Flatten())
    #model.add(layers.BatchNormalization())
    #model.add(layers.Dense(128, activation='relu'))
    #model.add(layers.Dropout(0.5))
    #model.add(layers.BatchNormalization())
    #model.add(layers.Dense(64, activation='relu'))
    #model.add(layers.Dropout(0.5))
    #model.add(layers.BatchNormalization())
    model.add(layers.Dense(10, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=Adam(lr=args_lr),
                  metrics=['accuracy'])
    #model.summary()
    print(model_type)
#pdb.set_trace()
current_epoch = 0
################### connects interrupt signal to the process #####################
def terminateProcess(signalNumber, frame):
    """SIGTERM handler: report wasted epoch time, checkpoint the model,
    notify the scheduler, and exit."""
    # first record the wasted epoch time
    global epoch_begin_time
    if epoch_begin_time == 0:
        epoch_waste_time = 0
    else:
        epoch_waste_time = int(time.time() - epoch_begin_time)
    message = job_name + ' waste ' + str(epoch_waste_time) # 'job50 waste 100'
    if epoch_waste_time > 0:
        send_signal.send(args.node, 10002, message)
    print('checkpointing the model triggered by kill -15 signal')
    # delete whatever checkpoint that already exists
    for f in glob.glob(save_files):
        os.remove(f)
    # Checkpoint filename encodes the epoch so resume can pick it back up.
    model.save('/scratch/li.baol/checkpoint_random/' + job_name + '_' + str(current_epoch) + '.h5')
    print ('(SIGTERM) terminating the process')
    message = job_name + ' checkpoint'
    send_signal.send(args.node, 10002, message)
    sys.exit()
# Install the checkpoint-on-SIGTERM handler and set up TensorBoard logging.
signal.signal(signal.SIGTERM, terminateProcess)
#################################################################################
logdir = '/scratch/li.baol/tsrbrd_log/job_runs/' + model_type + '/' + job_name
tensorboard_callback = TensorBoard(log_dir=logdir)#, update_freq='batch')
first_epoch_start = 0
class PrintEpoch(keras.callbacks.Callback):
    """Keras callback that reports epoch progress to the scheduler node."""
    def on_epoch_begin(self, epoch, logs=None):
        global current_epoch, first_epoch_start
        #remaining_epochs = epochs - epoch
        current_epoch = epoch
        print('current epoch ' + str(current_epoch))
        # Track when this epoch started so SIGTERM can report wasted time.
        global epoch_begin_time
        epoch_begin_time = time.time()
        if epoch == starting_epoch and args.resume:
            first_epoch_start = time.time()
            message = job_name + ' d_end'
            send_signal.send(args.node, 10002, message)
        elif epoch == starting_epoch:
            first_epoch_start = time.time()
        if epoch == starting_epoch:
            # send signal to indicate checkpoint is qualified
            message = job_name + ' ckpt_qual'
            send_signal.send(args.node, 10002, message)
    def on_epoch_end(self, epoch, logs=None):
        # Report the duration of the first epoch once, then report completion
        # as a fraction of the (halved) total epoch budget.
        if epoch == starting_epoch:
            first_epoch_time = int(time.time() - first_epoch_start)
            message = job_name + ' 1st_epoch ' + str(first_epoch_time)
            send_signal.send(args.node, 10002, message)
        progress = round((epoch+1) / round(total_epochs/2), 2)
        message = job_name + ' completion ' + str(progress)
        send_signal.send(args.node, 10002, message)
my_callback = PrintEpoch()
callbacks = [tensorboard_callback, my_callback]
#[checkpoint, lr_reducer, lr_scheduler, tensorboard_callback]
# Run training
model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=round(total_epochs/2),
          validation_data=(x_test, y_test),
          shuffle=True,
          callbacks=callbacks,
          initial_epoch=starting_epoch,
          verbose=1
          )
# Score trained model.
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
# send signal to indicate job has finished
message = job_name + ' finish'
send_signal.send(args.node, 10002, message)
| 32.057018 | 118 | 0.692024 |
3318840eabfecd85eec607be21c142b208c94e01 | 2,199 | py | Python | text_models/text_manipulator.py | TheLampshady/cnn_text | 009fce83335550255e63baf9b579a42cce3f5eb6 | [
"MIT"
] | null | null | null | text_models/text_manipulator.py | TheLampshady/cnn_text | 009fce83335550255e63baf9b579a42cce3f5eb6 | [
"MIT"
] | null | null | null | text_models/text_manipulator.py | TheLampshady/cnn_text | 009fce83335550255e63baf9b579a42cce3f5eb6 | [
"MIT"
] | null | null | null | from collections import Counter
import numpy as np
class TextClassifier(object):
    """Bag-of-words data helper for a 3-class text classifier.

    Builds a shared vocabulary over the train and test corpora and turns
    raw texts into dense word-count vectors and integer targets into
    3-class one-hot rows (categories other than 0/1 map to class 2).
    """

    def __init__(self, train_data, train_target, test_data, test_target, batch_size=10):
        self.train_data = train_data
        self.train_target = train_target
        self.test_data = test_data
        self.test_target = test_target
        self.batch_size = batch_size
        self.vocab = Counter()
        self.total_words = self.build_vocab()
        self.batch_count = int(len(train_data) / batch_size)

    def build_vocab(self):
        """Count every (lowercased) word across both corpora; return vocab size."""
        for corpus in (self.train_data, self.test_data):
            for text in corpus:
                self.vocab.update(self.format_text(text))
        return len(self.vocab)

    @staticmethod
    def format_text(text):
        """Split *text* on single spaces and lowercase each token."""
        return [token.lower() for token in text.split(' ')]

    @property
    def word_index(self):
        """Lazily built mapping from vocabulary word to its column index."""
        if not hasattr(self, "_word_index"):
            self._word_index = {
                word.lower(): position
                for position, word in enumerate(self.vocab)
            }
        return self._word_index

    def get_batch(self, data, target):
        """Vectorize texts into count rows and targets into one-hot rows."""
        feature_rows = []
        label_rows = []
        index = self.word_index
        for text in data:
            row = np.zeros(self.total_words, dtype=float)
            for token in self.format_text(text):
                row[index[token.lower()]] += 1
            feature_rows.append(row)
        for category in target:
            label = np.zeros(3, dtype=float)
            # Categories 0 and 1 get their own slot; everything else is class 2.
            if category == 0:
                label[0] = 1.
            elif category == 1:
                label[1] = 1.
            else:
                label[2] = 1.
            label_rows.append(label)
        return np.array(feature_rows), np.array(label_rows)

    def get_train_batch(self, i=0):
        """Return training batch number *i* as (features, labels)."""
        start = i * self.batch_size
        stop = start + self.batch_size
        return self.get_batch(self.train_data[start:stop],
                              self.train_target[start:stop])

    def get_test_batch(self):
        """Return the entire test set as (features, labels)."""
        return self.get_batch(self.test_data, self.test_target)
bd0f92e0d759204b33b6cb9b261531d61134605e | 2,018 | py | Python | ppocr/metrics/rec_metric.py | vinhtq115/PaddleOCR | 5730d5cd48b38b163ff06d0f3c57da5991aa9448 | [
"Apache-2.0"
] | 5 | 2021-01-19T13:27:31.000Z | 2021-01-27T12:55:23.000Z | ppocr/metrics/rec_metric.py | happy0709/PaddleOCR | c3e5522c38fb42ad721de2bae40425589f48b646 | [
"Apache-2.0"
] | 1 | 2021-02-24T05:24:32.000Z | 2021-03-15T06:45:34.000Z | ppocr/metrics/rec_metric.py | happy0709/PaddleOCR | c3e5522c38fb42ad721de2bae40425589f48b646 | [
"Apache-2.0"
] | null | null | null | # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import Levenshtein
class RecMetric(object):
    """Accumulating accuracy / normalized-edit-distance metric for text
    recognition.

    ``__call__`` consumes one batch of (prediction, confidence) /
    (target, _) pairs and returns the batch metrics; ``get_metric``
    returns the metrics accumulated since the last ``reset``.
    """

    def __init__(self, main_indicator='acc', **kwargs):
        self.main_indicator = main_indicator
        self.reset()

    def __call__(self, pred_label, *args, **kwargs):
        preds, labels = pred_label
        correct_num = 0
        all_num = 0
        norm_edit_dis = 0.0
        for (pred, pred_conf), (target, _) in zip(preds, labels):
            # max(..., 1) guards against ZeroDivisionError when both the
            # prediction and the target are empty strings (distance is 0).
            norm_edit_dis += Levenshtein.distance(pred, target) / max(
                len(pred), len(target), 1)
            if pred == target:
                correct_num += 1
            all_num += 1
        self.correct_num += correct_num
        self.all_num += all_num
        self.norm_edit_dis += norm_edit_dis
        # NOTE(review): an empty batch would still divide by zero here —
        # presumably callers never pass one; confirm upstream.
        return {
            'acc': correct_num / all_num,
            'norm_edit_dis': 1 - norm_edit_dis / all_num
        }

    def get_metric(self):
        """
        return metrics {
                 'acc': 0,
                 'norm_edit_dis': 0,
            }
        """
        acc = self.correct_num / self.all_num
        norm_edit_dis = 1 - self.norm_edit_dis / self.all_num
        self.reset()
        return {'acc': acc, 'norm_edit_dis': norm_edit_dis}

    def reset(self):
        # Accumulators for the running epoch.
        self.correct_num = 0
        self.all_num = 0
        self.norm_edit_dis = 0
b9050089219f9433948dcbed98a4f3150a6666b1 | 18,060 | py | Python | pytorch3d/transforms/rotation_conversions.py | Len-Li/pytorch3d | 95029e7a1211904814b3919cf12fe9d1952fb415 | [
"BSD-3-Clause"
] | 1 | 2021-06-30T14:10:26.000Z | 2021-06-30T14:10:26.000Z | pytorch3d/transforms/rotation_conversions.py | Len-Li/pytorch3d | 95029e7a1211904814b3919cf12fe9d1952fb415 | [
"BSD-3-Clause"
] | null | null | null | pytorch3d/transforms/rotation_conversions.py | Len-Li/pytorch3d | 95029e7a1211904814b3919cf12fe9d1952fb415 | [
"BSD-3-Clause"
] | 1 | 2021-07-29T12:11:44.000Z | 2021-07-29T12:11:44.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import functools
from typing import Optional
import torch
import torch.nn.functional as F
"""
The transformation matrices returned from the functions in this file assume
the points on which the transformation will be applied are column vectors.
i.e. the R matrix is structured as
R = [
[Rxx, Rxy, Rxz],
[Ryx, Ryy, Ryz],
[Rzx, Rzy, Rzz],
] # (3, 3)
This matrix can be applied to column vectors by post multiplication
by the points e.g.
points = [[0], [1], [2]] # (3 x 1) xyz coordinates of a point
transformed_points = R * points
To apply the same matrix to points which are row vectors, the R matrix
can be transposed and pre multiplied by the points:
e.g.
points = [[0, 1, 2]] # (1 x 3) xyz coordinates of a point
transformed_points = points * R.transpose(1, 0)
"""
def quaternion_to_matrix(quaternions):
    """
    Convert rotations given as quaternions to rotation matrices.

    Args:
        quaternions: quaternions with real part first,
            as tensor of shape (..., 4).

    Returns:
        Rotation matrices as tensor of shape (..., 3, 3).
    """
    w, x, y, z = torch.unbind(quaternions, -1)
    # Normalization factor: 2 / |q|^2, so unnormalized quaternions still
    # produce proper rotation matrices.
    scale = 2.0 / (quaternions * quaternions).sum(-1)
    entries = (
        1 - scale * (y * y + z * z),
        scale * (x * y - z * w),
        scale * (x * z + y * w),
        scale * (x * y + z * w),
        1 - scale * (x * x + z * z),
        scale * (y * z - x * w),
        scale * (x * z - y * w),
        scale * (y * z + x * w),
        1 - scale * (x * x + y * y),
    )
    flat = torch.stack(entries, -1)
    return flat.reshape(quaternions.shape[:-1] + (3, 3))
def _copysign(a, b):
"""
Return a tensor where each element has the absolute value taken from the,
corresponding element of a, with sign taken from the corresponding
element of b. This is like the standard copysign floating-point operation,
but is not careful about negative 0 and NaN.
Args:
a: source tensor.
b: tensor whose signs will be used, of the same shape as a.
Returns:
Tensor of the same shape as a with the signs of b.
"""
signs_differ = (a < 0) != (b < 0)
return torch.where(signs_differ, -a, a)
def _sqrt_positive_part(x):
"""
Returns torch.sqrt(torch.max(0, x))
but with a zero subgradient where x is 0.
"""
ret = torch.zeros_like(x)
positive_mask = x > 0
ret[positive_mask] = torch.sqrt(x[positive_mask])
return ret
def matrix_to_quaternion(matrix):
    """
    Convert rotations given as rotation matrices to quaternions.

    Args:
        matrix: Rotation matrices as tensor of shape (..., 3, 3).

    Returns:
        quaternions with real part first, as tensor of shape (..., 4).

    Raises:
        ValueError: if the trailing dimensions are not (3, 3).
    """
    if matrix.size(-1) != 3 or matrix.size(-2) != 3:
        # Fixed: the message previously rendered a stray "f" before the shape.
        raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.")
    m00 = matrix[..., 0, 0]
    m11 = matrix[..., 1, 1]
    m22 = matrix[..., 2, 2]
    # Magnitudes of the quaternion components from the matrix diagonal.
    o0 = 0.5 * _sqrt_positive_part(1 + m00 + m11 + m22)
    x = 0.5 * _sqrt_positive_part(1 + m00 - m11 - m22)
    y = 0.5 * _sqrt_positive_part(1 - m00 + m11 - m22)
    z = 0.5 * _sqrt_positive_part(1 - m00 - m11 + m22)
    # Signs of the imaginary parts from the antisymmetric off-diagonal terms.
    o1 = _copysign(x, matrix[..., 2, 1] - matrix[..., 1, 2])
    o2 = _copysign(y, matrix[..., 0, 2] - matrix[..., 2, 0])
    o3 = _copysign(z, matrix[..., 1, 0] - matrix[..., 0, 1])
    return torch.stack((o0, o1, o2, o3), -1)
def _axis_angle_rotation(axis: str, angle):
"""
Return the rotation matrices for one of the rotations about an axis
of which Euler angles describe, for each value of the angle given.
Args:
axis: Axis label "X" or "Y or "Z".
angle: any shape tensor of Euler angles in radians
Returns:
Rotation matrices as tensor of shape (..., 3, 3).
"""
cos = torch.cos(angle)
sin = torch.sin(angle)
one = torch.ones_like(angle)
zero = torch.zeros_like(angle)
if axis == "X":
R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos)
if axis == "Y":
R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos)
if axis == "Z":
R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one)
return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3))
def euler_angles_to_matrix(euler_angles, convention: str):
    """
    Convert rotations given as Euler angles in radians to rotation matrices.

    Args:
        euler_angles: Euler angles in radians as tensor of shape (..., 3).
        convention: Convention string of three uppercase letters from
            {"X", "Y", and "Z"}.

    Returns:
        Rotation matrices as tensor of shape (..., 3, 3).
    """
    if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3:
        raise ValueError("Invalid input euler angles.")
    if len(convention) != 3:
        raise ValueError("Convention must have 3 letters.")
    if convention[1] in (convention[0], convention[2]):
        raise ValueError(f"Invalid convention {convention}.")
    for letter in convention:
        if letter not in ("X", "Y", "Z"):
            raise ValueError(f"Invalid letter {letter} in convention string.")
    # Compose the three single-axis rotations left-to-right.
    composed = None
    for axis, angle in zip(convention, torch.unbind(euler_angles, -1)):
        rotation = _axis_angle_rotation(axis, angle)
        composed = rotation if composed is None else torch.matmul(composed, rotation)
    return composed
def _angle_from_tan(
    axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool
):
    """
    Extract the first or third Euler angle from the two members of
    the matrix which are positive constant times its sine and cosine.

    Args:
        axis: Axis label "X" or "Y or "Z" for the angle we are finding.
        other_axis: Axis label "X" or "Y or "Z" for the middle axis in the
            convention.
        data: Rotation matrices as tensor of shape (..., 3, 3).
        horizontal: Whether we are looking for the angle for the third axis,
            which means the relevant entries are in the same row of the
            rotation matrix. If not, they are in the same column.
        tait_bryan: Whether the first and third axes in the convention differ.

    Returns:
        Euler Angles in radians for each matrix in data as a tensor
        of shape (...).
    """
    # Indices of the sine/cosine entries for this axis within the row/column.
    i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis]
    if horizontal:
        i2, i1 = i1, i2
    # "even" means (axis, other_axis) is a cyclic pair; the sign pattern of
    # the matrix entries flips for the anti-cyclic pairs.
    even = (axis + other_axis) in ["XY", "YZ", "ZX"]
    if horizontal == even:
        return torch.atan2(data[..., i1], data[..., i2])
    if tait_bryan:
        return torch.atan2(-data[..., i2], data[..., i1])
    return torch.atan2(data[..., i2], -data[..., i1])
def _index_from_letter(letter: str):
    """
    Map an axis letter to its coordinate index.

    Args:
        letter: Axis label, one of "X", "Y" or "Z".

    Returns:
        0 for "X", 1 for "Y", 2 for "Z".

    Raises:
        ValueError: If ``letter`` is not one of "X", "Y" or "Z".
    """
    if letter == "X":
        return 0
    if letter == "Y":
        return 1
    if letter == "Z":
        return 2
    # Previously an invalid letter fell through and returned None, which
    # surfaced later as a confusing TypeError in the caller; fail fast.
    raise ValueError("letter must be either X, Y or Z.")
def matrix_to_euler_angles(matrix, convention: str):
    """
    Convert rotations given as rotation matrices to Euler angles in radians.

    Args:
        matrix: Rotation matrices as tensor of shape (..., 3, 3).
        convention: Convention string of three uppercase letters.

    Returns:
        Euler angles in radians as tensor of shape (..., 3).

    Raises:
        ValueError: If the convention string or the matrix shape is invalid.
    """
    if len(convention) != 3:
        raise ValueError("Convention must have 3 letters.")
    if convention[1] in (convention[0], convention[2]):
        raise ValueError(f"Invalid convention {convention}.")
    for letter in convention:
        if letter not in ("X", "Y", "Z"):
            raise ValueError(f"Invalid letter {letter} in convention string.")
    if matrix.size(-1) != 3 or matrix.size(-2) != 3:
        # BUG FIX: the message used to read "shape f{...}" because a stray
        # "f" was left inside the f-string literal.
        raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.")
    i0 = _index_from_letter(convention[0])
    i2 = _index_from_letter(convention[2])
    tait_bryan = i0 != i2
    if tait_bryan:
        # For Tait-Bryan conventions the middle angle comes from an asin term
        # whose sign depends on the cyclic order of the outer axes.
        central_angle = torch.asin(
            matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0)
        )
    else:
        central_angle = torch.acos(matrix[..., i0, i0])
    o = (
        _angle_from_tan(
            convention[0], convention[1], matrix[..., i2], False, tait_bryan
        ),
        central_angle,
        _angle_from_tan(
            convention[2], convention[1], matrix[..., i0, :], True, tait_bryan
        ),
    )
    return torch.stack(o, -1)
def random_quaternions(
    n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False
):
    """
    Generate random quaternions representing rotations,
    i.e. versors with nonnegative real part.

    Args:
        n: Number of quaternions in a batch to return.
        dtype: Type to return.
        device: Desired device of returned tensor. Default:
            uses the current device for the default tensor type.
        requires_grad: Whether the resulting tensor should have the gradient
            flag set.

    Returns:
        Quaternions as tensor of shape (N, 4).
    """
    # Sample an isotropic Gaussian and project onto the unit sphere.
    raw = torch.randn((n, 4), dtype=dtype, device=device, requires_grad=requires_grad)
    squared_norms = (raw * raw).sum(1)
    # Dividing by a norm that carries the sign of the real component both
    # normalizes the quaternion and makes its real part nonnegative.
    return raw / _copysign(torch.sqrt(squared_norms), raw[:, 0])[:, None]
def random_rotations(
    n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False
):
    """
    Generate random rotations as 3x3 rotation matrices.

    Args:
        n: Number of rotation matrices in a batch to return.
        dtype: Type to return.
        device: Device of returned tensor. Default: if None,
            uses the current device for the default tensor type.
        requires_grad: Whether the resulting tensor should have the gradient
            flag set.

    Returns:
        Rotation matrices as tensor of shape (n, 3, 3).
    """
    # Uniform random rotations via uniform random unit quaternions.
    qs = random_quaternions(n, dtype=dtype, device=device,
                            requires_grad=requires_grad)
    return quaternion_to_matrix(qs)
def random_rotation(
    dtype: Optional[torch.dtype] = None, device=None, requires_grad=False
):
    """
    Generate a single random 3x3 rotation matrix.

    Args:
        dtype: Type to return
        device: Device of returned tensor. Default: if None,
            uses the current device for the default tensor type
        requires_grad: Whether the resulting tensor should have the gradient
            flag set

    Returns:
        Rotation matrix as tensor of shape (3, 3).
    """
    # Delegate to the batched version and strip the batch dimension.
    batch = random_rotations(1, dtype, device, requires_grad)
    return batch[0]
def standardize_quaternion(quaternions):
    """
    Convert a unit quaternion to a standard form: one in which the real
    part is non negative.

    Args:
        quaternions: Quaternions with real part first,
            as tensor of shape (..., 4).

    Returns:
        Standardized quaternions as tensor of shape (..., 4).
    """
    # q and -q encode the same rotation; flip those with a negative real part.
    negative_real = quaternions[..., 0:1] < 0
    return torch.where(negative_real, -quaternions, quaternions)
def quaternion_raw_multiply(a, b):
    """
    Multiply two quaternions.
    Usual torch rules for broadcasting apply.

    Args:
        a: Quaternions as tensor of shape (..., 4), real part first.
        b: Quaternions as tensor of shape (..., 4), real part first.

    Returns:
        The product of a and b, a tensor of quaternions shape (..., 4).
    """
    w1, x1, y1, z1 = torch.unbind(a, -1)
    w2, x2, y2, z2 = torch.unbind(b, -1)
    # Hamilton product, real component first.
    out_w = w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2
    out_x = w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2
    out_y = w1 * y2 - x1 * z2 + y1 * w2 + z1 * x2
    out_z = w1 * z2 + x1 * y2 - y1 * x2 + z1 * w2
    return torch.stack((out_w, out_x, out_y, out_z), -1)
def quaternion_multiply(a, b):
    """
    Multiply two quaternions representing rotations, returning the quaternion
    representing their composition, i.e. the versor with nonnegative real part.
    Usual torch rules for broadcasting apply.

    Args:
        a: Quaternions as tensor of shape (..., 4), real part first.
        b: Quaternions as tensor of shape (..., 4), real part first.

    Returns:
        The product of a and b, a tensor of quaternions of shape (..., 4).
    """
    # Raw Hamilton product followed by sign standardization.
    return standardize_quaternion(quaternion_raw_multiply(a, b))
def quaternion_invert(quaternion):
    """
    Given a quaternion representing rotation, get the quaternion representing
    its inverse.

    Args:
        quaternion: Quaternions as tensor of shape (..., 4), with real part
            first, which must be versors (unit quaternions).

    Returns:
        The inverse, a tensor of quaternions of shape (..., 4).
    """
    # For a unit quaternion the inverse is simply the conjugate.
    conjugate_signs = quaternion.new_tensor([1, -1, -1, -1])
    return quaternion * conjugate_signs
def quaternion_apply(quaternion, point):
    """
    Apply the rotation given by a quaternion to a 3D point.
    Usual torch rules for broadcasting apply.

    Args:
        quaternion: Tensor of quaternions, real part first, of shape (..., 4).
        point: Tensor of 3D points of shape (..., 3).

    Returns:
        Tensor of rotated points of shape (..., 3).

    Raises:
        ValueError: If the last dimension of ``point`` is not 3.
    """
    if point.size(-1) != 3:
        # BUG FIX: the message used to read "3D, f{...}" because a stray
        # "f" was left inside the f-string literal.
        raise ValueError(f"Points are not in 3D, {point.shape}.")
    # Rotate by conjugation: q * (0, p) * q^-1; the vector part is the result.
    real_parts = point.new_zeros(point.shape[:-1] + (1,))
    point_as_quaternion = torch.cat((real_parts, point), -1)
    out = quaternion_raw_multiply(
        quaternion_raw_multiply(quaternion, point_as_quaternion),
        quaternion_invert(quaternion),
    )
    return out[..., 1:]
def axis_angle_to_matrix(axis_angle):
    """
    Convert rotations given as axis/angle to rotation matrices.

    Args:
        axis_angle: Rotations given as a vector in axis angle form,
            as a tensor of shape (..., 3), where the magnitude is
            the angle turned anticlockwise in radians around the
            vector's direction.

    Returns:
        Rotation matrices as tensor of shape (..., 3, 3).
    """
    # Convert via the quaternion representation.
    quaternions = axis_angle_to_quaternion(axis_angle)
    return quaternion_to_matrix(quaternions)
def matrix_to_axis_angle(matrix):
    """
    Convert rotations given as rotation matrices to axis/angle.

    Args:
        matrix: Rotation matrices as tensor of shape (..., 3, 3).

    Returns:
        Rotations given as a vector in axis angle form, as a tensor
        of shape (..., 3), where the magnitude is the angle
        turned anticlockwise in radians around the vector's
        direction.
    """
    # Convert via the quaternion representation.
    quaternions = matrix_to_quaternion(matrix)
    return quaternion_to_axis_angle(quaternions)
def axis_angle_to_quaternion(axis_angle):
    """
    Convert rotations given as axis/angle to quaternions.

    Args:
        axis_angle: Rotations given as a vector in axis angle form,
            as a tensor of shape (..., 3), where the magnitude is
            the angle turned anticlockwise in radians around the
            vector's direction.

    Returns:
        quaternions with real part first, as tensor of shape (..., 4).
    """
    angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True)
    half_angles = 0.5 * angles
    # sin(x/2)/x is ill-conditioned near zero; use a Taylor expansion there.
    small = angles.abs() < 1e-6
    large = ~small
    ratio = torch.empty_like(angles)
    ratio[large] = torch.sin(half_angles[large]) / angles[large]
    # For small x: sin(x/2) ~= x/2 - (x/2)^3/6, so sin(x/2)/x ~= 1/2 - x^2/48.
    ratio[small] = 0.5 - (angles[small] * angles[small]) / 48
    return torch.cat([torch.cos(half_angles), axis_angle * ratio], dim=-1)
def quaternion_to_axis_angle(quaternions):
    """
    Convert rotations given as quaternions to axis/angle.

    Args:
        quaternions: quaternions with real part first,
            as tensor of shape (..., 4).

    Returns:
        Rotations given as a vector in axis angle form, as a tensor
        of shape (..., 3), where the magnitude is the angle
        turned anticlockwise in radians around the vector's
        direction.
    """
    norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True)
    half_angles = torch.atan2(norms, quaternions[..., :1])
    angles = 2 * half_angles
    # sin(x/2)/x is ill-conditioned near zero; use a Taylor expansion there.
    small = angles.abs() < 1e-6
    not_small = ~small
    ratio = torch.empty_like(angles)
    ratio[not_small] = torch.sin(half_angles[not_small]) / angles[not_small]
    # For small x: sin(x/2) ~= x/2 - (x/2)^3/6, so sin(x/2)/x ~= 1/2 - x^2/48.
    ratio[small] = 0.5 - (angles[small] * angles[small]) / 48
    return quaternions[..., 1:] / ratio
def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor:
    """
    Converts 6D rotation representation by Zhou et al. [1] to rotation matrix
    using Gram--Schmidt orthogonalization per Section B of [1].

    Args:
        d6: 6D rotation representation, of size (*, 6)

    Returns:
        batch of rotation matrices of size (*, 3, 3)

    [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H.
    On the Continuity of Rotation Representations in Neural Networks.
    IEEE Conference on Computer Vision and Pattern Recognition, 2019.
    Retrieved from http://arxiv.org/abs/1812.07035
    """
    first_col, second_col = d6[..., :3], d6[..., 3:]
    # Gram-Schmidt: normalize the first vector, then remove its component
    # from the second and normalize; the third row completes the frame.
    b1 = F.normalize(first_col, dim=-1)
    projection = (b1 * second_col).sum(-1, keepdim=True) * b1
    b2 = F.normalize(second_col - projection, dim=-1)
    b3 = torch.cross(b1, b2, dim=-1)
    return torch.stack((b1, b2, b3), dim=-2)
def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor:
    """
    Converts rotation matrices to 6D rotation representation by Zhou et al. [1]
    by dropping the last row. Note that 6D representation is not unique.

    Args:
        matrix: batch of rotation matrices of size (*, 3, 3)

    Returns:
        6D rotation representation, of size (*, 6)

    [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H.
    On the Continuity of Rotation Representations in Neural Networks.
    IEEE Conference on Computer Vision and Pattern Recognition, 2019.
    Retrieved from http://arxiv.org/abs/1812.07035
    """
    # Keep the first two rows and flatten them into the last dimension.
    batch_dims = matrix.size()[:-2]
    return matrix[..., :2, :].clone().reshape(*batch_dims, 6)
| 32.77677 | 84 | 0.616888 |
b99025d0c1e26503cbaedec76f05e38dd6015d81 | 5,435 | py | Python | sknano/core/geometric_regions/tests/test_funcs.py | haidi-ustc/scikit-nano | ef9b24165ba37918b3f520657f7311ba139b3e7d | [
"BSD-2-Clause"
] | 21 | 2016-06-08T18:27:20.000Z | 2022-03-22T08:27:46.000Z | sknano/core/geometric_regions/tests/test_funcs.py | haidi-ustc/scikit-nano | ef9b24165ba37918b3f520657f7311ba139b3e7d | [
"BSD-2-Clause"
] | 8 | 2016-06-24T19:45:58.000Z | 2021-03-25T21:42:29.000Z | sknano/core/geometric_regions/tests/test_funcs.py | scikit-nano/scikit-nano | ef9b24165ba37918b3f520657f7311ba139b3e7d | [
"BSD-2-Clause"
] | 9 | 2016-12-08T16:35:52.000Z | 2021-06-23T17:13:44.000Z | #! /usr/bin/env python
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import nose
from nose.tools import assert_true, assert_equal
import numpy as np
from sknano.core.geometric_regions import Parallelepiped, Cuboid, \
generate_bounding_box
from sknano.core.math import Point, Vector
from sknano.generators import GrapheneGenerator
def test1():
    """Check that generate_bounding_box() reproduces the lattice region of a
    rotated graphene sheet after the region is rotated with the lattice's
    orientation matrix and re-centered on the structure centroid.
    """
    print('generating graphene structure')
    graphene = GrapheneGenerator(armchair_edge_length=5,
                                 zigzag_edge_length=5)
    lattice = graphene.lattice
    # Dump the lattice state before any transformation, for debugging.
    print('graphene.bounds:\n{}'.format(graphene.bounds))
    print('graphene.centroid:\n{}'.format(graphene.centroid))
    print('graphene.lattice:\n{}'.format(lattice))
    print('graphene.lattice.a1:\n{}'.format(lattice.a1))
    print('graphene.lattice.a2:\n{}'.format(lattice.a2))
    print('graphene.lattice.a3:\n{}'.format(lattice.a3))
    print('graphene.lattice.orientation_matrix:\n{}'.format(
        lattice.orientation_matrix))
    print('rotating graphene')
    # In-place rotation; `lattice` is the same object as graphene.lattice,
    # so its orientation matrix is updated too.
    graphene.rotate(angle=-np.pi/2, axis='x')
    print('graphene.bounds:\n{}'.format(graphene.bounds))
    print('graphene.centroid:\n{}'.format(graphene.centroid))
    print('graphene.lattice:\n{}'.format(lattice))
    print('graphene.lattice.a1:\n{}'.format(lattice.a1))
    print('graphene.lattice.a2:\n{}'.format(lattice.a2))
    print('graphene.lattice.a3:\n{}'.format(lattice.a3))
    print('graphene.lattice.orientation_matrix:\n{}'.format(
        lattice.orientation_matrix))
    # Graphene lattice angles are expected to be orthogonal (90 degrees).
    assert_true(np.allclose(lattice.angles, 3 * [90.0]))
    # Axis-aligned box with the same edge lengths as the lattice.
    lattice_region = Cuboid(pmax=lattice.lengths)
    # lattice_region = Parallelepiped(u=lattice.a * xhat,
    #                                 v=lattice.b * yhat,
    #                                 w=lattice.c * zhat)
    assert_equal(lattice_region.a, lattice.a)
    assert_equal(lattice_region.b, lattice.b)
    assert_equal(lattice_region.c, lattice.c)
    print('lattice_region:\n{}'.format(lattice_region))
    print('lattice_region.centroid:\n{}'.format(lattice_region.centroid))
    print('\nrotating lattice_region')
    # Apply the lattice's orientation to the box (in place).
    lattice_region.rotate(transform_matrix=lattice.orientation_matrix)
    # assert_equal(lattice_region.a, lattice.a)
    # assert_equal(lattice_region.b, lattice.b)
    # assert_equal(lattice_region.c, lattice.c)
    print('lattice_region:\n{}'.format(lattice_region))
    print('lattice_region.centroid:\n{}'.format(lattice_region.centroid))
    print('\ncentering lattice_region on graphene centroid')
    # Translate the box so its centroid coincides with the structure centroid.
    tvec = Vector(Point(graphene.centroid) - lattice_region.centroid)
    lattice_region.translate(tvec)
    # assert_equal(lattice_region.a, lattice.a)
    # assert_equal(lattice_region.b, lattice.b)
    # assert_equal(lattice_region.c, lattice.c)
    print('lattice_region:\n{}'.format(lattice_region))
    print('lattice_region.centroid:\n{}'.format(lattice_region.centroid))
    # The library helper must yield the same region we built manually.
    bounding_box = generate_bounding_box(from_lattice=lattice,
                                         center=graphene.centroid,
                                         verbose=True)
    print('bounding_box:\n{}'.format(bounding_box))
    assert_equal(bounding_box, lattice_region)
    print('lattice_region.lengths: {}, {}, {}'.format(
        lattice_region.a, lattice_region.b, lattice_region.c))
def test2():
    """Check that a Cuboid built directly from the lattice lengths matches a
    bounding box computed from triclinic cell tilt factors (the xy/xz/yz
    bound formulas, as used by e.g. LAMMPS) after identical rotation and
    re-centering of both boxes.
    """
    graphene = GrapheneGenerator(armchair_edge_length=5,
                                 zigzag_edge_length=5)
    graphene.rotate(angle=-np.pi/2, axis='x')
    lattice = graphene.lattice
    bounding_box1 = Cuboid()
    bounding_box2 = Cuboid()
    # Box 1: directly from the lattice edge lengths.
    lattice_region1 = Cuboid(pmax=lattice.lengths)
    bounding_box1.pmin = lattice_region1.pmin
    bounding_box1.pmax = lattice_region1.pmax
    # Box 2: from the triclinic cell parameters (lengths + tilt factors).
    a, b, c = lattice.lengths
    cos_alpha, cos_beta, cos_gamma = np.cos(np.radians(lattice.angles))
    lx = a
    xy = b * cos_gamma
    xz = c * cos_beta
    ly = np.sqrt(b ** 2 - xy ** 2)
    yz = (b * c * cos_alpha - xy * xz) / ly
    lz = np.sqrt(c ** 2 - xz ** 2 - yz ** 2)
    # Parallelepiped spanned by the columns of the orthogonalization matrix.
    lattice_region2 = \
        Parallelepiped(u=Vector(lattice.ortho_matrix[:, 0].A.flatten()),
                       v=Vector(lattice.ortho_matrix[:, 1].A.flatten()),
                       w=Vector(lattice.ortho_matrix[:, 2].A.flatten()))
    xlo, ylo, zlo = lattice_region2.o
    print('xy={}, xz={}, yz={}'.format(xy, xz, yz))
    print('lx={}, ly={}, lz={}'.format(lx, ly, lz))
    print('xlo={}, ylo={}, zlo={}'.format(xlo, ylo, zlo))
    # Standard triclinic-box bound formulas: the tilt factors widen the
    # axis-aligned bounds in x and y.
    xlo_bound = xlo + min(0.0, xy, xz, xy + xz)
    xhi_bound = xlo + lx + max(0.0, xy, xz, xy + xz)
    ylo_bound = ylo + min(0.0, yz)
    yhi_bound = ylo + ly + max(0.0, yz)
    zlo_bound = zlo
    zhi_bound = zlo + lz
    bounding_box2.pmin = [xlo_bound, ylo_bound, zlo_bound]
    bounding_box2.pmax = [xhi_bound, yhi_bound, zhi_bound]
    print(bounding_box1)
    print(bounding_box2)
    # Apply the same rotation and centroid-centering to both boxes (in place).
    [bounding_box.rotate(transform_matrix=lattice.orientation_matrix)
     for bounding_box in (bounding_box1, bounding_box2)]
    [bounding_box.translate(Vector(Point(graphene.centroid) -
                                   bounding_box.centroid))
     for bounding_box in (bounding_box1, bounding_box2)]
    [assert_true(bounding_box.pmin <= bounding_box.pmax) for bounding_box
     in (bounding_box1, bounding_box2)]
    assert_equal(bounding_box1, bounding_box2)
    print(bounding_box1)
    print(bounding_box2)
if __name__ == '__main__':
    # Run this module's tests through nose when executed directly.
    nose.runmodule()
| 39.384058 | 73 | 0.663661 |
6d71245abad56aa70c5eab855c9d89b7180222d4 | 4,497 | py | Python | creational/prototype.py | prateeksan/python-design-patterns | f9c6fc3501a5a2c8467980aaf1ba7a9839bf07cf | [
"MIT"
] | 61 | 2017-06-07T22:48:38.000Z | 2022-02-25T01:06:37.000Z | creational/prototype.py | smohant/python-design-patterns | f9c6fc3501a5a2c8467980aaf1ba7a9839bf07cf | [
"MIT"
] | 2 | 2017-06-25T21:08:58.000Z | 2021-04-20T00:41:49.000Z | creational/prototype.py | smohant/python-design-patterns | f9c6fc3501a5a2c8467980aaf1ba7a9839bf07cf | [
"MIT"
] | 12 | 2017-06-08T15:34:52.000Z | 2022-02-24T09:01:05.000Z | """ The Prototype Pattern
Notes:
In the prototype pattern, rather than creating new instances of an object, only
one instance (the breeder) of a class (the prototype) is created and deep-copied
whenever the need arises. This pattern is particularly useful when:
+ The cost of initialization is high.
+ Many objects of the same type are needed but some (or all) properties that are
costly (time/space/bandwidth) to set remain the same across all objects.
The following example considers the use case of a prototype for student report
cards. Let us assume that all report cards for any given year have some universal
data that might take a long time to query and post-process. Let us also assume
that each report card belongs to a student and contains some data that is unique
to the student and can be populated after the universal data has been populated.
This is a good use case for the pattern since we only need one instance of the
ReportCardPrototype per year (the breeder for that year). To create report
card(s) for a particular student, we only need to make one clone of each breeder
associated to the year that the student was enrolled.
"""
import copy
class ReportCardPrototype:
    """Prototype ("breeder") for report cards.

    One instance is meant to exist per school year; per-student copies are
    produced with :meth:`clone` and then specialized via :meth:`set_student`.
    """

    def __init__(self, year):
        """Build the breeder for ``year`` (only one per year should exist)."""
        self.year = year
        self.report = None
        self.student_id = None
        self._build_general_report()

    def set_student(self, s_id):
        """Attach a student id and fill in their data.

        Intended to be called on clones of the breeders only.
        """
        self.student_id = s_id
        self._populate_student_data()

    def _build_general_report(self):
        """Placeholder for the (assumed very costly) year-wide report build.

        The point of the pattern is to call this as rarely as possible.
        """
        pass

    def _populate_student_data(self):
        """Placeholder for per-student data population.

        Should only be invoked via :meth:`set_student`; all costly per-clone
        queries belong here.
        """
        pass

    def clone(self):
        """Return an independent deep copy of this prototype."""
        # The copy is a brand new object with its own identity and state.
        return copy.deepcopy(self)

    def __repr__(self):
        return "<ReportCard: student_id: {}, year: {}>".format(self.student_id,
                                                               self.year)
class ReportFactory():
    """Front end to the prototype breeders.

    Keeps one ReportCardPrototype per year (shared at class level) and hands
    out student-specific clones through :meth:`make`.
    """

    _report_breeders = {}

    def __init__(self):
        """No per-factory state yet; breeders are shared across instances."""
        pass

    def make(self, s_id, year):
        """Return a report card for ``s_id`` in ``year``.

        Lazily creates the year's breeder on first request, then clones it
        and specializes the clone for the student.
        """
        breeders = ReportFactory._report_breeders
        if year not in breeders:
            breeders[year] = ReportCardPrototype(year)
        card = breeders[year].clone()
        card.set_student(s_id)
        return card
class Student():
    """Demo consumer of the prototype machinery.

    Instantiating a Student immediately materializes one report card per
    enrollment year via the supplied factory.
    """

    def __init__(self, s_id, years, report_factory):
        self.id = s_id
        self.years = years
        self.report_cards = []
        self.report_factory = report_factory
        self._get_report_cards()

    def _get_report_cards(self):
        """Request one report card from the factory for each year."""
        for enrollment_year in self.years:
            self.report_cards.append(
                self.report_factory.make(self.id, enrollment_year))
if __name__ == "__main__":
# The factory acts as an interface to prototype breeders.
factory = ReportFactory()
# Constructing a student automatically clones all breeders for his/her years.
student_1234 = Student(s_id=1234, years=[2015, 2016], report_factory=factory)
student_4321 = Student(s_id=4321, years=[2014, 2015], report_factory=factory)
print(student_1234.report_cards)
print(student_4321.report_cards)
| 35.409449 | 81 | 0.694241 |
d1617d5baab296e71473be3f044cc115998be9b0 | 15,159 | py | Python | ircodec/command.py | kentwait/ircodec | 9558e33475b964b10cedf592b7a4cf2b65456c5a | [
"MIT"
] | 9 | 2019-12-06T06:58:55.000Z | 2021-06-10T21:06:22.000Z | ircodec/command.py | kentwait/ircodec | 9558e33475b964b10cedf592b7a4cf2b65456c5a | [
"MIT"
] | 1 | 2020-05-20T09:08:26.000Z | 2020-05-21T12:47:55.000Z | ircodec/command.py | kentwait/ircodec | 9558e33475b964b10cedf592b7a4cf2b65456c5a | [
"MIT"
] | 1 | 2021-03-07T16:49:07.000Z | 2021-03-07T16:49:07.000Z | """
IR command class and functions
"""
import time
import json
import pigpio
from ircodec.signal import Pulse, Gap, PulseClass, GapClass
from ircodec.signal import group_signals
from ircodec.utils import carrier_square_wave_generator
class Command(object):
    """Represents an IR command: an alternating sequence of carrier pulses
    ("marks") and silences ("spaces"), stored in ``signal_list``.

    Durations throughout are microseconds, matching pigpio tick units.
    """
    def __init__(self, name, ir_signal_list, description=''):
        # Accept either ready-made Signal objects or raw integer durations;
        # raw ints are wrapped alternately as Pulse (even index) / Gap (odd).
        self.name = name
        self.signal_list = ir_signal_list
        if ir_signal_list and isinstance(ir_signal_list[0], int):
            self.signal_list = [Gap(s) if i & 1 else Pulse(s)
                                for i, s in enumerate(ir_signal_list)]
        self.description = description
        # Filled in by normalize()/normalize_with(): one class per signal.
        self.signal_class_list = None
    def normalize(self, tolerance=0.1):
        """Classifies signals based on a tolerance and normalizes
        the list of signals.
        Parameters
        ----------
        tolerance : float
            Relative tolerance used when clustering pulse/gap durations.
        """
        pulse_classes, gap_classes = parse_command(self.signal_list, tolerance=tolerance)
        self.normalize_with(pulse_classes, gap_classes)
    def normalize_with(self, pulse_classes, gap_classes):
        """Normalizes the list of IR pulses and gaps using a set of
        reference pulse classes and gap classes.
        Parameters
        ----------
        pulse_classes : list of PulseClass
        gap_classes : list of GapClass
        """
        # Replaces signal_list with class-normalized durations and records
        # the matched class for every signal.
        self.signal_list, self.signal_class_list = \
            normalize_command(self.signal_list, pulse_classes, gap_classes, return_class=True)
    def emit(self, emitter_gpio: int, freq=38.0, emit_gap=0.1):
        """Emits the IR command pulses and gaps to a connected
        Raspberry Pi using the pigpio daemon.
        Parameters
        ----------
        emitter_gpio : int
            GPIO pin to output to
        freq : float
            Carrier frequency in kHz (38 kHz is a common IR carrier)
        emit_gap : float
            Gap in seconds to sleep after transmission completes
        """
        # Create wave
        pi = pigpio.pi()
        pi.set_mode(emitter_gpio, pigpio.OUTPUT)
        pi.wave_add_new()
        # Cache one pigpio wave id per distinct duration so repeated
        # marks/spaces reuse the same wave.
        signals = {}
        gaps = {}
        wave_list = [0] * len(self.signal_list)
        emit_time = time.time()
        for i, siglen in enumerate((s.length for s in self.signal_list)):
            if i & 1: # Space
                if siglen not in gaps:
                    pi.wave_add_generic([pigpio.pulse(0, 0, siglen)])
                    gaps[siglen] = pi.wave_create()
                wave_list[i] = gaps[siglen]
            else: # Mark
                if siglen not in signals:
                    wf = carrier_square_wave_generator(emitter_gpio, freq, siglen)
                    pi.wave_add_generic(wf)
                    signals[siglen] = pi.wave_create()
                wave_list[i] = signals[siglen]
        # NOTE(review): emit_time was taken before wave creation, so this
        # delay is never positive in practice — confirm intent.
        delay = emit_time - time.time()
        if delay > 0.0:
            time.sleep(delay)
        # Create wave chain
        pi.wave_chain(wave_list)
        # Busy-wait (with short sleeps) until the hardware finishes sending.
        while pi.wave_tx_busy():
            time.sleep(0.002)
        # emit_time = time.time() + emit_gap
        # Remove signal waves
        for signal in signals.values():
            pi.wave_delete(signal)
        # signals = {}
        # Remove gap values
        for gap in gaps.values():
            pi.wave_delete(gap)
        # gaps = {}
        pi.stop()
        time.sleep(emit_gap)
    @classmethod
    def from_json(cls, data):
        """Rebuild a Command from a JSON string or an already-parsed dict
        produced by to_json(); signal classes are resolved by the 'type'
        tag via globals().
        """
        # NOTE(review): if data is neither str nor dict, dct is never bound
        # and a NameError is raised below — consider validating explicitly.
        if isinstance(data, str):
            dct = json.loads(data)
        elif isinstance(data, dict):
            dct = data
        cmd = cls.__new__(cls)
        cmd.signal_list = [globals()[sig['type']].from_json(sig) for sig in dct['signal_list']]
        cmd.name = dct['name']
        cmd.description = dct['description']
        cmd.signal_class_list = [globals()[sig_cls['type']].from_json(sig_cls) for sig_cls in dct['signal_class_list']]
        return cmd
    def to_json(self):
        """Serialize to JSON; every object is encoded as its __dict__ plus
        a 'type' tag holding the class name (consumed by from_json).
        """
        return json.dumps(self, default=lambda o: {**{'type': o.__class__.__name__}, **o.__dict__})
    @classmethod
    def receive(cls, command_id, receiver_gpio: int, description='', glitch=0.000100,
                pre_duration=0.2, post_duration=0.015, length_threshold=10):
        """Receives IR command pulses and gaps from GPIO pin of a connected
        Raspberry Pi using the pigpio daemon. Blocks until a command longer
        than ``length_threshold`` edges has been captured.
        Parameters
        ----------
        command_id : str
            Name of the command
        receiver_gpio : int
            GPIO pin to read signals from
        description : str
            Short description for the IR command
        glitch : float
            Ignore edges shorter than this duration (seconds)
        pre_duration : float
            Expected number of seconds of silence before start of IR signals
        post_duration : float
            Expected number of seconds of silence after completion of IR signals
        length_threshold : float
            Reject detected IR command if it has less than this number of pulses
        """
        # Convert seconds to microseconds (pigpio tick units).
        glitch = int(glitch * 1000 * 1000)
        pre_duration = int(pre_duration * 1000 * 1000)
        post_duration = int(post_duration * 1000 * 1000)
        # Set initial values
        fetching_code = False
        ir_signal_list = []
        in_code = False
        last_tick = 0
        # Edge callback: state machine driven by gaps between edges.
        # pigpio.tickDiff handles the 32-bit tick counter wraparound.
        def callback(gpio, level, tick):
            nonlocal fetching_code, ir_signal_list, in_code, last_tick
            if level != pigpio.TIMEOUT:
                edge = pigpio.tickDiff(last_tick, tick)
                last_tick = tick
                if fetching_code == True:
                    if (edge > pre_duration) and (not in_code): # Start of a code.
                        in_code = True
                        pi.set_watchdog(gpio, post_duration) # Start watchdog.
                    elif (edge > post_duration) and in_code: # End of a code.
                        in_code = False
                        pi.set_watchdog(gpio, 0) # Cancel watchdog.
                        # Finish
                        if len(ir_signal_list) > length_threshold:
                            fetching_code = False
                        else:
                            ir_signal_list = []
                            print("Received IR command is too short, please try again")
                    elif in_code:
                        ir_signal_list.append(edge)
            else:
                # Watchdog timeout: no edges for post_duration — end of code.
                pi.set_watchdog(gpio, 0) # Cancel watchdog.
                if in_code:
                    in_code = False
                    # Finish
                    if len(ir_signal_list) > length_threshold:
                        fetching_code = False
                    else:
                        ir_signal_list = []
                        print("Received IR command is too short, please try again")
            # print(gpio, level, tick)
        pi = pigpio.pi()
        pi.set_mode(receiver_gpio, pigpio.INPUT) # IR RX connected to this GPIO.
        print('Connected to pigpio')
        pi.set_glitch_filter(receiver_gpio, glitch) # Ignore glitches.
        # Assign a callback function
        print('Detecting IR command...')
        cb = pi.callback(receiver_gpio, pigpio.EITHER_EDGE, callback)
        fetching_code = True
        # Block the caller until the callback declares the capture complete.
        while fetching_code == True:
            time.sleep(0.1)
        print('Received.')
        pi.set_glitch_filter(receiver_gpio, 0) # Cancel glitch filter.
        pi.set_watchdog(receiver_gpio, 0) # Cancel watchdog.
        pi.stop()
        return cls(command_id, [Gap(s) if i & 1 else Pulse(s) for i, s in enumerate(ir_signal_list)],
                   description=description)
    def __repr__(self):
        return '{}(name={}, signal_list={})'.format(self.__class__.__name__, self.name, self.signal_list)
class CommandSet(object):
    """Represents a set of IR commands.
    For example, a CommandSet can be used to represent
    all the commands of a single remote control.

    Commands are stored in ``self.commands`` keyed by a caller-chosen id.
    """
    def __init__(self, name, emitter_gpio=None, receiver_gpio=None, description=''):
        """Creates a blank CommandSet
        Parameters
        ----------
        name : str
            Name of the command set (e.g. the remote control it models)
        emitter_gpio : int
            GPIO pin to output to
        receiver_gpio : int
            GPIO pin to read from
        description : str
            Short description about the command set, usually describing the
            device it controls.
        """
        self.name = name
        self.emitter_gpio = emitter_gpio
        self.receiver_gpio = receiver_gpio
        self.commands = dict()
        self.description = description
    def set_receiver_gpio(self, gpio_pin):
        """Sets the GPIO pin that is connected to the IR receiver
        Parameters
        ----------
        gpio_pin : int
        """
        self.receiver_gpio = gpio_pin
    def set_emitter_gpio(self, gpio_pin):
        """Sets the GPIO pin that is connected to the IR transmitter
        Parameters
        ----------
        gpio_pin : int
        """
        self.emitter_gpio = gpio_pin
    def add(self, command_id, description='', **kwargs):
        """Adds a new IR command to the command set.
        This will initiate detection of IR signals from the IR receiver
        (blocking until a command is captured) and normalize the result.
        Parameters
        ----------
        command_id : int or str
            Unique but descriptive used to refer to the command
        description : str
            Short description about the IR command
        kwargs
            Keyword arguments used by Command.receive to set-up
            receiving IR signals
        """
        self.commands[command_id] = \
            Command.receive(command_id, self.receiver_gpio, description=description, **kwargs)
        self.commands[command_id].normalize()
    def remove(self, command_id):
        """Removes an IR command from the command set
        Parameters
        ----------
        command_id : int or str
            Key to retrieve the command

        Raises KeyError if the command id is unknown.
        """
        del self.commands[command_id]
    def emit(self, command_id, **kwargs):
        """Emit the IR command for the given command_id.
        Parameters
        ----------
        command_id : int or str
            Key to retrieve the command
        kwargs
            Keyword arguments used by Command.emit to set-up
            sending of IR signals
        """
        self.commands[command_id].emit(self.emitter_gpio, **kwargs)
    def send(self, command_id, **kwargs):
        """Send the IR command for the given command_id. Same as emit.
        Parameters
        ----------
        command_id : int or str
            Key to retrieve the command
        kwargs
            Keyword arguments used by Command.emit to set-up
            sending of IR signals
        """
        self.emit(command_id, **kwargs)
    @classmethod
    def from_json(cls, data):
        """Rebuild a CommandSet from a JSON string or parsed dict produced
        by to_json(); classes are resolved by their 'type' tags via globals().
        """
        # NOTE(review): if data is neither str nor dict, dct is never bound
        # and a NameError is raised below — consider validating explicitly.
        if isinstance(data, str):
            dct = json.loads(data)
        elif isinstance(data, dict):
            dct = data
        cmd_set = globals()[dct['type']].__new__(cls)
        cmd_set.name = dct['name']
        cmd_set.emitter_gpio = dct['emitter_gpio']
        cmd_set.receiver_gpio = dct['receiver_gpio']
        cmd_set.description = dct['description']
        cmd_set.commands = {k: globals()[cmd['type']].from_json(cmd) for k, cmd in dct['commands'].items()}
        return cmd_set
    def to_json(self):
        """Serialize to JSON; every object is encoded as its __dict__ plus
        a 'type' tag holding the class name (consumed by from_json).
        """
        return json.dumps(self, default=lambda o: {**{'type': o.__class__.__name__}, **o.__dict__})
    @classmethod
    def load(cls, path, format='json'):
        """Load a CommandSet from ``path``; only the 'json' format exists."""
        if format.lower() == 'json':
            with open(path, 'r') as reader:
                return cls.from_json(json.load(reader))
        else:
            raise NotImplementedError('selected format ({}) is not available'.format(format))
    def save_as(self, path, format='json'):
        """Write this CommandSet to ``path``; only the 'json' format exists."""
        if format.lower() == 'json':
            with open(path, 'w') as writer:
                print(self.to_json(), file=writer)
        else:
            raise NotImplementedError('selected format ({}) is not available'.format(format))
    def __repr__(self):
        return '{}(name={}, emitter={}, receiver={}, description="{}")\n{}'.format(
            self.__class__.__name__, self.name, self.emitter_gpio, self.receiver_gpio,
            self.description, repr(self.commands)
        )
def parse_command(ir_signal_list, tolerance=0.1):
    """Parses the set of IR pulses and gaps received from
    a single command into pulse and gap classes using a
    given tolerance value.
    Parameters
    ----------
    ir_signal_list : list of Signal
        List of pulses and gaps read by the IR receiver for a
        single command. Pulses are at even indices, gaps at odd indices.
    tolerance : float
        Relative difference in duration of a signal compared to
        the next longer signal when pulses or gaps are arranged
        in increasing order. If the next signal falls outside the
        tolerance value, the next signal becomes the lower bound for
        a new signal class.
    Returns
    -------
    list of PulseClass, list of GapClass
    """
    # NOTE(review): `tolerance` is documented above but never forwarded to
    # group_signals() below, so grouping always uses that function's own
    # default — confirm whether group_signals should receive it.
    # Separate interleaved pulses and gaps
    grouped_pulses = group_signals(ir_signal_list[::2])
    grouped_gaps = group_signals(ir_signal_list[1::2])
    # Classify into types
    pulse_classes = [PulseClass(pulses) for pulses in grouped_pulses]
    gap_classes = [GapClass(gaps) for gaps in grouped_gaps]
    return pulse_classes, gap_classes
def normalize_command(ir_signal_list, pulse_classes, gap_classes, return_class=False):
    """Creates a normalized series of IR pulses and gaps
    for a particular IR command.

    Parameters
    ----------
    ir_signal_list : list of Signal
        List of pulses and gaps read by the IR receiver for a
        single command. Expected to alternate pulse, gap, ..., ending
        with a pulse (odd length).
    pulse_classes : list of PulseClass
        PulseClass objects to compare to to create a normalized command
        from an unormalized signal list.
    gap_classes : list of GapClass
        GapClass objects to compare to to create a normalized command
        from an unormalized signal list.
    return_class : bool
        If True, also return the list of matched signal classes.

    Returns
    -------
    list of Signal, or (list of Signal, list of SignalClass) when
    ``return_class`` is True.

    Raises
    ------
    Exception
        If any pulse or gap cannot be matched to one of the given classes.
    """
    def _match(signal, classes):
        # Return the matching class (last match wins, preserving the
        # original scan order), or None if nothing matches.
        matched = None
        for signal_class in classes:
            if signal in signal_class:
                matched = signal_class
        return matched

    if not ir_signal_list:
        # BUG FIX: an empty list used to raise IndexError on ir_signal_list[-1].
        return ([], []) if return_class else []
    signal_class_list = []
    for pulse, gap in zip(ir_signal_list[:-1:2], ir_signal_list[1::2]):
        pulse_type = _match(pulse, pulse_classes)
        gap_type = _match(gap, gap_classes)
        if pulse_type is None:
            raise Exception('Could not normalize pulse: {}'.format(pulse))
        if gap_type is None:
            raise Exception('Could not normalize gap: {}'.format(gap))
        signal_class_list.append(pulse_type)
        signal_class_list.append(gap_type)
    # Last pulse. BUG FIX: the match is recomputed from scratch here so a
    # stale pulse_type left over from the loop above can no longer mask an
    # unmatched trailing pulse.
    pulse = ir_signal_list[-1]
    pulse_type = _match(pulse, pulse_classes)
    if pulse_type is None:
        raise Exception('Could not normalize pulse: {}'.format(pulse))
    signal_class_list.append(pulse_type)
    normalized_signal_list = [s.normalized() for s in signal_class_list]
    if return_class:
        return normalized_signal_list, signal_class_list
    return normalized_signal_list
267242d3cae02fedebcdd5ff2b7c45a5f3ac8913 | 1,198 | py | Python | Triggering-Lambda-from-SQS/send_message.py | Kunal-Karnik/content-lambda-boto3 | 0bde85c1bc036c8000505ff9969be5f7666a12c2 | [
"MIT"
] | null | null | null | Triggering-Lambda-from-SQS/send_message.py | Kunal-Karnik/content-lambda-boto3 | 0bde85c1bc036c8000505ff9969be5f7666a12c2 | [
"MIT"
] | null | null | null | Triggering-Lambda-from-SQS/send_message.py | Kunal-Karnik/content-lambda-boto3 | 0bde85c1bc036c8000505ff9969be5f7666a12c2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
import argparse
import logging
import sys
from time import sleep
import boto3
from faker import Faker
parser = argparse.ArgumentParser()
parser.add_argument("--queue-name", "-q", required=True,
help="SQS queue name")
parser.add_argument("--interval", "-i", required=True,
help="timer interval", type=float)
parser.add_argument("--message", "-m", help="message to send")
parser.add_argument("--log", "-l", default="INFO",
help="logging level")
args = parser.parse_args()
if args.log:
logging.basicConfig(
format='[%(levelname)s] %(message)s', level=args.log)
else:
parser.print_help(sys.stderr)
sqs = boto3.client('sqs')
response = sqs.get_queue_url(QueueName=args.queue_name)
queue_url = response['QueueUrl']
logging.info(queue_url)
while True:
message = args.message
if not args.message:
fake = Faker()
message = fake.text()
logging.info('Sending message: ' + message)
response = sqs.send_message(
QueueUrl=queue_url, MessageBody=message)
logging.info('MessageId: ' + response['MessageId'])
sleep(args.interval)
| 24.44898 | 62 | 0.656093 |
45b04d72707911e508ba26b2d4d0baf7005311df | 348 | py | Python | stu_manage/views.py | DreamingNight/Student-Manage-System | 62245500e080318328b822a21b01db5b77b56685 | [
"MIT"
] | 1 | 2020-09-02T07:33:09.000Z | 2020-09-02T07:33:09.000Z | stu_manage/views.py | DreamingNight/Student-Manalyze-System | 62245500e080318328b822a21b01db5b77b56685 | [
"MIT"
] | null | null | null | stu_manage/views.py | DreamingNight/Student-Manalyze-System | 62245500e080318328b822a21b01db5b77b56685 | [
"MIT"
] | 1 | 2020-09-02T07:22:20.000Z | 2020-09-02T07:22:20.000Z | from django.shortcuts import render
# Create your views here.
from .models import Student
def index(request):
# 主页
return render(request, 'stu_manage/index.html')
def students(request):
# 显示学生名单
stu = Student.objects.order_by('stu_id')
context = {'stu': stu}
return render(request, 'stu_manage/students.html', context)
| 20.470588 | 63 | 0.701149 |
f8e98854a12123438e63e6fd87a44801cfc5c9a1 | 6,475 | py | Python | py-polars/polars/datatypes.py | LemonPy29/polars | 381fe0205cab64c88b985bb0fe50b566f7bf7c82 | [
"MIT"
] | 1 | 2022-01-13T15:49:06.000Z | 2022-01-13T15:49:06.000Z | py-polars/polars/datatypes.py | bfeif/polars | 062172166e4b9e05a51fa29208990ee4ddb21272 | [
"MIT"
] | null | null | null | py-polars/polars/datatypes.py | bfeif/polars | 062172166e4b9e05a51fa29208990ee4ddb21272 | [
"MIT"
] | 1 | 2021-10-12T12:19:37.000Z | 2021-10-12T12:19:37.000Z | import ctypes
import typing as tp
from typing import Any, Callable, Dict, Sequence, Type
import numpy as np
import pyarrow as pa
from _ctypes import _SimpleCData
# The compiled Rust extension is absent in some environments (e.g. while
# building documentation); _DOCUMENTING gates everything that needs PySeries.
try:
    from polars.polars import PySeries

    _DOCUMENTING = False
except ImportError:
    _DOCUMENTING = True

# Public API of this module.
__all__ = [
    "DataType",
    "Int8",
    "Int16",
    "Int32",
    "Int64",
    "UInt8",
    "UInt16",
    "UInt32",
    "UInt64",
    "Float32",
    "Float64",
    "Boolean",
    "Utf8",
    "List",
    "Date32",
    "Date64",
    "Object",
    "Categorical",
    "DTYPES",
    "DTYPE_TO_FFINAME",
    "date_like_to_physical",
    "dtype_to_ctype",
    "pytype_to_polars_type",
]
class DataType:
    """Base class for all polars column data types (used as type tags)."""
    pass


# Signed integers
class Int8(DataType):
    pass


class Int16(DataType):
    pass


class Int32(DataType):
    pass


class Int64(DataType):
    pass


# Unsigned integers
class UInt8(DataType):
    pass


class UInt16(DataType):
    pass


class UInt32(DataType):
    pass


class UInt64(DataType):
    pass


# Floating point
class Float32(DataType):
    pass


class Float64(DataType):
    pass


class Boolean(DataType):
    pass


class Utf8(DataType):
    pass


class List(DataType):
    pass


# Date/time types
class Date32(DataType):
    pass


class Date64(DataType):
    pass


class Time32Millisecond(DataType):
    pass


class Time32Second(DataType):
    pass


class Time64Nanosecond(DataType):
    pass


class Time64Microsecond(DataType):
    pass


class TimestampNanosecond(DataType):
    pass


class TimestampMicrosecond(DataType):
    pass


class TimestampMillisecond(DataType):
    pass


class TimestampSecond(DataType):
    pass


# Other
class Object(DataType):
    pass


class Categorical(DataType):
    pass
# Don't change the order of these!
DTYPES: tp.List[Type[DataType]] = [
    Int8,
    Int16,
    Int32,
    Int64,
    UInt8,
    UInt16,
    UInt32,
    UInt64,
    Float32,
    Float64,
    Boolean,
    Utf8,
    List,
    Date32,
    Date64,
    Time64Nanosecond,
    Object,
    Categorical,
]
# dtype -> short suffix used to build FFI function names (e.g. "i8", "f64").
DTYPE_TO_FFINAME: Dict[Type[DataType], str] = {
    Int8: "i8",
    Int16: "i16",
    Int32: "i32",
    Int64: "i64",
    UInt8: "u8",
    UInt16: "u16",
    UInt32: "u32",
    UInt64: "u64",
    Float32: "f32",
    Float64: "f64",
    Boolean: "bool",
    Utf8: "str",
    List: "list",
    Date32: "date32",
    Date64: "date64",
    Time64Nanosecond: "time64_nanosecond",
    Object: "object",
    Categorical: "categorical",
}
def date_like_to_physical(dtype: Type[DataType]) -> Type[DataType]:
    """Map a date-like dtype to the integer dtype that physically backs it.

    Any other dtype is returned unchanged.
    """
    # TODO: add more
    physical = {Date32: Int32, Date64: Int64}
    return physical.get(dtype, dtype)
def dtype_to_ctype(dtype: Type[DataType]) -> Type[_SimpleCData]:  # noqa: F821
    """Return the ctypes scalar type corresponding to a polars dtype.

    Raises NotImplementedError for dtypes without a ctypes counterpart.
    """
    mapping = {
        UInt8: ctypes.c_uint8,
        UInt16: ctypes.c_uint16,
        UInt32: ctypes.c_uint,
        UInt64: ctypes.c_ulong,
        Int8: ctypes.c_int8,
        Int16: ctypes.c_int16,
        Int32: ctypes.c_int,
        Int64: ctypes.c_long,
        Float32: ctypes.c_float,
        Float64: ctypes.c_double,
        # Date types are backed by their physical integer representation.
        Date32: ctypes.c_int,
        Date64: ctypes.c_long,
    }
    if dtype not in mapping:
        raise NotImplementedError
    return mapping[dtype]
def pytype_to_polars_type(data_type: Type[Any]) -> Type[DataType]:
    """Translate a builtin Python type to the matching polars dtype.

    Types without a builtin mapping are passed through unchanged.
    """
    builtin_map = {int: Int64, str: Utf8, float: Float64}
    return builtin_map.get(data_type, data_type)
if not _DOCUMENTING:
    # polars dtype -> PySeries constructor (nullable "opt" variants where
    # available).
    _POLARS_TYPE_TO_CONSTRUCTOR = {
        Float32: PySeries.new_opt_f32,
        Float64: PySeries.new_opt_f64,
        Int8: PySeries.new_opt_i8,
        Int16: PySeries.new_opt_i16,
        Int32: PySeries.new_opt_i32,
        Int64: PySeries.new_opt_i64,
        UInt8: PySeries.new_opt_u8,
        UInt16: PySeries.new_opt_u16,
        UInt32: PySeries.new_opt_u32,
        UInt64: PySeries.new_opt_u64,
        Date32: PySeries.new_opt_i32,
        # NOTE(review): Date64 maps to the 32-bit constructor here, while
        # dtype_to_ctype treats Date64 as 64-bit -- confirm this is intended.
        Date64: PySeries.new_opt_i32,
        Boolean: PySeries.new_opt_bool,
        Utf8: PySeries.new_str,
        Object: PySeries.new_object,
    }
def polars_type_to_constructor(
    dtype: Type[DataType],
) -> Callable[[str, Sequence[Any], bool], "PySeries"]:
    """
    Get the right PySeries constructor for the given Polars dtype.

    Raises ValueError for dtypes with no registered constructor.
    """
    try:
        ctor = _POLARS_TYPE_TO_CONSTRUCTOR[dtype]
    except KeyError:
        raise ValueError(f"Cannot construct PySeries for type {dtype}.")
    return ctor
if not _DOCUMENTING:
    # numpy scalar type -> PySeries constructor (non-nullable variants).
    _NUMPY_TYPE_TO_CONSTRUCTOR = {
        np.float32: PySeries.new_f32,
        np.float64: PySeries.new_f64,
        np.int8: PySeries.new_i8,
        np.int16: PySeries.new_i16,
        np.int32: PySeries.new_i32,
        np.int64: PySeries.new_i64,
        np.uint8: PySeries.new_u8,
        np.uint16: PySeries.new_u16,
        np.uint32: PySeries.new_u32,
        np.uint64: PySeries.new_u64,
        np.str_: PySeries.new_str,
        np.bool_: PySeries.new_bool,
    }
def numpy_type_to_constructor(dtype: Type[np.dtype]) -> Callable[..., "PySeries"]:
    """
    Get the right PySeries constructor for the given numpy dtype.

    Unrecognized dtypes fall back to the generic object constructor.
    """
    return _NUMPY_TYPE_TO_CONSTRUCTOR.get(dtype, PySeries.new_object)
if not _DOCUMENTING:
    # builtin Python type -> PySeries constructor (nullable variants).
    _PY_TYPE_TO_CONSTRUCTOR = {
        float: PySeries.new_opt_f64,
        int: PySeries.new_opt_i64,
        str: PySeries.new_str,
        bool: PySeries.new_opt_bool,
    }
def py_type_to_constructor(dtype: Type[Any]) -> Callable[..., "PySeries"]:
    """
    Get the right PySeries constructor for the given Python dtype.

    Unrecognized types fall back to the generic object constructor.
    """
    return _PY_TYPE_TO_CONSTRUCTOR.get(dtype, PySeries.new_object)
if not _DOCUMENTING:
    # builtin Python type -> Arrow data type.
    _PY_TYPE_TO_ARROW_TYPE = {
        float: pa.float64(),
        int: pa.int64(),
        str: pa.large_utf8(),
        bool: pa.bool_(),
    }
def py_type_to_arrow_type(dtype: Type[Any]) -> pa.lib.DataType:
    """
    Convert a Python dtype to an Arrow dtype.

    Raises ValueError for types with no Arrow counterpart.
    """
    if dtype not in _PY_TYPE_TO_ARROW_TYPE:
        raise ValueError(f"Cannot parse dtype {dtype} into arrow dtype.")
    return _PY_TYPE_TO_ARROW_TYPE[dtype]
| 19.156805 | 82 | 0.632278 |
4ac73c76dc0d57811e09f4d041d3eafa9c529b7d | 9,288 | py | Python | tools/deploy/export_model.py | mmabrouk/detectron2 | 158e395acdb8ca6ed6d488b43475f9ef9d200405 | [
"Apache-2.0"
] | 99 | 2022-03-22T23:54:57.000Z | 2022-03-31T17:05:37.000Z | tools/deploy/export_model.py | mmabrouk/detectron2 | 158e395acdb8ca6ed6d488b43475f9ef9d200405 | [
"Apache-2.0"
] | 3 | 2022-03-23T03:53:16.000Z | 2022-03-26T11:00:55.000Z | tools/deploy/export_model.py | mmabrouk/detectron2 | 158e395acdb8ca6ed6d488b43475f9ef9d200405 | [
"Apache-2.0"
] | 6 | 2022-03-25T02:26:06.000Z | 2022-03-31T02:17:53.000Z | #!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
import argparse
import os
from typing import Dict, List, Tuple
import torch
from torch import Tensor, nn
import detectron2.data.transforms as T
from detectron2.checkpoint import DetectionCheckpointer
from detectron2.config import get_cfg
from detectron2.data import build_detection_test_loader, detection_utils
from detectron2.evaluation import COCOEvaluator, inference_on_dataset, print_csv_format
from detectron2.export import (
Caffe2Tracer,
TracingAdapter,
add_export_config,
dump_torchscript_IR,
scripting_with_instances,
)
from detectron2.modeling import GeneralizedRCNN, RetinaNet, build_model
from detectron2.modeling.postprocessing import detector_postprocess
from detectron2.projects.point_rend import add_pointrend_config
from detectron2.structures import Boxes
from detectron2.utils.env import TORCH_VERSION
from detectron2.utils.file_io import PathManager
from detectron2.utils.logger import setup_logger
def setup_cfg(args):
    """Build a frozen detectron2 config from the parsed CLI arguments.

    Order matters: export/PointRend defaults are registered before the
    user's config file and command-line overrides are merged in.
    """
    cfg = get_cfg()
    # cuda context is initialized before creating dataloader, so we don't fork anymore
    cfg.DATALOADER.NUM_WORKERS = 0
    cfg = add_export_config(cfg)
    add_pointrend_config(cfg)
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    return cfg
def export_caffe2_tracing(cfg, torch_model, inputs):
    """Export via Caffe2Tracer in the format selected by the global `args`.

    Writes the exported artifacts into `args.output`.  Returns the caffe2
    model only for the "caffe2" format; other formats return None.
    """
    tracer = Caffe2Tracer(cfg, torch_model, inputs)
    if args.format == "caffe2":
        caffe2_model = tracer.export_caffe2()
        caffe2_model.save_protobuf(args.output)
        # draw the caffe2 graph
        caffe2_model.save_graph(os.path.join(args.output, "model.svg"), inputs=inputs)
        return caffe2_model
    elif args.format == "onnx":
        import onnx

        onnx_model = tracer.export_onnx()
        onnx.save(onnx_model, os.path.join(args.output, "model.onnx"))
    elif args.format == "torchscript":
        ts_model = tracer.export_torchscript()
        with PathManager.open(os.path.join(args.output, "model.ts"), "wb") as f:
            torch.jit.save(ts_model, f)
        dump_torchscript_IR(ts_model, args.output)
# experimental. API not yet final
def export_scripting(torch_model):
    """Export the model with TorchScript scripting and save it to `args.output`.

    Instances are converted to plain field dicts (see adapter note below), so
    Python-side inference on the result is not implemented -- returns None.
    """
    assert TORCH_VERSION >= (1, 8)
    # Field name -> type map used by scripting_with_instances to type the
    # Instances attributes produced by the model.
    fields = {
        "proposal_boxes": Boxes,
        "objectness_logits": Tensor,
        "pred_boxes": Boxes,
        "scores": Tensor,
        "pred_classes": Tensor,
        "pred_masks": Tensor,
        "pred_keypoints": torch.Tensor,
        "pred_keypoint_heatmaps": torch.Tensor,
    }
    assert args.format == "torchscript", "Scripting only supports torchscript format."

    class ScriptableAdapterBase(nn.Module):
        # Use this adapter to workaround https://github.com/pytorch/pytorch/issues/46944
        # by not retuning instances but dicts. Otherwise the exported model is not deployable
        def __init__(self):
            super().__init__()
            self.model = torch_model
            self.eval()

    if isinstance(torch_model, GeneralizedRCNN):

        class ScriptableAdapter(ScriptableAdapterBase):
            def forward(self, inputs: Tuple[Dict[str, torch.Tensor]]) -> List[Dict[str, Tensor]]:
                # RCNN models take the raw-inference path (no postprocessing).
                instances = self.model.inference(inputs, do_postprocess=False)
                return [i.get_fields() for i in instances]

    else:

        class ScriptableAdapter(ScriptableAdapterBase):
            def forward(self, inputs: Tuple[Dict[str, torch.Tensor]]) -> List[Dict[str, Tensor]]:
                instances = self.model(inputs)
                return [i.get_fields() for i in instances]

    ts_model = scripting_with_instances(ScriptableAdapter(), fields)
    with PathManager.open(os.path.join(args.output, "model.ts"), "wb") as f:
        torch.jit.save(ts_model, f)
    dump_torchscript_IR(ts_model, args.output)
    # TODO inference in Python now missing postprocessing glue code
    return None
# experimental. API not yet final
def export_tracing(torch_model, inputs):
    """Export the model by tracing and save it to `args.output`.

    For the torchscript format on RCNN/RetinaNet models, returns a callable
    that re-applies the final resize postprocessing so --run-eval can use
    the exported model; otherwise returns None.
    """
    assert TORCH_VERSION >= (1, 8)
    image = inputs[0]["image"]
    inputs = [{"image": image}]  # remove other unused keys

    if isinstance(torch_model, GeneralizedRCNN):

        def inference(model, inputs):
            # use do_postprocess=False so it returns ROI mask
            inst = model.inference(inputs, do_postprocess=False)[0]
            return [{"instances": inst}]

    else:
        inference = None  # assume that we just call the model directly

    traceable_model = TracingAdapter(torch_model, inputs, inference)

    if args.format == "torchscript":
        ts_model = torch.jit.trace(traceable_model, (image,))
        with PathManager.open(os.path.join(args.output, "model.ts"), "wb") as f:
            torch.jit.save(ts_model, f)
        dump_torchscript_IR(ts_model, args.output)
    elif args.format == "onnx":
        with PathManager.open(os.path.join(args.output, "model.onnx"), "wb") as f:
            torch.onnx.export(traceable_model, (image,), f, opset_version=11)
    logger.info("Inputs schema: " + str(traceable_model.inputs_schema))
    logger.info("Outputs schema: " + str(traceable_model.outputs_schema))

    if args.format != "torchscript":
        return None
    if not isinstance(torch_model, (GeneralizedRCNN, RetinaNet)):
        return None

    def eval_wrapper(inputs):
        """
        The exported model does not contain the final resize step, which is typically
        unused in deployment but needed for evaluation. We add it manually here.
        """
        input = inputs[0]
        instances = traceable_model.outputs_schema(ts_model(input["image"]))[0]["instances"]
        postprocessed = detector_postprocess(instances, input["height"], input["width"])
        return [{"instances": postprocessed}]

    return eval_wrapper
def get_sample_inputs(args):
    """Produce one batch of model inputs.

    Uses the first batch of the configured test dataset unless the user
    supplied --sample-image, in which case the image is preprocessed the
    same way DefaultPredictor would.
    """
    if args.sample_image is None:
        # No image given: grab a first batch from the test dataset.
        loader = build_detection_test_loader(cfg, cfg.DATASETS.TEST[0])
        return next(iter(loader))

    # Read the sample image and apply DefaultPredictor-style resizing.
    raw_image = detection_utils.read_image(args.sample_image, format=cfg.INPUT.FORMAT)
    resize = T.ResizeShortestEdge(
        [cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST
    )
    height, width = raw_image.shape[:2]
    resized = resize.get_transform(raw_image).apply_image(raw_image)
    image_tensor = torch.as_tensor(resized.astype("float32").transpose(2, 0, 1))
    # Sample ready
    return [{"image": image_tensor, "height": height, "width": width}]
if __name__ == "__main__":
    # CLI entry point: parse arguments, build the model, export it in the
    # requested format, and optionally evaluate the exported model.
    parser = argparse.ArgumentParser(description="Export a model for deployment.")
    parser.add_argument(
        "--format",
        choices=["caffe2", "onnx", "torchscript"],
        help="output format",
        default="caffe2",
    )
    parser.add_argument(
        "--export-method",
        choices=["caffe2_tracing", "tracing", "scripting"],
        help="Method to export models",
        default="caffe2_tracing",
    )
    parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file")
    parser.add_argument("--sample-image", default=None, type=str, help="sample image for input")
    parser.add_argument("--run-eval", action="store_true")
    parser.add_argument("--output", help="output directory for the converted model")
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    args = parser.parse_args()
    logger = setup_logger()
    logger.info("Command line arguments: " + str(args))
    PathManager.mkdirs(args.output)
    # Disable respecialization on new shapes. Otherwise --run-eval will be slow
    torch._C._jit_set_bailout_depth(1)

    cfg = setup_cfg(args)

    # create a torch model
    torch_model = build_model(cfg)
    DetectionCheckpointer(torch_model).resume_or_load(cfg.MODEL.WEIGHTS)
    torch_model.eval()

    # get sample data
    sample_inputs = get_sample_inputs(args)

    # convert and save model
    if args.export_method == "caffe2_tracing":
        exported_model = export_caffe2_tracing(cfg, torch_model, sample_inputs)
    elif args.export_method == "scripting":
        exported_model = export_scripting(torch_model)
    elif args.export_method == "tracing":
        exported_model = export_tracing(torch_model, sample_inputs)

    # run evaluation with the converted model
    if args.run_eval:
        assert exported_model is not None, (
            "Python inference is not yet implemented for "
            f"export_method={args.export_method}, format={args.format}."
        )
        logger.info("Running evaluation ... this takes a long time if you export to CPU.")
        dataset = cfg.DATASETS.TEST[0]
        data_loader = build_detection_test_loader(cfg, dataset)
        # NOTE: hard-coded evaluator. change to the evaluator for your dataset
        evaluator = COCOEvaluator(dataset, output_dir=args.output)
        metrics = inference_on_dataset(exported_model, data_loader, evaluator)
        print_csv_format(metrics)
| 38.539419 | 97 | 0.685078 |
ed7fe4bf9410e742a417aa55671e662fdb02ecdb | 8,427 | py | Python | StockCluster.py | ruthgoon/StockRanker | bd5be3ce0964296684bdd37ed768898f0283b794 | [
"MIT"
] | null | null | null | StockCluster.py | ruthgoon/StockRanker | bd5be3ce0964296684bdd37ed768898f0283b794 | [
"MIT"
] | null | null | null | StockCluster.py | ruthgoon/StockRanker | bd5be3ce0964296684bdd37ed768898f0283b794 | [
"MIT"
] | 1 | 2021-09-28T23:03:10.000Z | 2021-09-28T23:03:10.000Z | from numpy.lib.arraysetops import isin
from numpy.lib.index_tricks import nd_grid
from sklearn.cluster import DBSCAN
from sklearn.neighbors import NearestNeighbors
from kneebow.rotor import Rotor
import numpy as np
import datetime
class StockCluster():
    """Weekly DBSCAN clustering of stocks based on their feature vectors."""

    def __init__(self, cluster_input=False):
        """
        The ClusterModel class is a DBSCAN model that creates a graph of each stock for each week and clusters
        stocks based on their feature vectors, grouping like stocks together.

        Parameters:
            - cluster_input (list|False) :: If a list is passed it will autoload the clusterer
        """
        self.input_data = cluster_input
        self.labels = False

    def cluster(self, input_val, labels, week_start, week_end, eps=False, min_samples=False):
        """
        Uses DBSCAN to create the cluster

        Parameters:
            - input_val (numpy.ndarray) :: Array of shape (n_weeks, n_stocks, n_features)
            - labels (list[list[str]]) :: Per-week lists of ticker labels
            - week_start, week_end (str) :: "%Y-%m-%d" date strings
            - eps (bool|float) :: The Epsilon value for DBSCAN. If False it will be set automatically
            - min_samples (bool|float) :: The number of samples to produce. If False, it will be set automatically
        Returns:
            - clusters (list[dict]) :: Returns a cluster object for each week given the shape of
                                       the input data. A cluster object is a dictionary with the
                                       keys: week_start, week_end, eps, num_samples, num_clusters,
                                       num_noise and stocks.
                                       The "stocks" key is a list of stock dicts with keys
                                       token (str), data (numpy.ndarray) and cluster (int),
                                       where cluster is the cluster ID (-1 for noise).
        """
        if not isinstance(input_val, np.ndarray):
            raise Exception("Input data not set")
        if not min_samples:
            min_samples = 4
        # NOTE(review): due to operator precedence this reads as
        # (eps and type(eps) == float) or (type(eps) == int) -- confirm the
        # intended condition is "eps is a scalar number".
        if eps and type(eps) == float or type(eps) == int:
            eps = [eps for _ in range(len(input_val))]

        if type(week_start) != str or type(week_end) != str:
            raise Exception("Week start/week end must be str")

        # generate the dbscan clusters for each week in the input data
        clusters = []
        current_week = week_start
        for i, week in enumerate(input_val):
            weekly_labels = labels[i]
            # Drop any feature column containing NaNs for this week.
            week = week[:, ~np.isnan(week).any(axis=0)]
            # week should be an np array of shape (n_stocks, n_features)
            if not eps:
                epsilon = self.optimize_eps(week)
            else:
                epsilon = eps[i]

            # dbscan
            model = DBSCAN(eps=epsilon, min_samples=min_samples).fit(week)
            # Number of clusters excluding the noise label (-1).
            num_clusters = len(set(model.labels_)) - \
                (1 if -1 in model.labels_ else 0)
            weekly_end_date = self._add_days_to_string(current_week)
            weekly_cluster = {
                "week_start": current_week,
                "week_end": weekly_end_date,
                "eps": epsilon,
                "num_samples": min_samples,
                "num_clusters": num_clusters,
                "num_noise": sum([1 if i == -1 else 0 for i in model.labels_]),
                "stocks": []
            }
            current_week = weekly_end_date
            # NOTE(review): the loop variable i below shadows the outer week
            # index; harmless here because the outer i is re-bound next
            # iteration, but worth renaming.
            for i, label in enumerate(model.labels_):
                weekly_vector = week[i]
                weekly_cluster['stocks'].append({
                    "token": weekly_labels[i],
                    "data": weekly_vector,
                    "cluster": label
                })
            clusters.append(weekly_cluster)
        return clusters

    def optimize_eps(self, input_data=False):
        """
        This function attempts to optimize the epsilon value for DBSCAN. Epsilon is the distance that one point
        must be to another for them to be considered neighbours. Optimizing epsilon involves finding the distance
        of any 2 neighbours, ordering in ascending order of distance, and finding the point of maximum curvature.

        Returns:
            - optimized_eps :: the row of input_data at the elbow index, or 50
              if the elbow computation fails.
        """
        if type(input_data) == bool:
            if not input_data:
                input_data = self.input_data
            else:
                raise Exception("Cannot accept true as input_data")
        elif type(input_data) != np.ndarray:
            raise Exception("Input data must be numpy array")

        try:
            rotor = Rotor()
            rotor.fit_rotate(input_data)
            idx = rotor.get_elbow_index()
            return input_data[idx]
        except Exception as e:
            # Fall back to a fixed epsilon if the elbow search fails.
            print(e)
            return 50

    def optimize_num_samples(self, total_num_entities):
        # Heuristic: min_samples as ~1.33% of the number of entities.
        return int(0.0133333333 * int(total_num_entities))

    def _format_samples(self, norm_vals=False):
        """
        Formats the self.input dictionary to be a numpy array of size (n_weeks, n_stocks, n_features),
        returns it along with a list of strings containing the ticker codes. Also sets self.input_data
        and self.labels to be the output when run automatically

        Returns:
            - labels, features (np.array, np.array) :: the label and feature np arrays
        """
        if not self.input_data:
            raise Exception("Input required to be loaded")

        weekly_vectors = []
        weekly_labels = []
        for week in self.input_data:
            week_keys = list(week.keys())
            ticker_labels = []
            # NOTE(review): num_features is computed but the vector width is
            # hard-coded to 33 below -- confirm whether 33 is intentional.
            num_features = len(week[week_keys[0]])
            vect = np.zeros((len(week_keys), 33))
            for i, ticker in enumerate(week_keys):
                components = week[ticker]
                for k, comp in enumerate(components):
                    try:
                        vect[i][k] = comp
                    except:
                        # Silently skip components that can't be stored
                        # (e.g. non-numeric or beyond 33 columns).
                        continue
                ticker_labels.append(ticker)
            weekly_vectors.append(vect)
            weekly_labels.append(ticker_labels)

        if norm_vals:
            # Normalize each week's matrix by its Frobenius norm.
            unorm = weekly_vectors
            weekly_vectors = []
            for week in unorm:
                normed = week / np.linalg.norm(week)
                weekly_vectors.append(normed)

        week_vector = np.array(weekly_vectors)
        self.input_data = week_vector
        self.labels = weekly_labels
        return week_vector, weekly_labels

    def _check_date_type(self, date, date_format):
        """
        Checks to see if a string obliges by a datetime format. Type safe as well

        Parameters:
            - date (str) :: A string representation of a datetime object
            - date_format (str) :: The format to check against

        Returns the parsed datetime object, or False if parsing fails.
        """
        if type(date) != str:
            date = str(date)
        try:
            test_date_object = datetime.datetime.strptime(date, date_format)
            return test_date_object
        except:
            return False

    def _add_days_to_string(self, input_string, n_days=7):
        """
        Takes a date string formatted in the form "%Y-%m-%d" and adds n_days to it.
        Returns a date string in the same form
        """
        date_obj = self._check_date_type(input_string, "%Y-%m-%d")
        if not date_obj:
            raise Exception("Date string improperly formatted")
        # the error above should literally never happen
        date_obj = date_obj + datetime.timedelta(days=n_days)
        return datetime.datetime.strftime(date_obj, "%Y-%m-%d")

    def set_input(self, input_data):
        # Replace the loaded input; only list input is accepted here.
        if type(input_data) != list:
            raise Exception("Input data must be in list form")
        self.input_data = input_data
| 38.655963 | 114 | 0.540762 |
949dee42503a9e1799008b467c997a46b85268d6 | 345 | py | Python | dev/Gems/Blast/houdini/python2.7libs/blastExport/lib/fbxSdk/__init__.py | brianherrera/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | [
"AML"
] | 1,738 | 2017-09-21T10:59:12.000Z | 2022-03-31T21:05:46.000Z | dev/Gems/Blast/houdini/python2.7libs/blastExport/lib/fbxSdk/__init__.py | ArchitectureStudios/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | [
"AML"
] | 427 | 2017-09-29T22:54:36.000Z | 2022-02-15T19:26:50.000Z | dev/Gems/Blast/houdini/python2.7libs/blastExport/lib/fbxSdk/__init__.py | ArchitectureStudios/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | [
"AML"
] | 671 | 2017-09-21T08:04:01.000Z | 2022-03-29T14:30:07.000Z | import sys
import os
print('Fbx imported from application %s using the %s interpreter' % (sys.executable, sys.version))
__bitDepth = 'win64'
__pythonVersion = sys.winver.replace('.', '')
__thisDirectory = os.path.dirname(__file__).replace('\\', '/')
__pydPath = '%s/sdk_2018_%s' % (__thisDirectory, __pythonVersion)
sys.path.append(__pydPath)
| 31.363636 | 98 | 0.733333 |
62b2295e26a7d3b392cb4d1a89e5f4549b284178 | 2,107 | py | Python | pi/rc_control.py | w84264423/raspi-driving-car | a291d33a68315921d589de3b5c502040118faf2e | [
"MIT"
] | 44 | 2017-04-04T13:14:57.000Z | 2021-12-03T16:36:00.000Z | pi/rc_control.py | w84264423/raspi-driving-car | a291d33a68315921d589de3b5c502040118faf2e | [
"MIT"
] | 3 | 2018-11-08T18:30:46.000Z | 2020-03-20T00:33:41.000Z | pi/rc_control.py | w84264423/raspi-driving-car | a291d33a68315921d589de3b5c502040118faf2e | [
"MIT"
] | 18 | 2017-12-06T23:45:02.000Z | 2020-04-01T14:57:08.000Z | import socket
# from rpiGPIO import *
import RPi.GPIO as GPIO
import time
left = 13
right = 15
forward = 11
reverse = 7
t = 0.03
GPIO.setmode(GPIO.BOARD) # Numbers pins by physical location
GPIO.setup(left, GPIO.OUT)
GPIO.output(left, GPIO.HIGH)
GPIO.setup(right, GPIO.OUT)
GPIO.output(right, GPIO.HIGH)
GPIO.setup(forward, GPIO.OUT)
GPIO.output(forward, GPIO.HIGH)
GPIO.setup(reverse, GPIO.OUT)
GPIO.output(reverse, GPIO.HIGH)
class rpiGPIOHelper(object):
def __init__(self):
print "start recving command data......"
self.__data = "pi"
# GPIO.setmode(GPIO.BOARD)
def right(self):
GPIO.output(15,0)
time.sleep(t)
GPIO.output(15,1)
print "pi car right."
def left(self):
GPIO.output(13,0)
time.sleep(t)
GPIO.output(13,1)
print "pi car left."
def up(self):
GPIO.output(11,0)
time.sleep(t)
GPIO.output(11,1)
print "pi car forwarding."
def down(self):
GPIO.output(7,0)
time.sleep(t)
GPIO.output(7,1)
print "pi car backward"
def turnright(self):
GPIO.output(15,0)
GPIO.output(11,0)
time.sleep(t)
GPIO.output(15,1)
GPIO.output(11,1)
print "pi car turnright"
def turnleft(self):
GPIO.output(13,0)
GPIO.output(11,0)
time.sleep(t+0.02)
GPIO.output(13,1)
GPIO.output(11,1)
print "pi car turnleft"
def clean(self):
global recv_turn
GPIO.cleanup()
recv_turn = False
print "Clean Done!!!!"
# constructure class object
gpio_helper = rpiGPIOHelper()
# Loop flag; set to False by rpiGPIOHelper.clean()
recv_turn = True
# ============socket================ #
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('172.14.1.126',8004))
# ============socket================ #
while recv_turn:
    pre_data = s.recv(1024)
    print pre_data
    # Command name is everything before the first 'O' delimiter.
    data = pre_data.split('O')[0]
    if not data: continue
    # Dispatch to the helper method whose name matches the command
    # (e.g. "up", "turnleft", "clean").
    func = getattr(gpio_helper,data)
    func()
    # s.sendall(data + " had recvied!")
s.close()
| 19.154545 | 63 | 0.578073 |
164eb4ab279abef27824b9af3a5304281d36b82a | 18,773 | py | Python | dace/codegen/tools/type_inference.py | noah95/dace | 7d5f28d210b2db3fdb112030d5f2b0be0540384b | [
"BSD-3-Clause"
] | null | null | null | dace/codegen/tools/type_inference.py | noah95/dace | 7d5f28d210b2db3fdb112030d5f2b0be0540384b | [
"BSD-3-Clause"
] | null | null | null | dace/codegen/tools/type_inference.py | noah95/dace | 7d5f28d210b2db3fdb112030d5f2b0be0540384b | [
"BSD-3-Clause"
] | 1 | 2021-03-04T13:01:48.000Z | 2021-03-04T13:01:48.000Z | # Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
"""
Type inference: traverses code and returns types for all undefined symbols according to C semantics
infer() has a lenient implementation: if something it not inferred (for example an unsupported construct) it will not
return anything and it will not produce errors
This module is inspired by astunparse: https://github.com/simonpercivall/astunparse
"""
import numpy as np
import ast
from dace import dtypes
from dace.codegen import cppunparse
from dace.symbolic import symbol, SymExpr, symstr
import sympy
import sys
def infer_types(code, symbols=None):
    """
    Perform type inference on the given code
    :param code: a string, AST, or symbolic expression (or a list of them)
    :param symbols: optional, already known symbols with their types, as a
                    dictionary "symbol name" -> dtypes.typeclass
    :return: a dictionary "symbol name" -> dtypes.typeclass of inferred symbols
    """
    known = symbols or {}
    inferred = {}
    if isinstance(code, str):
        _dispatch(ast.parse(code), known, inferred)
    elif isinstance(code, ast.AST):
        _dispatch(code, known, inferred)
    elif isinstance(code, (sympy.Basic, SymExpr)):
        _dispatch(ast.parse(symstr(code)), known, inferred)
    elif isinstance(code, list):
        # Infer each element in order; later elements see the symbols
        # inferred from earlier ones.
        accumulated = known.copy()
        for element in code:
            accumulated.update(inferred)
            inferred.update(infer_types(element, accumulated))
    return inferred
def infer_expr_type(code, symbols=None):
    """
    Return inferred type of a given expression
    :param code: code string (an expression), number, or symbolic expression
    :param symbols: already defined symbols (if any) in a dictionary
                    "symbol name" -> dtypes.typeclass
    :return: inferred type
    """
    defined = symbols or {}
    inferred = {}
    if isinstance(code, (str, float, int, complex)):
        tree = ast.parse(str(code))
    elif isinstance(code, (sympy.Basic, SymExpr)):
        tree = ast.parse(symstr(code))
    else:
        raise TypeError(f"Cannot convert type {type(code)} to a Python AST.")

    # The parsed AST must consist of a single expression statement.
    if hasattr(tree, "body") and isinstance(tree.body[0], ast.Expr):
        return _dispatch(tree.body[0], defined, inferred)
    raise TypeError("Expected expression, got: {}".format(type(code)))
def _dispatch(tree, symbols, inferred_symbols):
    """Dispatcher function, dispatching tree type T to method _T.

    Iterables are visited element-wise; a single AST node is routed to the
    module-level handler named "_<NodeClassName>".
    """
    try:
        tree = iter(tree)
        for t in tree:
            _dispatch(t, symbols, inferred_symbols)
    except TypeError:
        # Not iterable: dispatch on the node's class name.
        # NOTE(review): this except also catches TypeErrors raised while
        # dispatching the elements of an iterable above -- confirm intended.
        current_module = sys.modules[__name__]
        meth = getattr(current_module, "_" + tree.__class__.__name__)
        return meth(tree, symbols, inferred_symbols)
def _Module(tree, symbols, inferred_symbols):
    """Visit every top-level statement of a Module node."""
    for statement in tree.body:
        _dispatch(statement, symbols, inferred_symbols)
def _Interactive(tree, symbols, inferred_symbols):
    """Visit every statement of an Interactive (REPL) node."""
    for statement in tree.body:
        _dispatch(statement, symbols, inferred_symbols)
def _Expression(tree, symbols, inferred_symbols):
    """Infer and return the type of an Expression node's body."""
    return _dispatch(tree.body, symbols, inferred_symbols)
def _Expr(tree, symbols, inferred_symbols):
    """Infer and return the type of an expression statement's value."""
    return _dispatch(tree.value, symbols, inferred_symbols)
def _dispatch_lhs_tuple(targets, symbols, inferred_symbols):
    """Visit each target of a tuple assignment left-hand side."""
    for tgt in targets:
        _dispatch(tgt, symbols, inferred_symbols)
def _Assign(t, symbols, inferred_symbols):
    """Infer the type of an assignment target from its value when the
    target name is not yet known."""
    # Handle the case of a tuple output
    if len(t.targets) > 1:
        _dispatch_lhs_tuple(t.targets, symbols, inferred_symbols)
    else:
        target = t.targets[0]
        if isinstance(target, ast.Tuple):
            if len(target.elts) > 1:
                _dispatch_lhs_tuple(target.elts, symbols, inferred_symbols)
            # Single-element tuple: unwrap to the sole element.
            target = target.elts[0]

        if not isinstance(
                target, (ast.Subscript, ast.Attribute)
        ) and not target.id in symbols and not target.id in inferred_symbols:
            # the target is not already defined: we should try to infer the type looking at the value
            inferred_type = _dispatch(t.value, symbols, inferred_symbols)
            inferred_symbols[target.id] = inferred_type

        # NOTE(review): the return value of this dispatch is unused.
        inferred_type = _dispatch(target, symbols, inferred_symbols)
    _dispatch(t.value, symbols, inferred_symbols)
def _AugAssign(t, symbols, inferred_symbols):
    """Infer the target type of an augmented assignment (x += v etc.)
    from its value when the target name is not yet known."""
    _dispatch(t.target, symbols, inferred_symbols)
    # Operations that require a function call
    if t.op.__class__.__name__ in cppunparse.CPPUnparser.funcops:
        # NOTE(review): separator and func are looked up but never used here.
        separator, func = cppunparse.CPPUnparser.funcops[
            t.op.__class__.__name__]

        if not t.target.id in symbols and not t.target.id in inferred_symbols:
            _dispatch(t.target, symbols, inferred_symbols)
            inferred_type = _dispatch(t.value, symbols, inferred_symbols)
            inferred_symbols[t.target.id] = inferred_type
    else:
        # NOTE(review): when the target is already known, t.value is not
        # visited in this branch -- confirm intended.
        if not t.target.id in symbols and not t.target.id in inferred_symbols:
            inferred_type = _dispatch(t.value, symbols, inferred_symbols)
            inferred_symbols[t.target.id] = inferred_type
def _AnnAssign(t, symbols, inferred_symbols):
    """Infer an annotated assignment: the declared annotation supplies the
    type for a previously unknown target."""
    if isinstance(t.target, ast.Tuple):
        if len(t.target.elts) > 1:
            # NOTE(review): in this branch `target` is never bound, so the
            # target.id access below would raise NameError -- confirm.
            _dispatch_lhs_tuple(t.target.elts, symbols, inferred_symbols)
        else:
            target = t.target.elts[0]
    else:
        target = t.target
    # Assignment of the form x: int = 0 is converted to int x = (int)0;
    if not target.id in symbols and not target.id in inferred_symbols:
        # get the type indicated into the annotation
        inferred_type = _dispatch(t.annotation, symbols, inferred_symbols)
        inferred_symbols[target.id] = inferred_type

    _dispatch(t.annotation, symbols, inferred_symbols)
    _dispatch(t.target, symbols, inferred_symbols)
    if t.value:
        _dispatch(t.annotation, symbols, inferred_symbols)
        _dispatch(t.value, symbols, inferred_symbols)
def _Return(t, symbols, inferred_symbols):
    # Visit the returned expression (if any) for its inference side effects.
    if t.value:
        _dispatch(t.value, symbols, inferred_symbols)
def _generic_FunctionDef(t, symbols, inferred_symbols):
    """Walk a (possibly async) function definition: decorators, return
    annotation, arguments and body."""
    for deco in t.decorator_list:
        _dispatch(deco, symbols, inferred_symbols)
    if getattr(t, "returns", False):
        if isinstance(t.returns, ast.NameConstant):
            # Skip a literal `-> None` return annotation.
            if t.returns.value is not None:
                _dispatch(t.returns, symbols, inferred_symbols)
        else:
            _dispatch(t.returns, symbols, inferred_symbols)
    _dispatch(t.args, symbols, inferred_symbols)
    _dispatch(t.body, symbols, inferred_symbols)
def _FunctionDef(t, symbols, inferred_symbols):
    # Plain `def`: shared handling above.
    _generic_FunctionDef(t, symbols, inferred_symbols)
def _AsyncFunctionDef(t, symbols, inferred_symbols):
    # `async def`: same inference as a synchronous definition.
    _generic_FunctionDef(t, symbols, inferred_symbols)
def _generic_For(t, symbols, inferred_symbols):
    """Infer types for the loop target(s) of a (possibly async) for statement,
    then visit the iterable and body.

    Bug fix: the tuple-target branches previously used the AST ``Name``
    *objects* as membership keys and as keys into ``inferred_symbols``
    (``inferred_symbols[elt] = ...``), while every other handler — including
    the scalar branch below and the lookups in ``_Name`` — keys the table by
    the identifier *string*. Entries stored under AST objects could never be
    found again. Both branches now consistently use ``elt.id``.
    """
    if isinstance(t.target, ast.Tuple):
        # One loop covers both the single-element and multi-element cases.
        for elt in t.target.elts:
            if elt.id not in symbols and elt.id not in inferred_symbols:
                inferred_symbols[elt.id] = _dispatch(elt, symbols, inferred_symbols)
    else:
        inferred_type = _dispatch(t.target, symbols, inferred_symbols)
        if t.target.id not in symbols and t.target.id not in inferred_symbols:
            inferred_symbols[t.target.id] = inferred_type
    _dispatch(t.iter, symbols, inferred_symbols)
    _dispatch(t.body, symbols, inferred_symbols)
def _For(t, symbols, inferred_symbols):
    # Plain `for` loop: shared handling in _generic_For.
    _generic_For(t, symbols, inferred_symbols)
def _AsyncFor(t, symbols, inferred_symbols):
    # `async for` loop: same inference as a synchronous loop.
    _generic_For(t, symbols, inferred_symbols)
def _If(t, symbols, inferred_symbols):
    """Walk an if/elif/else chain for its inference side effects."""
    _dispatch(t.test, symbols, inferred_symbols)
    _dispatch(t.body, symbols, inferred_symbols)
    # Flatten `elif` chains: an orelse holding exactly one If is the next
    # `elif` branch, so walk the chain iteratively instead of recursing.
    while (t.orelse and len(t.orelse) == 1 and isinstance(t.orelse[0], ast.If)):
        t = t.orelse[0]
        _dispatch(t.test, symbols, inferred_symbols)
        _dispatch(t.body, symbols, inferred_symbols)
    # final else
    if t.orelse:
        _dispatch(t.orelse, symbols, inferred_symbols)
def _While(t, symbols, inferred_symbols):
    # Visit condition and body; the loop itself introduces no new types.
    _dispatch(t.test, symbols, inferred_symbols)
    _dispatch(t.body, symbols, inferred_symbols)
def _Str(t, symbols, inferred_symbols):
    # String literals are modeled as `char*` (pointer to int8).
    return dtypes.pointer(dtypes.int8)
def _FormattedValue(t, symbols, inferred_symbols):
    # FormattedValue(expr value, int? conversion, expr? format_spec)
    _dispatch(t.value, symbols, inferred_symbols)
    if t.format_spec is not None:
        # Literal format specs carry no type information of their own.
        if not isinstance(t.format_spec, ast.Str):
            _dispatch(t.format_spec, symbols, inferred_symbols)
def _JoinedStr(t, symbols, inferred_symbols):
    """f-string: visit the non-literal parts; the result is a `char*`."""
    for value in t.values:
        if not isinstance(value, ast.Str):
            _dispatch(value, symbols, inferred_symbols)
    return dtypes.pointer(dtypes.int8)
def _Name(t, symbols, inferred_symbols):
    """Resolve the type of an identifier.

    Lookup order: C reserved names, Python-type conversions, the caller's
    defined `symbols`, then previously `inferred_symbols`. Returns None if
    the name is unknown.
    """
    if t.id in cppunparse._py2c_reserved:
        return dtypes.typeclass(np.result_type(t.id))
    else:
        # check if this name is a python type, it is in defined_symbols or in local symbols.
        # If yes, take the type
        inferred_type = None
        # if this is a statement generated from a tasklet with a dynamic memlet, it could have a leading * (pointer)
        t_id = t.id[1:] if t.id.startswith('*') else t.id
        if t_id.strip("()") in cppunparse._py2c_typeconversion:
            inferred_type = cppunparse._py2c_typeconversion[t_id.strip("()")]
        elif t_id in symbols:
            # defined symbols could have dtypes, in case convert it to typeclass
            inferred_type = symbols[t_id]
            if isinstance(inferred_type, np.dtype):
                inferred_type = dtypes.typeclass(inferred_type.type)
            elif isinstance(inferred_type, symbol):
                inferred_type = inferred_type.dtype
        elif t_id in inferred_symbols:
            inferred_type = inferred_symbols[t_id]
        return inferred_type
def _NameConstant(t, symbols, inferred_symbols):
    # True/False/None: smallest scalar type reconciled with the Python type.
    return dtypes.result_type_of(
        dtypes.typeclass(type(t.value)),
        dtypes.typeclass(np.min_scalar_type(t.value).name))
def _Constant(t, symbols, inferred_symbols):
    """Type of an ast.Constant: `char*` for strings/bytes, otherwise the
    smallest numeric type able to represent the value."""
    # String value
    if isinstance(t.value, (str, bytes)):
        return dtypes.pointer(dtypes.int8)
    # Numeric value
    return dtypes.result_type_of(
        dtypes.typeclass(type(t.value)),
        dtypes.typeclass(np.min_scalar_type(t.value).name))
def _Num(t, symbols, inferred_symbols):
    # get the minimum between the minimum type needed to represent this number and the corresponding default data types
    # e.g., if num=1, then it will be represented by using the default integer type (int32 if C data types are used)
    return dtypes.result_type_of(dtypes.typeclass(type(t.n)),
                                 dtypes.typeclass(np.min_scalar_type(t.n).name))
def _IfExp(t, symbols, inferred_symbols):
    """A conditional expression's type is the common type of its two
    branches; the test expression is visited only for its side effects."""
    _dispatch(t.test, symbols, inferred_symbols)
    branch_types = [
        _dispatch(branch, symbols, inferred_symbols)
        for branch in (t.body, t.orelse)
    ]
    return dtypes.result_type_of(*branch_types)
def _Tuple(t, symbols, inferred_symbols):
    # Visit every element; a tuple has no single inferred type here.
    for elt in t.elts:
        _dispatch(elt, symbols, inferred_symbols)
def _UnaryOp(t, symbols, inferred_symbols):
    # A unary operation preserves the operand's type.
    return _dispatch(t.operand, symbols, inferred_symbols)
def _BinOp(t, symbols, inferred_symbols):
    """Infer the result type of a binary operation as the common type of
    the two operands. Integer powers are special-cased."""
    # Operations that require a function call
    if t.op.__class__.__name__ in cppunparse.CPPUnparser.funcops:
        separator, func = cppunparse.CPPUnparser.funcops[
            t.op.__class__.__name__]
        # get the type of left and right operands for type inference
        type_left = _dispatch(t.left, symbols, inferred_symbols)
        type_right = _dispatch(t.right, symbols, inferred_symbols)
        # infer type and returns
        return dtypes.result_type_of(type_left, type_right)
    # Special case for integer power
    elif t.op.__class__.__name__ == 'Pow':
        if (isinstance(t.right, (ast.Num, ast.Constant))
                and int(t.right.n) == t.right.n and t.right.n >= 0):
            # NOTE(review): when the exponent is exactly 0 nothing is
            # returned (implicit None) — confirm callers tolerate this.
            if t.right.n != 0:
                type_left = _dispatch(t.left, symbols, inferred_symbols)
                # x**n is expanded to repeated multiplication: visit the
                # base once per extra factor for side effects.
                for i in range(int(t.right.n) - 1):
                    _dispatch(t.left, symbols, inferred_symbols)
                return dtypes.result_type_of(type_left, dtypes.typeclass(np.uint32))
        else:
            type_left = _dispatch(t.left, symbols, inferred_symbols)
            type_right = _dispatch(t.right, symbols, inferred_symbols)
            return dtypes.result_type_of(type_left, type_right)
    else:
        # get left and right types for type inference
        type_left = _dispatch(t.left, symbols, inferred_symbols)
        type_right = _dispatch(t.right, symbols, inferred_symbols)
        return dtypes.result_type_of(type_left, type_right)
def _Compare(t, symbols, inferred_symbols):
    """Visit the operands of a comparison chain for side effects only;
    comparators with no C++ equivalent operator are skipped."""
    _dispatch(t.left, symbols, inferred_symbols)
    for o, e in zip(t.ops, t.comparators):
        if o.__class__.__name__ not in cppunparse.CPPUnparser.cmpops:
            continue
        _dispatch(e, symbols, inferred_symbols)
def _BoolOp(t, symbols, inferred_symbols):
    # and/or always produce a boolean.
    for v in t.values:
        _dispatch(v, symbols, inferred_symbols)
    return dtypes.typeclass(bool)
def _Attribute(t, symbols, inferred_symbols):
    # An attribute access is typed by its base value expression.
    inferred_type = _dispatch(t.value, symbols, inferred_symbols)
    return inferred_type
def _Call(t, symbols, inferred_symbols):
    # A call is typed by its callee expression; arguments are visited
    # only for their inference side effects.
    inf_type = _dispatch(t.func, symbols, inferred_symbols)
    for e in t.args:
        _dispatch(e, symbols, inferred_symbols)
    for e in t.keywords:
        _dispatch(e, symbols, inferred_symbols)
    return inf_type
def _Subscript(t, symbols, inferred_symbols):
    # A subscript is typed by the subscripted value.
    inferred_type = _dispatch(t.value, symbols, inferred_symbols)
    _dispatch(t.slice, symbols, inferred_symbols)
    return inferred_type
def _Index(t, symbols, inferred_symbols):
    _dispatch(t.value, symbols, inferred_symbols)
def _Slice(t, symbols, inferred_symbols):
    # Visit whichever of lower/upper/step are present.
    if t.lower:
        _dispatch(t.lower, symbols, inferred_symbols)
    if t.upper:
        _dispatch(t.upper, symbols, inferred_symbols)
    if t.step:
        _dispatch(t.step, symbols, inferred_symbols)
def _ExtSlice(t, symbols, inferred_symbols):
    # Multi-dimensional slice: visit each dimension.
    for d in t.dims:
        _dispatch(d, symbols, inferred_symbols)
# argument
def _arg(t, symbols, inferred_symbols):
    if t.annotation:
        #argument with annotation, we can derive the type
        inferred_type = _dispatch(t.annotation, symbols, inferred_symbols)
        inferred_symbols[t.arg] = inferred_type
# others
def _arguments(t, symbols, inferred_symbols):
    """Infer types for a function's formal arguments.

    Each argument (and its default, if any) is dispatched; constructs with
    no C++ equivalent (*args, keyword-only args, **kwargs) raise
    SyntaxError.

    Cleanup: removed the unused local `first` and the second, unreachable
    `kwonlyargs` check (the first check already raises on that condition).
    """
    # Align defaults with positional args: leading args have no default.
    defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
    for a, d in zip(t.args, defaults):
        _dispatch(a, symbols, inferred_symbols)
        if d:
            _dispatch(d, symbols, inferred_symbols)
    # varargs, or bare '*' if no varargs but keyword-only arguments present
    if t.vararg or getattr(t, "kwonlyargs", False):
        raise SyntaxError('Invalid C++')
    # kwargs
    if t.kwarg:
        raise SyntaxError('Invalid C++')
def _Lambda(t, symbols, inferred_symbols):
    # Visit the lambda's arguments and body for side effects.
    _dispatch(t.args, symbols, inferred_symbols)
    _dispatch(t.body, symbols, inferred_symbols)
#####################################################
# Constructs that are not involved in type inference
#####################################################
# Each handler below is an intentional no-op: the construct contributes
# nothing to symbol type inference.
def _Pass(t, symbols, inferred_symbols):
    pass
def _Break(t, symbols, inferred_symbols):
    pass
def _Continue(t, symbols, inferred_symbols):
    pass
def _Assert(t, symbols, inferred_symbols):
    #Nothing to infer
    pass
def _Print(t, symbols, inferred_symbols):
    #Nothing to infer
    pass
def _Raise(t, symbols, inferred_symbols):
    pass
def _Try(t, symbols, inferred_symbols):
    pass
def _TryExcept(t, symbols, inferred_symbols):
    pass
def _TryFinally(t, symbols, inferred_symbols):
    pass
def _ExceptHandler(t, symbols, inferred_symbols):
    pass
def _Bytes(t, symbols, inferred_symbols):
    pass
def _Ellipsis(t, symbols, inferred_symbols):
    pass
def _alias(t, symbols, inferred_symbols):
    pass
###########################################
# Constructs that are invalid C/C++: infer nothing
##########################################
def _Import(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _ImportFrom(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _Delete(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _Exec(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _Global(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _Nonlocal(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _Yield(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _YieldFrom(t, symbols, inferred_symbols):
    # Nothing to infer
    pass
def _ClassDef(t, symbols, inferred_symbols):
    pass
def _generic_With(t, symbols, inferred_symbols):
    pass
def _With(t, symbols, inferred_symbols):
    pass
def _AsyncWith(t, symbols, inferred_symbols):
    pass
def _Repr(t, symbols, inferred_symbols):
    pass
def _List(t, symbols, inferred_symbols):
    pass
def _ListComp(t, symbols, inferred_symbols):
    pass
def _GeneratorExp(t, symbols, inferred_symbols):
    pass
def _SetComp(t, symbols, inferred_symbols):
    pass
def _DictComp(t, symbols, inferred_symbols):
    pass
def _comprehension(t, symbols, inferred_symbols):
    pass
def _Set(t, symbols, inferred_symbols):
    pass
def _Dict(t, symbols, inferred_symbols):
    pass
def _Starred(t, symbols, inferred_symbols):
    pass
def _keyword(t, symbols, inferred_symbols):
    pass
def _withitem(t, symbols, inferred_symbols):
    pass
def _Await(t, symbols, inferred_symbols):
    pass
| 30.525203 | 126 | 0.680126 |
f4692bad3ca86b0470ac848726f18bf09b057839 | 5,860 | py | Python | run_tests_resilience.py | cag-uconn/graphite_multiprog | fc89341c0681fe4d54ddc1dee0b9e110aad8b4be | [
"MIT"
] | null | null | null | run_tests_resilience.py | cag-uconn/graphite_multiprog | fc89341c0681fe4d54ddc1dee0b9e110aad8b4be | [
"MIT"
] | null | null | null | run_tests_resilience.py | cag-uconn/graphite_multiprog | fc89341c0681fe4d54ddc1dee0b9e110aad8b4be | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys
import os
sys.path.append("./tools/")
from schedule import *
from config_64 import *
# job info
# Do not use 'localhost' or '127.0.0.1', use the machine name
machines = [
# "cag1",
"cag2",
# "cag3",
# "cag4",
# "cag5",
# "cag6",
# "cag7",
# "cag8",
]
results_dir = "./results/parsec_resilience/LP_2_64_checker_ifelse_inside_stores_ondelay_1"
cfg_file = "carbon_sim.cfg"
benchmark_list = [
# "patricia",
"fft",
# "radix",
# "lu_contiguous",
# "lu_non_contiguous",
# "cholesky",
# "barnes",
# "fmm",
# "ocean_contiguous",
# "ocean_non_contiguous",
# "water-nsquared",
# "water-spatial",
# "raytrace",
# "volrend",
# "blackscholes",
# "swaptions",
# "fluidanimate",
# "canneal",
# "streamcluster",
#"dedup",
# "ferret",
# "bodytrack",
#"facesim",
# "patricia",
# "static_concomp",
# "static_community",
# "matrix_multiply_blocked",
# "tsp",
# "susan"
]
#reexe = "true"
#reexe_onoff_en = "false"
resilience_message = "false"
#latency_hiding_en = "false"
instruction_interval = 100
ssb_size = 8
opport_en = "false"
extra_reexe_delay = 0
extra_reexe_delay_l1miss = 0
#resilience_setup_list= [("false","false","false"),("true","false","false"),("true","false","true"),("true","true","true"),("true","true","false")] #(reexe,onoff,latencyHiding)
#resilience_setup_list=[("false","false","false")]
resilience_setup_list= [("true","true","true")]
quantum =1000
num_cores = 64
cluster_size = 1 #[1, 4, 16, 64, 256]
#P2R_threshold_list = [1] #[1,3] # 3
P2R_threshold = 1
max_R2P_threshold = 16
num_R2P_threshold_levels = 4
page_table_enabled = "false" #"true"
page_size = 4096
l2_cache_homing_policy = "striped" #"r-nuca"
classifier_type = "L1_replica_locality"
classifier_granularity = "cache_line_level"
num_tracked_sharers = 3
num_tracked_cache_lines = 3
interleave_granularity = 64
# Compile all benchmarks first
for benchmark in benchmark_list:
if benchmark in parsec_list:
if (not os.path.exists("tests/parsec/parsec-3.0")):
print "[regress] Creating PARSEC applications directory."
os.system("make setup_parsec")
os.system("make %s_parsec BUILD_MODE=build" % (benchmark))
else:
os.system("make %s_bench_test BUILD_MODE=build" % (benchmark))
# Generate jobs
jobs = []
for benchmark in benchmark_list:
# Generate command
if benchmark in parsec_list:
command = "make %s_parsec" % (benchmark)
else:
command = "make %s_bench_test" % (benchmark)
# Get APP_FLAGS
app_flags = None
if benchmark in app_flags_table:
app_flags = app_flags_table[benchmark]
print command
print app_flags
for resilience_setup in resilience_setup_list:
sim_flags = "--general/total_cores=%i --general/enable_shared_mem=true " % (num_cores) + \
"--page_table/enabled=%s " % (page_table_enabled) + \
"--page_table/page_size=%i " % (page_size) + \
"--caching_protocol/type=locality_aware_protocol " + \
"--caching_protocol/locality_aware_protocol/l2_cache_homing_policy=%s " % (l2_cache_homing_policy) + \
"--caching_protocol/locality_aware_protocol/classifier_type=%s " % (classifier_type) + \
"--caching_protocol/locality_aware_protocol/classifier_granularity=%s " % (classifier_granularity) + \
"--caching_protocol/locality_aware_protocol/num_tracked_sharers=%i " % (num_tracked_sharers) + \
"--caching_protocol/locality_aware_protocol/num_tracked_cache_lines=%i " % (num_tracked_cache_lines) + \
"--caching_protocol/locality_aware_protocol/cluster_size=%i " % (cluster_size) + \
"--caching_protocol/locality_aware_protocol/interleave_granularity=%i " % (interleave_granularity) + \
"--caching_protocol/locality_aware_protocol/core/P2R_threshold=%i " % (P2R_threshold) + \
"--caching_protocol/locality_aware_protocol/core/max_R2P_threshold=%i " % (max_R2P_threshold) + \
"--caching_protocol/locality_aware_protocol/core/num_R2P_threshold_levels=%i " % (num_R2P_threshold_levels) + \
"--reexecution/resilient_cc_en=%s " % (resilience_message) + \
"--reexecution/latency_hiding_en=%s " % (resilience_setup[2]) + \
"--reexecution/instruction_interval=%i " % (instruction_interval) + \
"--reexecution/reexe=%s " % (resilience_setup[0]) + \
"--reexecution/reexe_onoff_en=%s " % (resilience_setup[1]) + \
"--reexecution/ssb_size=%i " % (ssb_size) + \
"--reexecution/opport_en=%s " % (opport_en) + \
"--reexecution/extra_reexe_delay_l1miss=%i " % (extra_reexe_delay_l1miss) + \
"--reexecution/extra_reexe_delay=%i " % (extra_reexe_delay) + \
"--clock_skew_management/lax_barrier/quantum=%i " % (quantum)
sub_dir = "%s--reexe-%s-onoff-%s-latencyHiding-%s" % (benchmark,resilience_setup[0],resilience_setup[1],resilience_setup[2])
print sim_flags
print sub_dir
jobs.append(MakeJob(1, command, cfg_file, results_dir, sub_dir, sim_flags, app_flags, "pin"))
# init
try:
os.makedirs(results_dir)
except OSError:
pass
shutil.copy(cfg_file, results_dir)
# go!
schedule(machines, jobs)
| 35.95092 | 176 | 0.601195 |
9c22fa6cff535b5540c1108706a7e385d6f60d85 | 6,750 | py | Python | python/node.py | yds05238/Chord-DHT | de4ce5f2e81f4a4528b6e5f4d90559fd37ddfbff | [
"MIT"
] | null | null | null | python/node.py | yds05238/Chord-DHT | de4ce5f2e81f4a4528b6e5f4d90559fd37ddfbff | [
"MIT"
] | null | null | null | python/node.py | yds05238/Chord-DHT | de4ce5f2e81f4a4528b6e5f4d90559fd37ddfbff | [
"MIT"
] | null | null | null | import asyncio
from hashlib import sha1
from typing import Any
import logging
import rpc
from abcchord import INode, INodeServer
from errors import InvalidRPC, NodeLeaveError
from network import is_between_ids, ChordNetwork, CHORD_PORT, MAINTENANCE_FREQUENCY
# set up logger: attach a stream handler with timestamps to the root logger
# and enable DEBUG output globally.
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)-8s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
def node_leave_corrector(func, retry_time=MAINTENANCE_FREQUENCY, max_retries=2):
    """Decorator for async node methods: when the call fails with
    NodeLeaveError, prune the departed node, wait `retry_time` seconds and
    retry, up to `max_retries` times before re-raising.

    Bug fix: the retry path previously did `await wrapper(...)` without
    returning its result, so any call that succeeded only after a retry
    returned None to the caller (e.g. `_find_successor` yielding None).
    """
    async def wrapper(self, *args, retries=0, **kwargs):
        try:
            return await func(self, *args, **kwargs)
        except NodeLeaveError:
            if retries >= max_retries:
                raise NodeLeaveError(f"Exceeded maximum retries amount: {max_retries}")
            # NOTE(review): Node defines `self.network`, not `self.local_node`;
            # this attribute access looks wrong — confirm against callers.
            self.local_node.network.remove_left_node(self)
            await asyncio.sleep(retry_time)
            # Propagate the retried call's result to the caller.
            return await wrapper(self, *args, retries=retries + 1, **kwargs)
    return wrapper
class Node(INode, INodeServer):
    """A locally hosted Chord node: owns a shard of the DHT key space,
    serves RPC requests over asyncio streams, and runs periodic ring
    maintenance."""
    def __init__(self, ip: str, port: int = CHORD_PORT):
        super().__init__(ip, port)
        self.network = ChordNetwork(self)  # finger table / ring topology
        self.hash_table = {}  # local shard: SHA-1 key (int) -> value
        self._predecessor = None
        self._server = None
        self._alive = False
        self._maintenance_task = None
    async def store(self, value: bytes) -> int:
        """Store `value` on the node responsible for its SHA-1 key and
        return that key. Raises InvalidRPC for non-bytes input."""
        if not isinstance(value, bytes):
            raise InvalidRPC()
        key = self._get_key_from_value(value)
        successor = await self._find_successor(key)
        if successor == self:
            logger.debug(f"({self.id}) - Stored value in local node: {key}")
            self.hash_table[key] = value
        else:
            # Forward to the responsible remote node.
            await successor.store(value)
        return key
    async def get(self, key: int) -> Any:
        """Fetch the value for `key` from whichever node owns it
        (None if absent)."""
        successor = await self._find_successor(key)
        if successor == self:
            return self.hash_table.get(key)
        else:
            return await successor.get(key)
    async def leave(self) -> None:
        # Leaving the ring just means shutting down the local server.
        await self._stop_server()
    async def _start_server(self):
        """Start the RPC server and the background maintenance loop."""
        self._server = await asyncio.start_server(
            self._handle_request, self.ip, self.port
        )
        self._maintenance_task = asyncio.create_task(self._run_maintenance_task())
        self._alive = True
    async def _stop_server(self):
        """Cancel maintenance and close the RPC server."""
        if self._maintenance_task:
            self._maintenance_task.cancel()
        self._server.close()
        await self._server.wait_closed()
        self._alive = False
    async def _is_alive(self):
        return self._alive
    async def _closest_preceding_finger(self, id: int) -> INode:
        """Return the closest known node preceding `id` (self if none)."""
        for finger in reversed(self.network.finger_table):
            if finger.node and is_between_ids(finger.node.id, self.id, id):
                return finger.node
        return self
    @node_leave_corrector
    async def _find_successor(self, id: int) -> INode:
        """Return the node responsible for `id` (Chord successor lookup)."""
        predecessor = await self.network._find_predecessor(id)
        successor = await predecessor._get_successor()
        return successor
    async def _notify(self, node: INode) -> None:
        # Chord notify: adopt `node` as predecessor if it is a closer one.
        if not self._predecessor or is_between_ids(
            node.id, self._predecessor.id, self.id
        ):
            self._predecessor = node
    async def _update_finger_table(self, node: INode, index: int) -> None:
        """Install `node` in finger `index` if closer, then propagate the
        update to the predecessor."""
        finger = self.network.finger_table[index]
        if not finger.node or is_between_ids(
            node.id, finger.start, finger.node.id, first_equality=True
        ):  # Changed from original pseudo-code
            finger.node = node
            if (
                self._predecessor and self._predecessor != self
            ):  # Check for avoiding error or recursive call
                await self._predecessor._update_finger_table(node, index)
    async def _get_successor(self) -> INode:
        # First populated finger entry is the immediate successor.
        for finger in self.network.finger_table:
            if finger.node:
                return finger.node
        return self
    async def _set_successor(self, node: INode) -> None:
        self.network.finger_table[0].node = node
    async def _get_predecessor(self) -> INode:
        # Fall back to the furthest known finger when no predecessor is set.
        if self._predecessor:
            return self._predecessor
        for finger in reversed(self.network.finger_table):
            if finger.node:
                return finger.node
        return self
    async def _set_predecessor(self, node: INode) -> None:
        self._predecessor = node
    async def _handle_request(self, reader, writer) -> None:
        """Parse one wire request (opcode, payload length, payload —
        SEPARATOR-delimited) and dispatch it to the rpc handler."""
        opcode = (await reader.readuntil(rpc.SEPARATOR))[:-1]
        opcode = int.from_bytes(opcode, "big")
        payload_length = (await reader.readuntil(rpc.SEPARATOR))[:-1]
        payload_length = int.from_bytes(payload_length, "big")
        payload = await reader.read(payload_length)
        logger.debug(
            f"Request: OPCODE {opcode} - PAYLOAD-LENGTH {payload_length} - PAYLOAD {payload[:20]}"
        )
        request = rpc.Request(reader, writer, opcode, payload)
        await rpc.handle_request(self, request)
        writer.close()
    async def _run_maintenance_task(self):
        # Periodic ring stabilization until cancelled by _stop_server.
        while True:
            await asyncio.sleep(MAINTENANCE_FREQUENCY)
            await self.network.mantain()
    def _get_key_from_value(self, value):
        # SHA-1 digest of the value, interpreted as a big-endian integer.
        # (`hash` shadows the builtin; harmless locally.)
        hash = sha1()
        hash.update(value)
        key = int.from_bytes(hash.digest(), "big")
        return key
class RemoteNode(INode):
    """Client-side proxy for a node hosted elsewhere: every operation is
    delegated to the corresponding function in the `rpc` module."""
    def __init__(self, ip: str, port: int = CHORD_PORT):
        super().__init__(ip, port)
    async def store(self, value: bytes) -> None:
        await rpc.store(self, value)
    async def get(self, key: int) -> Any:
        return await rpc.get(self, key)
    async def _is_alive(self) -> bool:
        return await rpc.is_alive(self)
    async def _closest_preceding_finger(self, id: int) -> INode:
        return await rpc.closest_preceding_finger(self, id)
    async def _find_successor(self, id: int) -> INode:
        return await rpc.find_successor(self, id)
    async def _notify(self, node: INode) -> None:
        return await rpc.notify(self, node)
    async def _update_finger_table(self, node: INode, index: int) -> None:
        return await rpc.update_finger_table(self, node, index)
    async def _get_successor(self) -> INode:
        return await rpc.get_successor(self)
    async def _set_successor(self, node: INode) -> None:
        return await rpc.set_successor(self, node)
    async def _get_predecessor(self) -> INode:
        return await rpc.get_predecessor(self)
    async def _set_predecessor(self, node: INode) -> None:
        return await rpc.set_predecessor(self, node)
    async def leave(self) -> None:
        # Leaving is a local decision; nothing to tell the remote node.
        pass
98c74c924d0210dec12cf6f37804b73bcf5e3349 | 312 | py | Python | mytools/test_gaussian.py | abcxs/polyrnn | 92eee689fe62585529deb1c44fbf1c889f414fa2 | [
"Apache-2.0"
] | 4 | 2021-06-01T08:33:56.000Z | 2022-02-12T14:54:09.000Z | mytools/test_gaussian.py | abcxs/polyrnn | 92eee689fe62585529deb1c44fbf1c889f414fa2 | [
"Apache-2.0"
] | null | null | null | mytools/test_gaussian.py | abcxs/polyrnn | 92eee689fe62585529deb1c44fbf1c889f414fa2 | [
"Apache-2.0"
] | 1 | 2021-06-01T08:34:00.000Z | 2021-06-01T08:34:00.000Z | # %%
import matplotlib.pyplot as plt
import torch
from mmdet.models.utils import gen_gaussian_target
img = torch.zeros(10, 10, dtype=torch.float32)
centers = [[0, 0], [3, 3], [7, 8]]
for center in centers:
img = gen_gaussian_target(img, center, 1)
img = img.numpy()
print(img)
plt.imshow(img)
plt.show
# %%
| 20.8 | 50 | 0.698718 |
8777493f6029f0c7a50460d927a75903eb90b76b | 94 | py | Python | hw3/hw3-code/python/test.py | zyhhhhhhh/cs446-machine-learning | b07cc86ca6630c7cab72e79a9e22f665dd71f809 | [
"MIT"
] | null | null | null | hw3/hw3-code/python/test.py | zyhhhhhhh/cs446-machine-learning | b07cc86ca6630c7cab72e79a9e22f665dd71f809 | [
"MIT"
] | null | null | null | hw3/hw3-code/python/test.py | zyhhhhhhh/cs446-machine-learning | b07cc86ca6630c7cab72e79a9e22f665dd71f809 | [
"MIT"
] | null | null | null | MAX = 2 ** 63 - 1
MIN = -2 ** 63
result = int((MAX-5)/10)+1
val = 5
print(result>(MAX-val)/10) | 18.8 | 26 | 0.553191 |
3708c714bd909f41d3c06cba89b0943422d339dd | 1,713 | py | Python | StarterApp/Main.py | jepierre/python_apps | ce3350480aed9c091bb92281d13711337b40b2dd | [
"MIT"
] | null | null | null | StarterApp/Main.py | jepierre/python_apps | ce3350480aed9c091bb92281d13711337b40b2dd | [
"MIT"
] | null | null | null | StarterApp/Main.py | jepierre/python_apps | ce3350480aed9c091bb92281d13711337b40b2dd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Starter App
"""
import traceback
import termcolor
__appname__ = "Starter"
import logging
import os
import sys
from PyQt5 import uic
from PyQt5.QtWidgets import QApplication, QMainWindow
# Create the per-app log directory next to this file and configure
# file-based logging for the whole application.
app_path = os.path.dirname(__file__)
app_log_path = os.path.join(app_path, "logs")
if not os.path.exists(app_log_path):
    os.makedirs(app_log_path)
log_file_name = __appname__ + ".txt"
formatter = "%(asctime)s: %(name)s -%(levelname)s -%(module)s -%(funcName)s -%(lineno)-3d -%(message)s"
logging.basicConfig(
    filename=os.path.join(app_log_path, log_file_name), format=formatter
)
logger = logging.getLogger(name="main-gui")
logger.setLevel(logging.DEBUG)
class Main(QMainWindow):
    """Main application window; shows itself immediately on construction."""
    def __init__(self, *args, **kargs):
        super().__init__(*args, **kargs)
        self.init_ui()
        self.show()
    def init_ui(self):
        # Placeholder UI setup; currently only verifies logging works.
        logger.debug("test logger")
    def exit_app(self):
        logger.debug("Exiting")
        sys.exit(0)
def main():
    """Application entry point: console logging, global exception hook,
    then start the Qt event loop."""
    # Enable logging on the console
    ch = logging.StreamHandler()
    ch.setFormatter(logging.Formatter(formatter))
    ch.setLevel(logging.DEBUG)
    logger.addHandler(ch)
    # catches errors in gui and print them
    def excepthook(etype, value, tb):
        if isinstance(value, KeyboardInterrupt):
            sys.exit(1)
        else:
            termcolor.cprint("Sorry, something's wrong! ", "yellow", file=sys.stderr)
            # print traceback
            traceback.print_exception(etype, value, tb)
    # Set global exception handler.
    sys.excepthook = excepthook
    # Open the app
    app = QApplication(sys.argv)
    App = Main()  # keep a reference so the window is not garbage-collected
    sys.exit(app.exec_())
if __name__ == "__main__":
    main()
| 22.84 | 103 | 0.657326 |
e74bb594a7e325139b95531ab84996df61cc8d83 | 4,180 | py | Python | deeppavlov/core/common/params.py | xbodx/DeepPavlov | 4b60bf162df4294b8b0db3b72786cdd699c674fa | [
"Apache-2.0"
] | 5,893 | 2018-02-01T18:13:20.000Z | 2022-03-31T19:22:21.000Z | deeppavlov/core/common/params.py | xbodx/DeepPavlov | 4b60bf162df4294b8b0db3b72786cdd699c674fa | [
"Apache-2.0"
] | 749 | 2018-01-31T11:36:02.000Z | 2022-03-30T07:24:22.000Z | deeppavlov/core/common/params.py | xbodx/DeepPavlov | 4b60bf162df4294b8b0db3b72786cdd699c674fa | [
"Apache-2.0"
] | 1,155 | 2018-02-01T10:52:15.000Z | 2022-03-29T02:12:15.000Z | # Copyright 2017 Neural Networks and Deep Learning lab, MIPT
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
from logging import getLogger
from types import FunctionType
from typing import Any, Dict, Union
from deeppavlov.core.commands.utils import expand_path, parse_config
from deeppavlov.core.common.errors import ConfigError
from deeppavlov.core.common.registry import get_model
from deeppavlov.core.models.component import Component
log = getLogger(__name__)
# Registry of already-instantiated components, keyed by their config id.
_refs = {}
def _resolve(val):
    """Resolve a '#component_id.attr1.attr2' reference string to the
    referenced component (or one of its attributes); other values pass
    through unchanged. Raises ConfigError if the id is unknown."""
    if isinstance(val, str) and val.startswith('#'):
        component_id, *attributes = val[1:].split('.')
        try:
            val = _refs[component_id]
        except KeyError:
            e = ConfigError('Component with id "{id}" was referenced but not initialized'
                            .format(id=component_id))
            log.exception(e)
            raise e
        # HACK: attribute chain is evaluated via eval on the local `val`;
        # note this executes attribute names taken from the config file.
        attributes = ['val'] + attributes
        val = eval('.'.join(attributes))
    return val
def _init_param(param, mode):
    """Recursively materialize a config parameter: resolve '#'-references
    in strings, recurse into lists/tuples and dict values, and instantiate
    nested component configs via ``from_params``."""
    if isinstance(param, str):
        return _resolve(param)
    if isinstance(param, (list, tuple)):
        return [_init_param(item, mode) for item in param]
    if isinstance(param, dict):
        # A dict carrying one of these keys is itself a component config.
        if {'ref', 'class_name', 'config_path'}.intersection(param.keys()):
            return from_params(param, mode=mode)
        return {key: _init_param(value, mode) for key, value in param.items()}
    return param
def from_params(params: Dict, mode: str = 'infer', serialized: Any = None, **kwargs) -> Union[Component, FunctionType]:
    """Builds and returns the Component from corresponding dictionary of parameters."""
    # what is passed in json:
    config_params = {k: _resolve(v) for k, v in params.items()}
    # get component by reference (if any)
    if 'ref' in config_params:
        try:
            component = _refs[config_params['ref']]
            if serialized is not None:
                component.deserialize(serialized)
            return component
        except KeyError:
            e = ConfigError('Component with id "{id}" was referenced but not initialized'
                            .format(id=config_params['ref']))
            log.exception(e)
            raise e
    elif 'config_path' in config_params:
        # Build a whole sub-model from an external config file. _refs is
        # saved and cleared around the nested build so the sub-config's
        # component ids do not collide with (or leak into) the current one.
        from deeppavlov.core.commands.infer import build_model
        refs = _refs.copy()
        _refs.clear()
        config = parse_config(expand_path(config_params['config_path']))
        model = build_model(config, serialized=serialized)
        _refs.clear()
        _refs.update(refs)
        # Register the sub-model under its id, if one was given.
        try:
            _refs[config_params['id']] = model
        except KeyError:
            pass
        return model
    cls_name = config_params.pop('class_name', None)
    if not cls_name:
        e = ConfigError('Component config has no `class_name` nor `ref` fields')
        log.exception(e)
        raise e
    obj = get_model(cls_name)
    if inspect.isclass(obj):
        # find the submodels params recursively
        config_params = {k: _init_param(v, mode) for k, v in config_params.items()}
        try:
            # Forward `mode` only to constructors that accept it.
            spec = inspect.getfullargspec(obj)
            if 'mode' in spec.args + spec.kwonlyargs or spec.varkw is not None:
                kwargs['mode'] = mode
            component = obj(**dict(config_params, **kwargs))
            # Register under its id, if one was given.
            try:
                _refs[config_params['id']] = component
            except KeyError:
                pass
        except Exception:
            log.exception("Exception in {}".format(obj))
            raise
        if serialized is not None:
            component.deserialize(serialized)
    else:
        # Registered name resolves to a function: return it as-is.
        component = obj
    return component
| 34.545455 | 119 | 0.632536 |
c87aa162d56cb08501697c9f4840be3f6e8f1f10 | 820 | py | Python | get_prod_ipv4.py | ddiguy/dns-soa-check | 705d37803d6e61b906f6e71b1e0b299d488626c1 | [
"MIT"
] | null | null | null | get_prod_ipv4.py | ddiguy/dns-soa-check | 705d37803d6e61b906f6e71b1e0b299d488626c1 | [
"MIT"
] | null | null | null | get_prod_ipv4.py | ddiguy/dns-soa-check | 705d37803d6e61b906f6e71b1e0b299d488626c1 | [
"MIT"
] | null | null | null | #!/usr/local/bin/python3.6
import json
import requests
# Gets next available IP address in network using Infoblox REST API
# Ignoring SSL warnings
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
ib_prod_user = 'username'
ib_prod_pass = 'password'
ib_prod_gridmaster = 'FQDN of grid master'
ib_prod_api_base_url = 'https://' + ib_prod_gridmaster + '/wapi/v2.3.1/'
s = requests.Session()
r = s.get(ib_prod_api_base_url + 'ipv4address',
params={'_max_results': str(ib_api_max_dns_get), 'status': 'USED', 'network_view': 'default', 'network': '10.2.10.32/30'},
auth=(ib_prod_user, ib_prod_pass),
headers={'Accept':'application/json', 'Content-Type':'application/json'},
verify=False)
a = json.loads(r.text)
print(a)
| 28.275862 | 124 | 0.753659 |
497302362f828795a82c1fa563e7e10a4561f3f8 | 56,119 | py | Python | ax/plot/scatter.py | mpolson64/Ax-1 | cf9e12cc1253efe0fc893f2620e99337e0927a26 | [
"MIT"
] | 1 | 2022-02-10T10:51:40.000Z | 2022-02-10T10:51:40.000Z | ax/plot/scatter.py | mpolson64/Ax-1 | cf9e12cc1253efe0fc893f2620e99337e0927a26 | [
"MIT"
] | null | null | null | ax/plot/scatter.py | mpolson64/Ax-1 | cf9e12cc1253efe0fc893f2620e99337e0927a26 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numbers
import warnings
from collections import OrderedDict
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple, Union
import numpy as np
import plotly.graph_objs as go
from ax.core.data import Data
from ax.core.experiment import Experiment
from ax.core.observation import Observation, ObservationFeatures
from ax.modelbridge.base import ModelBridge
from ax.modelbridge.registry import Models
from ax.plot.base import (
CI_OPACITY,
DECIMALS,
AxPlotConfig,
AxPlotTypes,
PlotInSampleArm,
PlotMetric,
PlotOutOfSampleArm,
Z,
)
from ax.plot.color import (
COLORS,
DISCRETE_COLOR_SCALE,
BLUE_SCALE,
rgba,
)
from ax.plot.helper import (
TNullableGeneratorRunsDict,
_format_CI,
_format_dict,
_wrap_metric,
arm_name_to_sort_key,
arm_name_to_tuple,
get_plot_data,
infer_is_relative,
resize_subtitles,
)
from ax.utils.common.typeutils import checked_cast_optional
from ax.utils.stats.statstools import relativize
from plotly import subplots
# type aliases
Traces = List[Dict[str, Any]]
def _error_scatter_data(
arms: Iterable[Union[PlotInSampleArm, PlotOutOfSampleArm]],
y_axis_var: PlotMetric,
x_axis_var: Optional[PlotMetric] = None,
status_quo_arm: Optional[PlotInSampleArm] = None,
) -> Tuple[List[float], Optional[List[float]], List[float], List[float]]:
y_metric_key = "y_hat" if y_axis_var.pred else "y"
y_sd_key = "se_hat" if y_axis_var.pred else "se"
arm_names = [a.name for a in arms]
y = [getattr(a, y_metric_key).get(y_axis_var.metric, np.nan) for a in arms]
y_se = [getattr(a, y_sd_key).get(y_axis_var.metric, np.nan) for a in arms]
# Delta method if relative to status quo arm
if y_axis_var.rel:
if status_quo_arm is None:
raise ValueError("`status_quo_arm` cannot be None for relative effects.")
y_rel, y_se_rel = relativize(
means_t=y,
sems_t=y_se,
mean_c=getattr(status_quo_arm, y_metric_key).get(y_axis_var.metric),
sem_c=getattr(status_quo_arm, y_sd_key).get(y_axis_var.metric),
as_percent=True,
)
y = y_rel.tolist()
y_se = y_se_rel.tolist()
# x can be metric for a metric or arm names
if x_axis_var is None:
x = arm_names
x_se = None
else:
x_metric_key = "y_hat" if x_axis_var.pred else "y"
x_sd_key = "se_hat" if x_axis_var.pred else "se"
x = [getattr(a, x_metric_key).get(x_axis_var.metric, np.nan) for a in arms]
x_se = [getattr(a, x_sd_key).get(x_axis_var.metric, np.nan) for a in arms]
if x_axis_var.rel:
# Delta method if relative to status quo arm
x_rel, x_se_rel = relativize(
means_t=x,
sems_t=x_se,
mean_c=getattr(status_quo_arm, x_metric_key).get(x_axis_var.metric),
sem_c=getattr(status_quo_arm, x_sd_key).get(x_axis_var.metric),
as_percent=True,
)
x = x_rel.tolist()
x_se = x_se_rel.tolist()
return x, x_se, y, y_se
def _error_scatter_trace(
    arms: Sequence[Union[PlotInSampleArm, PlotOutOfSampleArm]],
    y_axis_var: PlotMetric,
    x_axis_var: Optional[PlotMetric] = None,
    y_axis_label: Optional[str] = None,
    x_axis_label: Optional[str] = None,
    status_quo_arm: Optional[PlotInSampleArm] = None,
    show_CI: bool = True,
    name: str = "In-sample",
    color: Tuple[int] = COLORS.STEELBLUE.value,
    visible: bool = True,
    legendgroup: Optional[str] = None,
    showlegend: bool = True,
    hoverinfo: str = "text",
    show_arm_details_on_hover: bool = True,
    show_context: bool = False,
    arm_noun: str = "arm",
    color_parameter: Optional[str] = None,
    color_metric: Optional[str] = None,
) -> Dict[str, Any]:
    """Plot scatterplot with error bars.
    Args:
        arms (List[Union[PlotInSampleArm, PlotOutOfSampleArm]]):
            a list of in-sample or out-of-sample arms.
            In-sample arms have observed data, while out-of-sample arms
            just have predicted data. As a result,
            when passing out-of-sample arms, pred must be True.
        y_axis_var: name of metric for y-axis, along with whether
            it is observed or predicted.
        x_axis_var: name of metric for x-axis,
            along with whether it is observed or predicted. If None, arm names
            are automatically used.
        y_axis_label: custom label to use for y axis.
            If None, use metric name from `y_axis_var`.
        x_axis_label: custom label to use for x axis.
            If None, use metric name from `x_axis_var` if that is not None.
        status_quo_arm: the status quo
            arm. Necessary for relative metrics.
        show_CI: if True, plot confidence intervals.
        name: name of trace. Default is "In-sample".
        color: color as rgb tuple. Default is
            (128, 177, 211), which corresponds to COLORS.STEELBLUE.
        visible: if True, trace is visible (default).
        legendgroup: group for legends.
        showlegend: if True, legend if rendered.
        hoverinfo: information to show on hover. Default is
            custom text.
        show_arm_details_on_hover: if True, display
            parameterizations of arms on hover. Default is True.
        show_context: if True and show_arm_details_on_hover,
            context will be included in the hover.
        arm_noun: noun to use instead of "arm" (e.g. group)
        color_parameter: color points according to the specified parameter,
            cannot be used together with color_metric.
        color_metric: color points according to the specified metric,
            cannot be used together with color_parameter.
    Returns:
        A plotly ``go.Scatter`` trace with hover text and optional error bars.
    """
    if color_metric and color_parameter:
        raise RuntimeError(
            "color_metric and color_parameter cannot be used at the same time!"
        )
    if (color_metric or color_parameter) and not all(
        isinstance(arm, PlotInSampleArm) for arm in arms
    ):
        raise RuntimeError("Color coding currently only works with in-sample arms!")
    # Opportunistically sort if arm names are in {trial}_{arm} format
    arms = sorted(arms, key=lambda a: arm_name_to_sort_key(a.name), reverse=True)
    x, x_se, y, y_se = _error_scatter_data(
        arms=arms,
        y_axis_var=y_axis_var,
        x_axis_var=x_axis_var,
        status_quo_arm=status_quo_arm,
    )
    labels = []
    colors = []
    arm_names = [a.name for a in arms]
    # No relativization if no x variable.
    rel_x = x_axis_var.rel if x_axis_var else False
    rel_y = y_axis_var.rel
    # Build one hover label per arm; the loop previously ended with a no-op
    # `i += 1` (the `for` statement reassigns `i`), which has been removed.
    for i in range(len(arm_names)):
        heading = f"<b>{arm_noun.title()} {arm_names[i]}</b><br>"
        x_lab = (
            "{name}: {estimate}{perc} {ci}<br>".format(
                name=x_axis_var.metric if x_axis_label is None else x_axis_label,
                estimate=(
                    round(x[i], DECIMALS) if isinstance(x[i], numbers.Number) else x[i]
                ),
                ci="" if x_se is None else _format_CI(x[i], x_se[i], rel_x),
                perc="%" if rel_x else "",
            )
            if x_axis_var is not None
            else ""
        )
        y_lab = "{name}: {estimate}{perc} {ci}<br>".format(
            name=y_axis_var.metric if y_axis_label is None else y_axis_label,
            estimate=(
                round(y[i], DECIMALS) if isinstance(y[i], numbers.Number) else y[i]
            ),
            ci="" if y_se is None else _format_CI(y[i], y_se[i], rel_y),
            perc="%" if rel_y else "",
        )
        parameterization = (
            _format_dict(arms[i].parameters, "Parameterization")
            if show_arm_details_on_hover
            else ""
        )
        if color_parameter:
            colors.append(arms[i].parameters[color_parameter])
        elif color_metric:
            # Must be PlotInSampleArm here if no error raised previously
            # pyre-ignore[16]: `PlotOutOfSampleArm` has no attribute `y`
            colors.append(arms[i].y[color_metric])
        context = (
            # Expected `Dict[str, Optional[Union[bool, float, str]]]` for 1st anonymous
            # parameter to call `ax.plot.helper._format_dict` but got
            # `Optional[Dict[str, Union[float, str]]]`.
            # pyre-fixme[6]:
            _format_dict(arms[i].context_stratum, "Context")
            if show_arm_details_on_hover
            and show_context  # noqa W503
            and arms[i].context_stratum  # noqa W503
            else ""
        )
        labels.append(
            "{arm_name}<br>{xlab}{ylab}{param_blob}{context}".format(
                arm_name=heading,
                xlab=x_lab,
                ylab=y_lab,
                param_blob=parameterization,
                context=context,
            )
        )
    if color_metric or color_parameter:
        rgba_blue_scale = [rgba(c) for c in BLUE_SCALE]
        marker = {
            "color": colors,
            "colorscale": rgba_blue_scale,
            "colorbar": {"title": color_metric or color_parameter},
            "showscale": True,
        }
    else:
        marker = {"color": rgba(color)}
    trace = go.Scatter(
        x=x,
        y=y,
        marker=marker,
        mode="markers",
        name=name,
        text=labels,
        hoverinfo=hoverinfo,
    )
    if show_CI:
        if x_se is not None:
            trace.update(
                error_x={
                    "type": "data",
                    "array": np.multiply(x_se, Z),
                    "color": rgba(color, CI_OPACITY),
                }
            )
        if y_se is not None:
            trace.update(
                error_y={
                    "type": "data",
                    "array": np.multiply(y_se, Z),
                    "color": rgba(color, CI_OPACITY),
                }
            )
    if visible is not None:
        trace.update(visible=visible)
    if legendgroup is not None:
        trace.update(legendgroup=legendgroup)
    if showlegend is not None:
        trace.update(showlegend=showlegend)
    return trace
def _multiple_metric_traces(
    model: ModelBridge,
    metric_x: str,
    metric_y: str,
    generator_runs_dict: TNullableGeneratorRunsDict,
    rel_x: bool,
    rel_y: bool,
    fixed_features: Optional[ObservationFeatures] = None,
    data_selector: Optional[Callable[[Observation], bool]] = None,
    color_parameter: Optional[str] = None,
    color_metric: Optional[str] = None,
) -> Traces:
    """Assemble scatter traces for a pair of metrics.
    Produces two in-sample traces — observed values (hidden by default) and
    model predictions (visible by default) — followed by one predicted trace
    per out-of-sample generator run.
    Args:
        model: model to draw predictions from.
        metric_x: metric to plot on the x-axis.
        metric_y: metric to plot on the y-axis.
        generator_runs_dict: a mapping from generator run name to
            generator run.
        rel_x: if True, use relative effects on metric_x.
        rel_y: if True, use relative effects on metric_y.
        fixed_features: Fixed features to use when making model predictions.
        data_selector: Function for selecting observations for plotting.
        color_parameter: color points according to the specified parameter,
            cannot be used together with color_metric.
        color_metric: color points according to the specified metric,
            cannot be used together with color_parameter.
    """
    requested_metrics = {metric_x, metric_y}
    if color_metric is not None:
        requested_metrics.add(color_metric)
    plot_data, _, _ = get_plot_data(
        model,
        {} if generator_runs_dict is None else generator_runs_dict,
        requested_metrics,
        fixed_features=fixed_features,
        data_selector=data_selector,
    )
    sq_name = plot_data.status_quo_name
    status_quo_arm = None if sq_name is None else plot_data.in_sample.get(sq_name)
    in_sample_arms = list(plot_data.in_sample.values())
    traces = []
    # Observed trace first (pred=False, hidden), then modeled (pred=True).
    for use_predictions in (False, True):
        traces.append(
            _error_scatter_trace(
                in_sample_arms,
                x_axis_var=PlotMetric(metric_x, pred=use_predictions, rel=rel_x),
                y_axis_var=PlotMetric(metric_y, pred=use_predictions, rel=rel_y),
                status_quo_arm=status_quo_arm,
                visible=use_predictions,
                color_parameter=color_parameter,
                color_metric=color_metric,
            )
        )
    # TODO: Figure out if there's a better way to color code out-of-sample points
    candidate_runs = (plot_data.out_of_sample or {}).items()
    for color_index, (run_name, cand_arms) in enumerate(candidate_runs, start=1):
        traces.append(
            _error_scatter_trace(
                list(cand_arms.values()),
                x_axis_var=PlotMetric(metric_x, pred=True, rel=rel_x),
                y_axis_var=PlotMetric(metric_y, pred=True, rel=rel_y),
                status_quo_arm=status_quo_arm,
                name=run_name,
                color=DISCRETE_COLOR_SCALE[color_index],
            )
        )
    return traces
def plot_multiple_metrics(
    model: ModelBridge,
    metric_x: str,
    metric_y: str,
    generator_runs_dict: TNullableGeneratorRunsDict = None,
    rel_x: bool = True,
    rel_y: bool = True,
    fixed_features: Optional[ObservationFeatures] = None,
    data_selector: Optional[Callable[[Observation], bool]] = None,
    color_parameter: Optional[str] = None,
    color_metric: Optional[str] = None,
    **kwargs: Any,
) -> AxPlotConfig:
    """Plot raw values or predictions of two metrics for arms.
    All arms used in the model are included in the plot. Additional
    arms can be passed through the `generator_runs_dict` argument.
    Args:
        model: model to draw predictions from.
        metric_x: metric to plot on the x-axis.
        metric_y: metric to plot on the y-axis.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel_x: if True, use relative effects on metric_x.
        rel_y: if True, use relative effects on metric_y.
        fixed_features: Fixed features to use when making model predictions.
        data_selector: Function for selecting observations for plotting.
        color_parameter: color points according to the specified parameter,
            cannot be used together with color_metric.
        color_metric: color points according to the specified metric,
            cannot be used together with color_parameter.
    """
    # Reserve extra horizontal space for the colorbar when color coding.
    if color_parameter or color_metric:
        layout_offset_x = 0.15
    else:
        layout_offset_x = 0
    # Deprecated `rel` kwarg overrides both per-axis flags when supplied.
    rel = checked_cast_optional(bool, kwargs.get("rel"))
    if rel is not None:
        warnings.warn("Use `rel_x` and `rel_y` instead of `rel`.", DeprecationWarning)
        rel_x = rel
        rel_y = rel
    traces = _multiple_metric_traces(
        model,
        metric_x,
        metric_y,
        generator_runs_dict,
        rel_x=rel_x,
        rel_y=rel_y,
        fixed_features=fixed_features,
        data_selector=data_selector,
        color_parameter=color_parameter,
        color_metric=color_metric,
    )
    num_cand_traces = len(generator_runs_dict) if generator_runs_dict is not None else 0
    layout = go.Layout(
        title="Objective Tradeoffs",
        hovermode="closest",
        updatemenus=[
            {
                "x": 1.25 + layout_offset_x,
                "y": 0.67,
                "buttons": [
                    {
                        "args": [
                            {
                                "error_x.width": 4,
                                "error_x.thickness": 2,
                                "error_y.width": 4,
                                "error_y.thickness": 2,
                            }
                        ],
                        "label": "Yes",
                        "method": "restyle",
                    },
                    {
                        "args": [
                            {
                                "error_x.width": 0,
                                "error_x.thickness": 0,
                                "error_y.width": 0,
                                "error_y.thickness": 0,
                            }
                        ],
                        "label": "No",
                        "method": "restyle",
                    },
                ],
                "yanchor": "middle",
                "xanchor": "left",
            },
            {
                "x": 1.25 + layout_offset_x,
                "y": 0.57,
                "buttons": [
                    {
                        "args": [
                            {"visible": ([False, True] + [True] * num_cand_traces)}
                        ],
                        "label": "Modeled",
                        "method": "restyle",
                    },
                    {
                        "args": [
                            {"visible": ([True, False] + [False] * num_cand_traces)}
                        ],
                        "label": "Observed",
                        "method": "restyle",
                    },
                ],
                "yanchor": "middle",
                "xanchor": "left",
            },
        ],
        annotations=[
            {
                "x": 1.18 + layout_offset_x,
                "y": 0.7,
                "xref": "paper",
                "yref": "paper",
                "text": "Show CI",
                "showarrow": False,
                "yanchor": "middle",
            },
            {
                "x": 1.18 + layout_offset_x,
                "y": 0.6,
                "xref": "paper",
                "yref": "paper",
                "text": "Type",
                "showarrow": False,
                "yanchor": "middle",
            },
        ],
        xaxis={
            # Key the "%" suffix off the per-axis flags. The old code used the
            # deprecated `rel` kwarg here, which is None (falsy) unless callers
            # passed it explicitly, so relative plots lost their "%" labels.
            "title": metric_x + (" (%)" if rel_x else ""),
            "zeroline": True,
            "zerolinecolor": "red",
        },
        yaxis={
            "title": metric_y + (" (%)" if rel_y else ""),
            "zeroline": True,
            "zerolinecolor": "red",
        },
        width=800,
        height=600,
        font={"size": 10},
        legend={"x": 1 + layout_offset_x},
    )
    fig = go.Figure(data=traces, layout=layout)
    return AxPlotConfig(data=fig, plot_type=AxPlotTypes.GENERIC)
def plot_objective_vs_constraints(
    model: ModelBridge,
    objective: str,
    subset_metrics: Optional[List[str]] = None,
    generator_runs_dict: TNullableGeneratorRunsDict = None,
    rel: bool = True,
    infer_relative_constraints: Optional[bool] = False,
    fixed_features: Optional[ObservationFeatures] = None,
    data_selector: Optional[Callable[[Observation], bool]] = None,
    color_parameter: Optional[str] = None,
    color_metric: Optional[str] = None,
) -> AxPlotConfig:
    """Plot the tradeoff between an objective and all other metrics in a model.
    All arms used in the model are included in the plot. Additional
    arms can be passed through via the `generator_runs_dict` argument.
    Fixed features input can be used to override fields of the insample arms
    when making model predictions.
    Args:
        model: model to draw predictions from.
        objective: metric to optimize. Plotted on the x-axis.
        subset_metrics: list of metrics to plot on the y-axes
            if need a subset of all metrics in the model.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel: if True, use relative effects. Default is True.
        infer_relative_constraints: if True, read relative spec from model's
            optimization config. Absolute constraints will not be relativized;
            relative ones will be.
            Objectives will respect the `rel` parameter.
            Metrics that are not constraints will be relativized.
        fixed_features: Fixed features to use when making model predictions.
        data_selector: Function for selecting observations for plotting.
        color_parameter: color points according to the specified parameter,
            cannot be used together with color_metric.
        color_metric: color points according to the specified metric,
            cannot be used together with color_parameter.
    """
    # Reserve extra horizontal space for the colorbar when points are
    # color-coded by a parameter or a metric.
    if color_parameter or color_metric:
        layout_offset_x = 0.15
    else:
        layout_offset_x = 0
    if subset_metrics is not None:
        metrics = subset_metrics
    else:
        metrics = [m for m in model.metric_names if m != objective]
    metric_dropdown = []
    # `rels` maps each metric name -> whether its effects are shown relative
    # to the status quo; the objective always follows the `rel` argument.
    if infer_relative_constraints:
        rels = infer_is_relative(model, metrics, non_constraint_rel=rel)
        if rel:
            rels[objective] = True
        else:
            rels[objective] = False
    else:
        if rel:
            rels = {metric: True for metric in metrics}
            rels[objective] = True
        else:
            rels = {metric: False for metric in metrics}
            rels[objective] = False
    # set plotted data to the first outcome
    plot_data = _multiple_metric_traces(
        model,
        objective,
        metrics[0],
        generator_runs_dict,
        rel_x=rels[objective],
        rel_y=rels[metrics[0]],
        fixed_features=fixed_features,
        data_selector=data_selector,
        color_parameter=color_parameter,
        color_metric=color_metric,
    )
    # Build one dropdown entry per metric; selecting it swaps the y data (and
    # hover text) of the existing traces in place rather than rebuilding the
    # figure.
    for metric in metrics:
        otraces = _multiple_metric_traces(
            model,
            objective,
            metric,
            generator_runs_dict,
            rel_x=rels[objective],
            rel_y=rels[metric],
            fixed_features=fixed_features,
            data_selector=data_selector,
            color_parameter=color_parameter,
            color_metric=color_metric,
        )
        # Current version of Plotly does not allow updating the yaxis label
        # on dropdown (via relayout) simultaneously with restyle
        metric_dropdown.append(
            {
                "args": [
                    {
                        "y": [t["y"] for t in otraces],
                        "error_y.array": [t["error_y"]["array"] for t in otraces],
                        "text": [t["text"] for t in otraces],
                    },
                    {"yaxis.title": metric + (" (%)" if rels[metric] else "")},
                ],
                "label": metric,
                "method": "update",
            }
        )
    num_cand_traces = len(generator_runs_dict) if generator_runs_dict is not None else 0
    # Layout carries three dropdowns: CI on/off, modeled vs. observed values,
    # and the y-axis metric selector built above.
    layout = go.Layout(
        title="Objective Tradeoffs",
        hovermode="closest",
        updatemenus=[
            {
                "x": 1.25 + layout_offset_x,
                "y": 0.62,
                "buttons": [
                    {
                        "args": [
                            {
                                "error_x.width": 4,
                                "error_x.thickness": 2,
                                "error_y.width": 4,
                                "error_y.thickness": 2,
                            }
                        ],
                        "label": "Yes",
                        "method": "restyle",
                    },
                    {
                        "args": [
                            {
                                "error_x.width": 0,
                                "error_x.thickness": 0,
                                "error_y.width": 0,
                                "error_y.thickness": 0,
                            }
                        ],
                        "label": "No",
                        "method": "restyle",
                    },
                ],
                "yanchor": "middle",
                "xanchor": "left",
            },
            {
                "x": 1.25 + layout_offset_x,
                "y": 0.52,
                "buttons": [
                    {
                        "args": [
                            {"visible": ([False, True] + [True] * num_cand_traces)}
                        ],
                        "label": "Modeled",
                        "method": "restyle",
                    },
                    {
                        "args": [
                            {"visible": ([True, False] + [False] * num_cand_traces)}
                        ],
                        "label": "Observed",
                        "method": "restyle",
                    },
                ],
                "yanchor": "middle",
                "xanchor": "left",
            },
            {
                "x": 1.25 + layout_offset_x,
                "y": 0.72,
                "yanchor": "middle",
                "xanchor": "left",
                "buttons": metric_dropdown,
            },
        ],
        annotations=[
            {
                "x": 1.18 + layout_offset_x,
                "y": 0.72,
                "xref": "paper",
                "yref": "paper",
                "text": "Y-Axis",
                "showarrow": False,
                "yanchor": "middle",
            },
            {
                "x": 1.18 + layout_offset_x,
                "y": 0.62,
                "xref": "paper",
                "yref": "paper",
                "text": "Show CI",
                "showarrow": False,
                "yanchor": "middle",
            },
            {
                "x": 1.18 + layout_offset_x,
                "y": 0.52,
                "xref": "paper",
                "yref": "paper",
                "text": "Type",
                "showarrow": False,
                "yanchor": "middle",
            },
        ],
        xaxis={
            "title": objective + (" (%)" if rels[objective] else ""),
            "zeroline": True,
            "zerolinecolor": "red",
        },
        yaxis={
            "title": metrics[0] + (" (%)" if rels[metrics[0]] else ""),
            "zeroline": True,
            "zerolinecolor": "red",
        },
        width=900,
        height=600,
        font={"size": 10},
        legend={"x": 1 + layout_offset_x},
    )
    fig = go.Figure(data=plot_data, layout=layout)
    return AxPlotConfig(data=fig, plot_type=AxPlotTypes.GENERIC)
def lattice_multiple_metrics(
    model: ModelBridge,
    generator_runs_dict: TNullableGeneratorRunsDict = None,
    rel: bool = True,
    show_arm_details_on_hover: bool = False,
    data_selector: Optional[Callable[[Observation], bool]] = None,
) -> AxPlotConfig:
    """Plot raw values or predictions of combinations of two metrics for arms.
    Args:
        model: model to draw predictions from.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel: if True, use relative effects. Default is True.
        show_arm_details_on_hover: if True, display
            parameterizations of arms on hover. Default is False.
        data_selector: Function for selecting observations for plotting.
    """
    metrics = model.metric_names
    # One subplot cell per ordered pair of metrics; diagonal cells get
    # per-metric box plots instead of scatters.
    fig = subplots.make_subplots(
        rows=len(metrics),
        cols=len(metrics),
        print_grid=False,
        shared_xaxes=False,
        shared_yaxes=False,
    )
    plot_data, _, _ = get_plot_data(
        model,
        generator_runs_dict if generator_runs_dict is not None else {},
        metrics,
        data_selector=data_selector,
    )
    status_quo_arm = (
        None
        if plot_data.status_quo_name is None
        else plot_data.in_sample.get(plot_data.status_quo_name)
    )
    # iterate over all combinations of metrics and generate scatter traces
    for i, o1 in enumerate(metrics, start=1):
        for j, o2 in enumerate(metrics, start=1):
            if o1 != o2:
                # in-sample observed and predicted
                obs_insample_trace = _error_scatter_trace(
                    list(plot_data.in_sample.values()),
                    x_axis_var=PlotMetric(o1, pred=False, rel=rel),
                    y_axis_var=PlotMetric(o2, pred=False, rel=rel),
                    status_quo_arm=status_quo_arm,
                    showlegend=(i == 1 and j == 2),
                    legendgroup="In-sample",
                    visible=False,
                    show_arm_details_on_hover=show_arm_details_on_hover,
                )
                predicted_insample_trace = _error_scatter_trace(
                    list(plot_data.in_sample.values()),
                    x_axis_var=PlotMetric(o1, pred=True, rel=rel),
                    y_axis_var=PlotMetric(o2, pred=True, rel=rel),
                    status_quo_arm=status_quo_arm,
                    legendgroup="In-sample",
                    showlegend=(i == 1 and j == 2),
                    visible=True,
                    show_arm_details_on_hover=show_arm_details_on_hover,
                )
                fig.append_trace(obs_insample_trace, j, i)
                fig.append_trace(predicted_insample_trace, j, i)
                # iterate over models here
                for k, (generator_run_name, cand_arms) in enumerate(
                    (plot_data.out_of_sample or {}).items(), start=1
                ):
                    fig.append_trace(
                        _error_scatter_trace(
                            list(cand_arms.values()),
                            x_axis_var=PlotMetric(o1, pred=True, rel=rel),
                            y_axis_var=PlotMetric(o2, pred=True, rel=rel),
                            status_quo_arm=status_quo_arm,
                            name=generator_run_name,
                            color=DISCRETE_COLOR_SCALE[k],
                            showlegend=(i == 1 and j == 2),
                            legendgroup=generator_run_name,
                            show_arm_details_on_hover=show_arm_details_on_hover,
                        ),
                        j,
                        i,
                    )
            else:
                # if diagonal is set to True, add box plots
                fig.append_trace(
                    go.Box(
                        y=[arm.y[o1] for arm in plot_data.in_sample.values()],
                        name=None,
                        marker={"color": rgba(COLORS.STEELBLUE.value)},
                        showlegend=False,
                        legendgroup="In-sample",
                        visible=False,
                        hoverinfo="none",
                    ),
                    j,
                    i,
                )
                fig.append_trace(
                    go.Box(
                        y=[arm.y_hat[o1] for arm in plot_data.in_sample.values()],
                        name=None,
                        marker={"color": rgba(COLORS.STEELBLUE.value)},
                        showlegend=False,
                        legendgroup="In-sample",
                        hoverinfo="none",
                    ),
                    j,
                    i,
                )
                for k, (generator_run_name, cand_arms) in enumerate(
                    (plot_data.out_of_sample or {}).items(), start=1
                ):
                    fig.append_trace(
                        go.Box(
                            y=[arm.y_hat[o1] for arm in cand_arms.values()],
                            name=None,
                            marker={"color": rgba(DISCRETE_COLOR_SCALE[k])},
                            showlegend=False,
                            legendgroup=generator_run_name,
                            hoverinfo="none",
                        ),
                        j,
                        i,
                    )
    # Global layout: horizontal legend on top, plus two dropdowns (show/hide
    # CIs and modeled vs. in-sample values) that restyle every trace at once.
    fig["layout"].update(
        height=800,
        width=960,
        font={"size": 10},
        hovermode="closest",
        legend={
            "orientation": "h",
            "x": 0,
            "y": 1.05,
            "xanchor": "left",
            "yanchor": "middle",
        },
        updatemenus=[
            {
                "x": 0.35,
                "y": 1.08,
                "xanchor": "left",
                "yanchor": "middle",
                "buttons": [
                    {
                        "args": [
                            {
                                "error_x.width": 0,
                                "error_x.thickness": 0,
                                "error_y.width": 0,
                                "error_y.thickness": 0,
                            }
                        ],
                        "label": "No",
                        "method": "restyle",
                    },
                    {
                        "args": [
                            {
                                "error_x.width": 4,
                                "error_x.thickness": 2,
                                "error_y.width": 4,
                                "error_y.thickness": 2,
                            }
                        ],
                        "label": "Yes",
                        "method": "restyle",
                    },
                ],
            },
            {
                "x": 0.1,
                "y": 1.08,
                "xanchor": "left",
                "yanchor": "middle",
                "buttons": [
                    {
                        "args": [
                            {
                                "visible": (
                                    (
                                        [False, True]
                                        + [True] * len(plot_data.out_of_sample or {})
                                    )
                                    * (len(metrics) ** 2)
                                )
                            }
                        ],
                        "label": "Modeled",
                        "method": "restyle",
                    },
                    {
                        "args": [
                            {
                                "visible": (
                                    (
                                        [True, False]
                                        + [False] * len(plot_data.out_of_sample or {})
                                    )
                                    * (len(metrics) ** 2)
                                )
                            }
                        ],
                        "label": "In-sample",
                        "method": "restyle",
                    },
                ],
            },
        ],
        annotations=[
            {
                "x": 0.02,
                "y": 1.1,
                "xref": "paper",
                "yref": "paper",
                "text": "Type",
                "showarrow": False,
                "yanchor": "middle",
                "xanchor": "left",
            },
            {
                "x": 0.30,
                "y": 1.1,
                "xref": "paper",
                "yref": "paper",
                "text": "Show CI",
                "showarrow": False,
                "yanchor": "middle",
                "xanchor": "left",
            },
        ],
    )
    # add metric names to axes - add to each subplot if boxplots on the
    # diagonal and axes are not shared; else, add to the leftmost y-axes
    # and bottom x-axes.
    for i, o in enumerate(metrics):
        pos_x = len(metrics) * len(metrics) - len(metrics) + i + 1
        pos_y = 1 + (len(metrics) * i)
        fig["layout"]["xaxis{}".format(pos_x)].update(
            title=_wrap_metric(o), titlefont={"size": 10}
        )
        fig["layout"]["yaxis{}".format(pos_y)].update(
            title=_wrap_metric(o), titlefont={"size": 10}
        )
    # do not put x-axis ticks for boxplots
    boxplot_xaxes = []
    for trace in fig["data"]:
        if trace["type"] == "box":
            # stores the xaxes which correspond to boxplot subplots
            # since we use xaxis1, xaxis2, etc, in plotly.py
            boxplot_xaxes.append("xaxis{}".format(trace["xaxis"][1:]))
        else:
            # clear all error bars since default is no CI
            trace["error_x"].update(width=0, thickness=0)
            trace["error_y"].update(width=0, thickness=0)
    for xaxis in boxplot_xaxes:
        fig["layout"][xaxis]["showticklabels"] = False
    return AxPlotConfig(data=fig, plot_type=AxPlotTypes.GENERIC)
# Single metric fitted values
def _single_metric_traces(
    model: ModelBridge,
    metric: str,
    generator_runs_dict: TNullableGeneratorRunsDict,
    rel: bool,
    show_arm_details_on_hover: bool = True,
    showlegend: bool = True,
    show_CI: bool = True,
    arm_noun: str = "arm",
    fixed_features: Optional[ObservationFeatures] = None,
    data_selector: Optional[Callable[[Observation], bool]] = None,
) -> Traces:
    """Build scatter traces for one metric (y-axis) with arms on the x-axis.
    The first trace holds the in-sample arms' model predictions; each
    out-of-sample generator run contributes one additional trace in its own
    color.
    Args:
        model: model to draw predictions from.
        metric: name of metric to plot.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel: if True, plot relative predictions.
        show_arm_details_on_hover: if True, display
            parameterizations of arms on hover. Default is True.
        showlegend: if True, show legend for trace.
        show_CI: if True, render confidence intervals.
        arm_noun: noun to use instead of "arm" (e.g. group)
        fixed_features: Fixed features to use when making model predictions.
        data_selector: Function for selecting observations for plotting.
    """
    plot_data, _, _ = get_plot_data(
        model,
        generator_runs_dict or {},
        {metric},
        fixed_features=fixed_features,
        data_selector=data_selector,
    )
    sq_name = plot_data.status_quo_name
    status_quo_arm = None if sq_name is None else plot_data.in_sample.get(sq_name)
    traces = [
        _error_scatter_trace(
            list(plot_data.in_sample.values()),
            x_axis_var=None,
            y_axis_var=PlotMetric(metric, pred=True, rel=rel),
            status_quo_arm=status_quo_arm,
            legendgroup="In-sample",
            showlegend=showlegend,
            show_arm_details_on_hover=show_arm_details_on_hover,
            show_CI=show_CI,
            arm_noun=arm_noun,
        )
    ]
    # Candidates
    candidate_runs = (plot_data.out_of_sample or {}).items()
    for color_index, (run_name, cand_arms) in enumerate(candidate_runs, start=1):
        traces.append(
            _error_scatter_trace(
                list(cand_arms.values()),
                x_axis_var=None,
                y_axis_var=PlotMetric(metric, pred=True, rel=rel),
                status_quo_arm=status_quo_arm,
                name=run_name,
                color=DISCRETE_COLOR_SCALE[color_index],
                legendgroup=run_name,
                showlegend=showlegend,
                show_arm_details_on_hover=show_arm_details_on_hover,
                show_CI=show_CI,
                arm_noun=arm_noun,
            )
        )
    return traces
def plot_fitted(
    model: ModelBridge,
    metric: str,
    generator_runs_dict: TNullableGeneratorRunsDict = None,
    rel: bool = True,
    custom_arm_order: Optional[List[str]] = None,
    custom_arm_order_name: str = "Custom",
    show_CI: bool = True,
    data_selector: Optional[Callable[[Observation], bool]] = None,
) -> AxPlotConfig:
    """Plot fitted metrics.
    Args:
        model: model to use for predictions.
        metric: metric to plot predictions for.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel: if True, use relative effects. Default is True.
        custom_arm_order: a list of arm names in the
            order corresponding to how they should be plotted on the x-axis.
            If not None, this is the default ordering.
        custom_arm_order_name: name for custom ordering to
            show in the ordering dropdown. Default is 'Custom'.
        show_CI: if True, render confidence intervals.
        data_selector: Function for selecting observations for plotting.
    """
    traces = _single_metric_traces(
        model,
        metric,
        generator_runs_dict,
        rel,
        show_CI=show_CI,
        data_selector=data_selector,
    )
    # order arm name sorting arm numbers within batch
    names_by_arm = sorted(
        np.unique(np.concatenate([d["x"] for d in traces])),
        key=lambda x: arm_name_to_tuple(x),
    )
    # get arm names sorted by effect size
    names_by_effect = list(
        OrderedDict.fromkeys(
            np.concatenate([d["x"] for d in traces])
            .flatten()
            .take(np.argsort(np.concatenate([d["y"] for d in traces]).flatten()))
        )
    )
    # options for ordering arms (x-axis)
    xaxis_categoryorder = "array"
    xaxis_categoryarray = names_by_arm
    order_options = [
        {
            "args": [
                {"xaxis.categoryorder": "array", "xaxis.categoryarray": names_by_arm}
            ],
            "label": "Name",
            "method": "relayout",
        },
        {
            "args": [
                {"xaxis.categoryorder": "array", "xaxis.categoryarray": names_by_effect}
            ],
            "label": "Effect Size",
            "method": "relayout",
        },
    ]
    # if a custom order has been passed, default to that
    if custom_arm_order is not None:
        xaxis_categoryorder = "array"
        xaxis_categoryarray = custom_arm_order
        # Prepend the custom ordering so it is both the initial x-axis layout
        # and the first entry in the "Sort By" dropdown.
        order_options = [
            {
                "args": [
                    {
                        "xaxis.categoryorder": "array",
                        "xaxis.categoryarray": custom_arm_order,
                    }
                ],
                "label": custom_arm_order_name,
                "method": "relayout",
            }
            # Union[List[str...
        ] + order_options
    # Layout with a single "Sort By" dropdown that relayouts the x-axis order.
    layout = go.Layout(
        title="Predicted Outcomes",
        hovermode="closest",
        updatemenus=[
            {
                "x": 1.25,
                "y": 0.67,
                "buttons": list(order_options),
                "yanchor": "middle",
                "xanchor": "left",
            }
        ],
        yaxis={
            "zerolinecolor": "red",
            "title": "{}{}".format(metric, " (%)" if rel else ""),
        },
        xaxis={
            "tickangle": 45,
            "categoryorder": xaxis_categoryorder,
            "categoryarray": xaxis_categoryarray,
        },
        annotations=[
            {
                "x": 1.18,
                "y": 0.72,
                "xref": "paper",
                "yref": "paper",
                "text": "Sort By",
                "showarrow": False,
                "yanchor": "middle",
            }
        ],
        font={"size": 10},
    )
    fig = go.Figure(data=traces, layout=layout)
    return AxPlotConfig(data=fig, plot_type=AxPlotTypes.GENERIC)
def tile_fitted(
    model: ModelBridge,
    generator_runs_dict: TNullableGeneratorRunsDict = None,
    rel: bool = True,
    show_arm_details_on_hover: bool = False,
    show_CI: bool = True,
    arm_noun: str = "arm",
    metrics: Optional[List[str]] = None,
    fixed_features: Optional[ObservationFeatures] = None,
    data_selector: Optional[Callable[[Observation], bool]] = None,
) -> AxPlotConfig:
    """Tile version of fitted outcome plots.
    Args:
        model: model to use for predictions.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel: if True, use relative effects. Default is True.
        show_arm_details_on_hover: if True, display
            parameterizations of arms on hover. Default is False.
        show_CI: if True, render confidence intervals.
        arm_noun: noun to use instead of "arm" (e.g. group)
        metrics: List of metric names to restrict to when plotting.
        fixed_features: Fixed features to use when making model predictions.
        data_selector: Function for selecting observations for plotting.
    """
    metrics = metrics or list(model.metric_names)
    # Lay the metric subplots out on a two-column grid (one column if there
    # is only a single metric).
    nrows = int(np.ceil(len(metrics) / 2))
    ncols = min(len(metrics), 2)
    # make subplots (plot per row)
    fig = subplots.make_subplots(
        rows=nrows,
        cols=ncols,
        print_grid=False,
        shared_xaxes=False,
        shared_yaxes=False,
        subplot_titles=tuple(metrics),
        horizontal_spacing=0.05,
        vertical_spacing=0.30 / nrows,
    )
    name_order_args: Dict[str, Any] = {}
    name_order_axes: Dict[str, Dict[str, Any]] = {}
    effect_order_args: Dict[str, Any] = {}
    for i, metric in enumerate(metrics):
        data = _single_metric_traces(
            model,
            metric,
            generator_runs_dict,
            rel,
            showlegend=i == 0,
            show_arm_details_on_hover=show_arm_details_on_hover,
            show_CI=show_CI,
            arm_noun=arm_noun,
            fixed_features=fixed_features,
            data_selector=data_selector,
        )
        # order arm name sorting arm numbers within batch
        names_by_arm = sorted(
            np.unique(np.concatenate([d["x"] for d in data])),
            key=lambda x: arm_name_to_tuple(x),
        )
        # get arm names sorted by effect size
        names_by_effect = list(
            OrderedDict.fromkeys(
                np.concatenate([d["x"] for d in data])
                .flatten()
                .take(np.argsort(np.concatenate([d["y"] for d in data]).flatten()))
            )
        )
        # options for ordering arms (x-axis)
        # Note that xaxes need to be references as xaxis, xaxis2, xaxis3, etc.
        # for the purposes of updatemenus argument (dropdown) in layout.
        # However, when setting the initial ordering layout, the keys should be
        # xaxis1, xaxis2, xaxis3, etc. Note the discrepancy for the initial
        # axis.
        label = "" if i == 0 else i + 1
        name_order_args["xaxis{}.categoryorder".format(label)] = "array"
        name_order_args["xaxis{}.categoryarray".format(label)] = names_by_arm
        effect_order_args["xaxis{}.categoryorder".format(label)] = "array"
        effect_order_args["xaxis{}.categoryarray".format(label)] = names_by_effect
        name_order_axes["xaxis{}".format(i + 1)] = {
            "categoryorder": "array",
            "categoryarray": names_by_arm,
            "type": "category",
        }
        name_order_axes["yaxis{}".format(i + 1)] = {
            "ticksuffix": "%" if rel else "",
            "zerolinecolor": "red",
        }
        # Place this metric's traces into its grid cell (row-major order).
        for d in data:
            fig.append_trace(d, int(np.floor(i / ncols)) + 1, i % ncols + 1)
    order_options = [
        {"args": [name_order_args], "label": "Name", "method": "relayout"},
        {"args": [effect_order_args], "label": "Effect Size", "method": "relayout"},
    ]
    # if odd number of plots, need to manually remove the last blank subplot
    # generated by `subplots.make_subplots`
    if len(metrics) % 2 == 1:
        fig["layout"].pop("xaxis{}".format(nrows * ncols))
        fig["layout"].pop("yaxis{}".format(nrows * ncols))
    # allocate 400 px per plot
    fig["layout"].update(
        margin={"t": 0},
        hovermode="closest",
        updatemenus=[
            {
                "x": 0.15,
                "y": 1 + 0.40 / nrows,
                "buttons": order_options,
                "xanchor": "left",
                "yanchor": "middle",
            }
        ],
        font={"size": 10},
        width=650 if ncols == 1 else 950,
        height=300 * nrows,
        legend={
            "orientation": "h",
            "x": 0,
            "y": 1 + 0.20 / nrows,
            "xanchor": "left",
            "yanchor": "middle",
        },
        **name_order_axes,
    )
    # append dropdown annotations
    fig["layout"]["annotations"] = fig["layout"]["annotations"] + (
        {
            "x": 0.5,
            "y": 1 + 0.40 / nrows,
            "xref": "paper",
            "yref": "paper",
            "font": {"size": 14},
            "text": "Predicted Outcomes",
            "showarrow": False,
            "xanchor": "center",
            "yanchor": "middle",
        },
        {
            "x": 0.05,
            "y": 1 + 0.40 / nrows,
            "xref": "paper",
            "yref": "paper",
            "text": "Sort By",
            "showarrow": False,
            "xanchor": "left",
            "yanchor": "middle",
        },
    )
    fig = resize_subtitles(figure=fig, size=10)
    return AxPlotConfig(data=fig, plot_type=AxPlotTypes.GENERIC)
def interact_fitted_plotly(
    model: ModelBridge,
    generator_runs_dict: TNullableGeneratorRunsDict = None,
    rel: bool = True,
    show_arm_details_on_hover: bool = True,
    show_CI: bool = True,
    arm_noun: str = "arm",
    metrics: Optional[List[str]] = None,
    fixed_features: Optional[ObservationFeatures] = None,
    data_selector: Optional[Callable[[Observation], bool]] = None,
) -> go.Figure:
    """Interactive fitted outcome plots for each arm used in fitting the model.
    Choose the outcome to plot using a dropdown.
    Args:
        model: model to use for predictions.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel: if True, use relative effects. Default is True.
        show_arm_details_on_hover: if True, display
            parameterizations of arms on hover. Default is True.
        show_CI: if True, render confidence intervals.
        arm_noun: noun to use instead of "arm" (e.g. group)
        metrics: List of metric names to restrict to when plotting.
        fixed_features: Fixed features to use when making model predictions.
        data_selector: Function for selecting observations for plotting.

    Returns:
        A plotly Figure holding one group of traces per metric, plus a
        dropdown menu that toggles which metric's traces are visible.
    """
    # Each metric contributes one trace for the model fit plus, when
    # generator runs are supplied, one extra trace per generator run.
    traces_per_metric = (
        1 if generator_runs_dict is None else len(generator_runs_dict) + 1
    )
    metrics = sorted(metrics or model.metric_names)
    traces = []
    dropdown = []
    for i, metric in enumerate(metrics):
        data = _single_metric_traces(
            model,
            metric,
            generator_runs_dict,
            rel,
            showlegend=i == 0,
            show_arm_details_on_hover=show_arm_details_on_hover,
            show_CI=show_CI,
            arm_noun=arm_noun,
            fixed_features=fixed_features,
            data_selector=data_selector,
        )
        # Only the first metric's traces start visible; the dropdown below
        # flips visibility per metric.
        for d in data:
            d["visible"] = i == 0
            traces.append(d)
        # only the first two traces are visible (corresponding to first outcome
        # in dropdown)
        is_visible = [False] * (len(metrics) * traces_per_metric)
        for j in range((traces_per_metric * i), (traces_per_metric * (i + 1))):
            is_visible[j] = True
        # on dropdown change, restyle
        dropdown.append(
            {"args": ["visible", is_visible], "label": metric, "method": "restyle"}
        )
    layout = go.Layout(
        xaxis={"title": arm_noun.title(), "zeroline": False, "type": "category"},
        yaxis={
            "ticksuffix": "%" if rel else "",
            "title": ("Relative " if rel else "") + "Effect",
            "zeroline": True,
            "zerolinecolor": "red",
        },
        hovermode="closest",
        updatemenus=[
            {
                "buttons": dropdown,
                "x": 0.075,
                "xanchor": "left",
                "y": 1.1,
                "yanchor": "middle",
            }
        ],
        annotations=[
            {
                "font": {"size": 12},
                "showarrow": False,
                "text": "Metric",
                "x": 0.05,
                "xanchor": "right",
                "xref": "paper",
                "y": 1.1,
                "yanchor": "middle",
                "yref": "paper",
            }
        ],
        legend={
            "orientation": "h",
            "x": 0.065,
            "xanchor": "left",
            "y": 1.2,
            "yanchor": "middle",
        },
        height=500,
    )
    # When arms come from multiple sources, label the legend row that
    # distinguishes those sources.
    if traces_per_metric > 1:
        layout["annotations"] = layout["annotations"] + (
            {
                "font": {"size": 12},
                "showarrow": False,
                "text": "Arm Source",
                "x": 0.05,
                "xanchor": "right",
                "xref": "paper",
                "y": 1.2,
                "yanchor": "middle",
                "yref": "paper",
            },
        )
    return go.Figure(data=traces, layout=layout)
def interact_fitted(
    model: ModelBridge,
    generator_runs_dict: TNullableGeneratorRunsDict = None,
    rel: bool = True,
    show_arm_details_on_hover: bool = True,
    show_CI: bool = True,
    arm_noun: str = "arm",
    metrics: Optional[List[str]] = None,
    fixed_features: Optional[ObservationFeatures] = None,
    data_selector: Optional[Callable[[Observation], bool]] = None,
) -> AxPlotConfig:
    """Interactive fitted outcome plots for each arm used in fitting the model.
    Choose the outcome to plot using a dropdown.
    Args:
        model: model to use for predictions.
        generator_runs_dict: a mapping from
            generator run name to generator run.
        rel: if True, use relative effects. Default is True.
        show_arm_details_on_hover: if True, display
            parameterizations of arms on hover. Default is True.
        show_CI: if True, render confidence intervals.
        arm_noun: noun to use instead of "arm" (e.g. group)
        metrics: List of metric names to restrict to when plotting.
        fixed_features: Fixed features to use when making model predictions.
        data_selector: Function for selecting observations for plotting.
    """
    # Build the plotly figure first, then wrap it in Ax's generic plot
    # config container.
    fig = interact_fitted_plotly(
        model=model,
        generator_runs_dict=generator_runs_dict,
        rel=rel,
        show_arm_details_on_hover=show_arm_details_on_hover,
        show_CI=show_CI,
        arm_noun=arm_noun,
        metrics=metrics,
        fixed_features=fixed_features,
        data_selector=data_selector,
    )
    return AxPlotConfig(data=fig, plot_type=AxPlotTypes.GENERIC)
def tile_observations(
    experiment: Experiment,
    data: Optional[Data] = None,
    rel: bool = True,
    metrics: Optional[List[str]] = None,
    arm_names: Optional[List[str]] = None,
) -> AxPlotConfig:
    """
    Tiled plot with all observed outcomes.
    Will plot all observed arms. If data is provided will use that, otherwise
    will fetch data from experiment. Will plot all metrics in data unless a
    list is provided in metrics. If arm_names is provided will limit the plot
    to only arms in that list.
    Args:
        experiment: Experiment
        data: Data to use, otherwise will fetch data from experiment.
        rel: Plot relative values, if experiment has status quo.
        metrics: Limit results to this set of metrics.
        arm_names: Limit results to this set of arms.
    Returns: Plot config for the plot.
    """
    # Fall back to the experiment's own data when none was supplied.
    if data is None:
        data = experiment.fetch_data()
    # Optionally restrict the observations to the requested arms.
    if arm_names is not None:
        df = data.df
        data = Data(df[df["arm_name"].isin(arm_names)])
    # A Thompson-sampling model simply summarizes the raw observations.
    thompson_model = Models.THOMPSON(
        data=data,
        search_space=experiment.search_space,
        experiment=experiment,
    )
    # Relative effects only make sense when a status quo exists.
    use_rel = rel and (experiment.status_quo is not None)
    return tile_fitted(model=thompson_model, rel=use_rel, metrics=metrics)
| 34.878185 | 88 | 0.520555 |
fa77cbed1eed92dacd9aa7bb30fe2d387cd941d6 | 17,985 | py | Python | bob/db/swan/create.py | bioidiap/bob.db.swan | 676510d47cb08b65be04f51d45746127c36bf2ce | [
"BSD-3-Clause"
] | null | null | null | bob/db/swan/create.py | bioidiap/bob.db.swan | 676510d47cb08b65be04f51d45746127c36bf2ce | [
"BSD-3-Clause"
] | null | null | null | bob/db/swan/create.py | bioidiap/bob.db.swan | 676510d47cb08b65be04f51d45746127c36bf2ce | [
"BSD-3-Clause"
] | null | null | null | from os.path import join
import re
import pkg_resources
from bob.io.base import create_directories_safe
from .common import swan_file_metadata
def create_subparser(subparsers):
parser = subparsers.add_parser(
'create', help="Creates the PAD file lists of the dataset.")
parser.set_defaults(func=_create) # action
IDS = (('IDIAP_00055', 'NTNU_00053', 'IDIAP_00049', 'MPH-IND_00043',
'IDIAP_00031', 'NTNU_00045', 'IDIAP_00028', 'MPH-FRA_00001',
'NTNU_00013', 'NTNU_00026', 'NTNU_00032', 'MPH-IND_00001',
'NTNU_00024', 'IDIAP_00054', 'NTNU_00042'),
('IDIAP_00059', 'NTNU_00047', 'MPH-IND_00017', 'NTNU_00031',
'NTNU_00035', 'NTNU_00030', 'MPH-IND_00002', 'NTNU_00004',
'MPH-IND_00005', 'IDIAP_00011', 'MPH-IND_00007', 'IDIAP_00002',
'MPH-IND_00031', 'MPH-FRA_00002', 'IDIAP_00022'),
('NTNU_00041', 'MPH-IND_00024', 'IDIAP_00014', 'IDIAP_00020',
'NTNU_00029', 'MPH-IND_00028', 'MPH-IND_00041', 'IDIAP_00005',
'IDIAP_00029', 'IDIAP_00039', 'IDIAP_00038', 'NTNU_00040',
'IDIAP_00025', 'NTNU_00039', 'IDIAP_00017'),
('NTNU_00044', 'MPH-FRA_00007', 'MPH-IND_00012', 'NTNU_00002',
'IDIAP_00050', 'IDIAP_00034', 'IDIAP_00021', 'NTNU_00046',
'MPH-IND_00020', 'NTNU_00007', 'NTNU_00037', 'NTNU_00010',
'MPH-IND_00036', 'MPH-IND_00034', 'IDIAP_00043'),
('IDIAP_00048', 'MPH-IND_00032', 'IDIAP_00001', 'MPH-IND_00039',
'NTNU_00003', 'MPH-IND_00046', 'MPH-IND_00009', 'MPH-IND_00042',
'NTNU_00008', 'NTNU_00036', 'NTNU_00012', 'NTNU_00038', 'IDIAP_00040',
'IDIAP_00018', 'NTNU_00034'),
('MPH-IND_00045', 'IDIAP_00042', 'NTNU_00001', 'IDIAP_00010',
'NTNU_00019', 'MPH-IND_00044', 'MPH-IND_00051', 'MPH-IND_00018',
'NTNU_00018', 'IDIAP_00035', 'MPH-FRA_00003', 'MPH-IND_00025',
'MPH-FRA_00005', 'MPH-IND_00050', 'IDIAP_00026'),
('MPH-IND_00055', 'MPH-IND_00011', 'IDIAP_00052', 'MPH-IND_00023',
'IDIAP_00030', 'MPH-IND_00033', 'IDIAP_00046', 'MPH-IND_00030',
'MPH-IND_00016', 'IDIAP_00013', 'NTNU_00014', 'MPH-IND_00008',
'NTNU_00022', 'NTNU_00017', 'IDIAP_00041'),
('IDIAP_00027', 'NTNU_00052', 'IDIAP_00033', 'NTNU_00016', 'NTNU_00023',
'IDIAP_00016', 'MPH-IND_00015', 'MPH-IND_00047', 'IDIAP_00004',
'MPH-FRA_00006', 'IDIAP_00015', 'IDIAP_00032', 'MPH-IND_00010',
'MPH-IND_00013', 'NTNU_00054'),
('NTNU_00005', 'NTNU_00027', 'IDIAP_00051', 'MPH-IND_00048',
'NTNU_00028', 'MPH-IND_00038', 'MPH-IND_00006', 'NTNU_00033',
'NTNU_00025', 'NTNU_00020', 'NTNU_00051', 'MPH-IND_00004',
'IDIAP_00036', 'NTNU_00006', 'NTNU_00021'),
('IDIAP_00006', 'NTNU_00015', 'IDIAP_00019',
'IDIAP_00058', 'MPH-IND_00026', 'MPH-IND_00049',
'NTNU_00043', 'MPH-IND_00037', 'IDIAP_00047',
'IDIAP_00012', 'MPH-IND_00040', 'IDIAP_00060',
'MPH-IND_00014', 'IDIAP_00003', 'IDIAP_00024'))
BIO_FOLDS = (('1', (0, 1, 2), (3, 4, 5, 6, 7, 8, 9)),
('2', (3, 4, 5), (0, 1, 2, 6, 7, 8, 9)),
('3', (6, 7, 8), (0, 1, 2, 3, 4, 5, 9)),
('4', (0, 3, 6), (1, 2, 4, 5, 7, 8, 9)),
('5', (1, 4, 9), (0, 2, 3, 5, 6, 7, 8)),)
PAD_FOLDS = (('1', (2, 0, 5, 8, 4), (9, 7), (1, 6, 3)),
('2', (7, 8, 2, 9, 1), (6, 0), (4, 3, 5)),
('3', (8, 3, 6, 0, 1), (5, 2), (9, 7, 4)),
('4', (9, 4, 7, 0, 2), (6, 5), (1, 8, 3)),
('5', (8, 2, 9, 0, 1), (5, 4), (6, 7, 3)))
def get_ids(ids):
    """Flatten the given IDS group indices into one tuple of client ids."""
    collected = []
    for group_index in ids:
        collected.extend(IDS[group_index])
    return tuple(collected)
def empty_norm(folder):
    """Create an empty ``norm/train_world.lst`` file under ``folder``."""
    norm_dir = join(folder, 'norm')
    create_directories_safe(norm_dir)
    world_list = join(norm_dir, 'train_world.lst')
    # Touch the file so downstream tools find an (empty) world list.
    with open(world_list, 'w'):
        pass
def enrollment_probes(folder, files, group, pattern, ids, cls='enroll'):
    """Write one enrollment/probe/attack file list for a protocol group.

    Args:
        folder (str): Protocol folder under which the group directory lives.
        files (list): All dataset file paths, one per line (possibly with
            trailing newlines).
        group (str): Group name ('dev' or 'eval'); used as the sub-folder.
        pattern (str): Regex; only matching paths are written.
        ids: Client identifiers to keep; files of other clients are skipped.
        cls (str): 'enroll' writes for_models.lst, 'probe' writes
            for_probes.lst, and 'attack' writes for_scores.lst; also selects
            the per-line format expected by bob's file-list databases.
    """
    path = join(folder, group)
    create_directories_safe(path)
    if cls == 'probe':
        path = join(path, 'for_probes.lst')
    elif cls == 'attack':
        path = join(path, 'for_scores.lst')
    else:
        path = join(path, 'for_models.lst')
    regex = re.compile(pattern)
    files = filter(regex.search, files)
    with open(path, 'w') as f:
        for line in files:
            # NOTE: `path` is reused here to hold each file path (the output
            # list file is already open as `f`).
            path = line.strip()
            client_id = swan_file_metadata(path)[0].id
            if client_id not in ids:
                continue
            if cls == 'probe':
                f.write('{0} {1}\n'.format(path, client_id))
            elif cls == 'attack':
                # Attack type is the third path component,
                # e.g. pa-database/Eye/PA.EI.1/... -> PA.EI.1.
                attack_type = path.split('/')[2]
                f.write('{0} {1} {1} attack/{2}\n'.format(
                    path, client_id, attack_type))
            else:
                # The client id doubles as the model id for enrollment.
                f.write('{0} {1} {1}\n'.format(path, client_id))
def licit_protocols(
    out_folder, files, patterns, attack=False, modalities=('eye', 'face', 'voice')
):
    """Write biometric protocol lists for every BIO_FOLDS fold and modality.

    Args:
        out_folder (str): Prefix of the protocol folders; the modality and
            fold number are appended to it.
        files (list): All dataset file paths, one per line.
        patterns (dict): Maps (modality, 'enroll'/'probe') to the regex that
            selects the files serving that purpose.
        attack (bool): If True, probe files are written as presentation
            attacks (for_scores.lst) rather than genuine probes.
        modalities (tuple): Modalities to generate lists for.
    """
    for fold, dev_ids, eval_ids in BIO_FOLDS:
        for modality in modalities:
            folder = '{}_{}_f{}'.format(out_folder, modality, fold)
            # create empty norm folder
            empty_norm(folder)
            # create enrollments
            pattern = patterns[(modality, 'enroll')]
            enrollment_probes(folder, files, 'dev',
                              pattern, get_ids(dev_ids))
            enrollment_probes(folder, files, 'eval',
                              pattern, get_ids(eval_ids))
            # create probes
            pattern = patterns[(modality, 'probe')]
            enrollment_probes(folder, files, 'dev',
                              pattern, get_ids(dev_ids),
                              cls='attack' if attack else 'probe')
            enrollment_probes(folder, files, 'eval',
                              pattern, get_ids(eval_ids),
                              cls='attack' if attack else 'probe')
def pad_list(folder, files, bf, pa, ids):
    """Write ``for_real.lst`` and ``for_attack.lst`` for one PAD group.

    Args:
        folder (str): Output folder of the group; created if needed.
        files (list): All dataset file paths, one per line.
        bf (str): Regex selecting bona-fide (real) files.
        pa (str): Regex selecting presentation-attack files.
        ids: Client identifiers to keep; files of other clients are skipped.
    """
    create_directories_safe(folder)
    bf = re.compile(bf)
    pa = re.compile(pa)
    bf_files = filter(bf.search, files)
    pa_files = filter(pa.search, files)
    for name, lines in [
            ('for_real.lst', bf_files),
            ('for_attack.lst', pa_files),
    ]:
        with open(join(folder, name), 'w') as f:
            for line in lines:
                path = line.strip()
                client_id = swan_file_metadata(path)[0].id
                if client_id not in ids:
                    continue
                if name == 'for_real.lst':
                    f.write('{0} {1}\n'.format(path, client_id))
                else:
                    # Attack type is the third path component,
                    # e.g. pa-database/Voice/PA.V.4/... -> PA.V.4.
                    attack_type = path.split('/')[2]
                    f.write('{0} {1} {2}\n'.format(
                        path, client_id, attack_type))
def pad_protocols(out_folder, files, patterns):
    """Write PAD file lists for every fold defined in PAD_FOLDS."""
    for fold_name, train_ids, dev_ids, eval_ids in PAD_FOLDS:
        fold_folder = '{}_f{}'.format(out_folder, fold_name)
        group_ids = {'train': train_ids, 'dev': dev_ids, 'eval': eval_ids}
        # One list per group, using the group's own regexes and clients.
        for group in ('train', 'dev', 'eval'):
            pad_list(
                join(fold_folder, group), files,
                patterns[(group, 'bf')], patterns[(group, 'pa')],
                get_ids(group_ids[group]))
def bio_protocol_1(out_folder, files):
    """Biometric protocol 1: indoor (session 2) vs. outdoor (session 3)."""
    # This gives the variation for indoor versus outdoor:
    # enroll with session 2, probe with session 3.
    # Data Partition: 30% development and 70% evaluation.
    # 5 Folds
    # Enrollment: 2 images corresponding to 2 videos.
    # Probe: All Videos and Images.
    # For EYE biometrics: We can enroll Assisted and probe self capture.
    patterns = {
        ('eye', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_3\.mp4',
        ('face', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_1\.mp4',
        ('voice', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_2\.mp4',
        ('eye', 'probe'): r'.*session_03/iPhone/.*/.*_03_((0[6-9]|10)_p_3\.png|0[3-4]_p_3\.mp4)',
        ('face', 'probe'): r'.*session_03/iPhone/.*/.*_03_0[1-2]_p_1.*',
        ('voice', 'probe'): r'.*session_03/iPhone/.*/.*_03_0[3-4]_p_2.*',
    }
    licit_protocols(out_folder, files, patterns)
def bio_protocol_2(out_folder, files):
    """Biometric protocol 2: indoor controlled (session 1 vs. session 2)."""
    # This gives the variation for indoor controlled:
    # enroll with session 1, probe with session 2.
    # Data Partition: 30% development and 70% evaluation.
    # 5 Folds
    # Enrollment: 2 images corresponding to 2 videos.
    # Probe: All Videos and Images.
    # For EYE biometrics: We can enroll Assisted and probe self capture.
    patterns = {
        ('eye', 'enroll'): r'.*session_01/iPhone/.*/.*_01_0[1-2]_p_3\.mp4',
        ('face', 'enroll'): r'.*session_01/iPhone/.*/.*_01_0[1-2]_p_1\.mp4',
        ('voice', 'enroll'): r'.*session_01/iPhone/.*/.*_01_0[1-2]_p_2\.mp4',
        ('eye', 'probe'): r'.*session_02/iPhone/.*/.*_02_((0[6-9]|10)_p_3\.png|0[3-4]_p_3\.mp4)',
        ('face', 'probe'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_1.*',
        ('voice', 'probe'): r'.*session_02/iPhone/.*/.*_02_0[3-4]_p_2.*',
    }
    licit_protocols(out_folder, files, patterns)
def bio_protocol_3(out_folder, files):
    """Biometric protocol 3: controlled (session 2) vs. uncontrolled (3-6)."""
    # This gives the variation for indoor controlled versus indoor/outdoor
    # uncontrolled: enroll with session 2, probe with sessions 3-6.
    # Data Partition: 30% development and 70% evaluation.
    # 5 Folds
    # Enrollment: 2 images corresponding to 2 videos.
    # Probe: All Videos and Images.
    # For EYE biometrics: We can enroll Assisted and probe self capture.
    patterns = {
        ('eye', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_3\.mp4',
        ('face', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_1\.mp4',
        ('voice', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_2\.mp4',
        ('eye', 'probe'): r'.*session_0[3-6]/iPhone/.*/.*_0[3-6]_((0[6-9]|10)_p_3\.png|0[3-4]_p_3\.mp4)',
        ('face', 'probe'): r'.*session_0[3-6]/iPhone/.*/.*_0[3-6]_0[1-2]_p_1.*',
        ('voice', 'probe'): r'.*session_0[3-6]/iPhone/.*/.*_0[3-6]_0[3-4]_p_2.*',
    }
    licit_protocols(out_folder, files, patterns)
def bio_protocol_4(out_folder, files):
    """Biometric protocol 4: like protocol 3 but with talking faces only."""
    # This is just like protocol 3 but faces are talking faces (purpose 2
    # recordings, which contain both face and voice).
    patterns = {
        ('face', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_2\.mp4',
        ('face', 'probe'): r'.*session_0[3-6]/iPhone/.*/.*_0[3-6]_0[3-4]_p_2.*',
    }
    licit_protocols(out_folder, files, patterns, modalities=['face'])
def spoof_protocol_3(out_folder, files):
    """Spoof counterpart of protocol 3: probes are presentation attacks."""
    # Enrollment mirrors bio_protocol_3 (session 2); the probes come from
    # the presentation-attack part of the dataset instead.
    # Data Partition: 30% development and 70% evaluation.
    # 5 Folds
    # Enrollment: 2 images corresponding to 2 videos.
    # Probe: All Videos and Images.
    # For EYE biometrics: We can enroll Assisted and probe self capture.
    patterns = {
        ('eye', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_3\.mp4',
        ('face', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_1\.mp4',
        ('voice', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_2\.mp4',
        ('eye', 'probe'): r'pa-database/Eye/.*',
        ('face', 'probe'): r'pa-database/StillFace/.*',
        ('voice', 'probe'): r'pa-database/Voice/.*',
    }
    licit_protocols(out_folder, files, patterns, attack=True)
def spoof_protocol_4(out_folder, files):
    """Spoof counterpart of protocol 4: talking-face presentation attacks."""
    # spoof protocol for talking faces that matches bio_protocol_4
    patterns = {
        ('face', 'enroll'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_2\.mp4',
        ('face', 'probe'): r'pa-database/TalkingFace/.*',
    }
    licit_protocols(out_folder, files, patterns, attack=True, modalities=['face'])
def all_pad_protocols(out_folder, files):
# protocol 1
# eye
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-4]_p_3\.mp4',
('train', 'pa'): r'pa-database/Eye/PA\.EI\.1/.*',
('dev', 'pa'): r'pa-database/Eye/PA\.EI\.1/.*',
('eval', 'pa'): r'pa-database/Eye/PA\.EI\.1/.*',
}
pad_protocols(out_folder + 'pad_p1_pae1', files, patterns)
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-4]_p_3\.mp4',
('train', 'pa'): r'pa-database/Eye/PA\.EI\.4/.*',
('dev', 'pa'): r'pa-database/Eye/PA\.EI\.4/.*',
('eval', 'pa'): r'pa-database/Eye/PA\.EI\.4/.*',
}
pad_protocols(out_folder + 'pad_p1_pae4', files, patterns)
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-4]_p_3\.mp4',
('train', 'pa'): r'pa-database/Eye/PA\.EI\.5/.*',
('dev', 'pa'): r'pa-database/Eye/PA\.EI\.5/.*',
('eval', 'pa'): r'pa-database/Eye/PA\.EI\.5/.*',
}
pad_protocols(out_folder + 'pad_p1_pae5', files, patterns)
# face
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_1\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-2]_p_1\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-2]_p_1\.mp4',
('train', 'pa'): r'pa-database/TalkingFace/PA\.F\.1/.*',
('dev', 'pa'): r'pa-database/TalkingFace/PA\.F\.1/.*',
('eval', 'pa'): r'pa-database/TalkingFace/PA\.F\.1/.*',
}
pad_protocols(out_folder + 'pad_p1_paf1', files, patterns)
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-8]_p_2\.mp4',
('train', 'pa'): r'pa-database/TalkingFace/PA\.F\.5/.*',
('dev', 'pa'): r'pa-database/TalkingFace/PA\.F\.5/.*',
('eval', 'pa'): r'pa-database/TalkingFace/PA\.F\.5/.*',
}
pad_protocols(out_folder + 'pad_p1_paf5', files, patterns)
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-8]_p_2\.mp4',
('train', 'pa'): r'pa-database/TalkingFace/PA\.F\.6/.*',
('dev', 'pa'): r'pa-database/TalkingFace/PA\.F\.6/.*',
('eval', 'pa'): r'pa-database/TalkingFace/PA\.F\.6/.*',
}
pad_protocols(out_folder + 'pad_p1_paf6', files, patterns)
# voice
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-8]_p_2\.mp4',
('train', 'pa'): r'pa-database/Voice/PA\.V\.4/.*',
('dev', 'pa'): r'pa-database/Voice/PA\.V\.4/.*',
('eval', 'pa'): r'pa-database/Voice/PA\.V\.4/.*',
}
pad_protocols(out_folder + 'pad_p1_pav4', files, patterns)
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-8]_p_2\.mp4',
('train', 'pa'): r'pa-database/Voice/PA\.V\.7/.*',
('dev', 'pa'): r'pa-database/Voice/PA\.V\.7/.*',
('eval', 'pa'): r'pa-database/Voice/PA\.V\.7/.*',
}
pad_protocols(out_folder + 'pad_p1_pav7', files, patterns)
# protocol 2
# eye
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-4]_p_3\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-4]_p_3\.mp4',
('train', 'pa'): r'pa-database/Eye/.*',
('dev', 'pa'): r'pa-database/Eye/.*',
('eval', 'pa'): r'pa-database/Eye/.*',
}
pad_protocols(out_folder + 'pad_p2_eye', files, patterns)
# face
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_[1-2]\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_[1-2]\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-8]_p_[1-2]\.mp4',
('train', 'pa'): r'pa-database/TalkingFace/.*',
('dev', 'pa'): r'pa-database/TalkingFace/.*',
('eval', 'pa'): r'pa-database/TalkingFace/.*',
}
pad_protocols(out_folder + 'pad_p2_face', files, patterns)
# voice
patterns = {
('train', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('dev', 'bf'): r'.*session_02/iPhone/.*/.*_02_0[1-8]_p_2\.mp4',
('eval', 'bf'): r'.*session_0[2-6]/iPhone/.*/.*_0[2-6]_0[1-8]_p_2\.mp4',
('train', 'pa'): r'pa-database/Voice/.*',
('dev', 'pa'): r'pa-database/Voice/.*',
('eval', 'pa'): r'pa-database/Voice/.*',
}
pad_protocols(out_folder + 'pad_p2_voice', files, patterns)
def _create(args):
    """Create all biometric and PAD protocol file lists.

    This is the handler of the ``create`` sub-command registered in
    ``create_subparser``.

    Args:
        args: Parsed command-line arguments (unused).
    """
    # List all files of the dataset (without the extra data). Use a context
    # manager so the file handle is closed deterministically; the previous
    # implementation leaked the handle returned by open().
    list_path = pkg_resources.resource_filename(
        __name__, 'lists/swan_noextra.lst')
    with open(list_path) as f:
        files = f.readlines()
    # Create the protocols, in the same order as before: each entry maps a
    # resource sub-folder to the function that generates its lists.
    for resource, make_protocol in (
            ('lists/licit_p1', bio_protocol_1),
            ('lists/licit_p2', bio_protocol_2),
            ('lists/licit_p3', bio_protocol_3),
            ('lists/spoof_p3', spoof_protocol_3),
            ('lists/spoof_p4', spoof_protocol_4),
            ('lists/', all_pad_protocols),
            ('lists/licit_p4', bio_protocol_4),
    ):
        path = pkg_resources.resource_filename(__name__, resource)
        make_protocol(path, files)
32f4870fa68fb1669217869cecd13238035d7113 | 85,129 | py | Python | core/controllers/admin_test.py | queencai/oppia | c9a36db9c258588b04be9bc26f3d2efef7d21abc | [
"Apache-2.0"
] | null | null | null | core/controllers/admin_test.py | queencai/oppia | c9a36db9c258588b04be9bc26f3d2efef7d21abc | [
"Apache-2.0"
] | null | null | null | core/controllers/admin_test.py | queencai/oppia | c9a36db9c258588b04be9bc26f3d2efef7d21abc | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the admin page."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import logging
from constants import constants
from core import jobs
from core import jobs_registry
from core import jobs_test
from core.domain import collection_services
from core.domain import config_domain
from core.domain import config_services
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import opportunity_services
from core.domain import question_fetchers
from core.domain import recommendations_services
from core.domain import rights_manager
from core.domain import search_services
from core.domain import skill_services
from core.domain import stats_domain
from core.domain import stats_services
from core.domain import story_domain
from core.domain import story_fetchers
from core.domain import story_services
from core.domain import topic_domain
from core.domain import topic_fetchers
from core.domain import topic_services
from core.domain import user_services
from core.platform import models
from core.platform.taskqueue import gae_taskqueue_services as taskqueue_services
from core.tests import test_utils
import feconf
import utils
(exp_models, job_models, opportunity_models, audit_models) = (
models.Registry.import_models(
[models.NAMES.exploration, models.NAMES.job, models.NAMES.opportunity,
models.NAMES.audit]))
# Credentials for a test user that holds both the moderator and the admin
# roles at the same time.
BOTH_MODERATOR_AND_ADMIN_EMAIL = 'moderator.and.admin@example.com'
BOTH_MODERATOR_AND_ADMIN_USERNAME = 'moderatorandadm1n'
class SampleMapReduceJobManager(jobs.BaseMapReduceOneOffJobManager):
    """Test job that counts the total number of explorations."""

    @classmethod
    def entity_classes_to_map_over(cls):
        # Map over every stored exploration model.
        return [exp_models.ExplorationModel]

    @staticmethod
    def map(item):
        # Emit a constant under a single key so the reducer counts items.
        yield ('sum', 1)

    @staticmethod
    def reduce(key, values):
        total = 0
        for value in values:
            total += int(value)
        yield (key, total)
class AdminIntegrationTest(test_utils.GenericTestBase):
"""Server integration tests for operations on the admin page."""
def setUp(self):
"""Complete the signup process for self.ADMIN_EMAIL."""
super(AdminIntegrationTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
def test_admin_page_rights(self):
"""Test access rights to the admin page."""
self.get_html_response('/admin', expected_status_int=302)
# Login as a non-admin.
self.login(self.EDITOR_EMAIL)
self.get_html_response('/admin', expected_status_int=401)
self.logout()
# Login as an admin.
self.login(self.ADMIN_EMAIL, is_super_admin=True)
self.get_html_response('/admin')
self.logout()
def test_change_configuration_property(self):
"""Test that configuration properties can be changed."""
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
new_config_value = False
response_dict = self.get_json('/adminhandler')
response_config_properties = response_dict['config_properties']
self.assertDictContainsSubset({
'value': False,
}, response_config_properties[
config_domain.IS_IMPROVEMENTS_TAB_ENABLED.name])
payload = {
'action': 'save_config_properties',
'new_config_property_values': {
config_domain.IS_IMPROVEMENTS_TAB_ENABLED.name: (
new_config_value),
}
}
self.post_json('/adminhandler', payload, csrf_token=csrf_token)
response_dict = self.get_json('/adminhandler')
response_config_properties = response_dict['config_properties']
self.assertDictContainsSubset({
'value': new_config_value,
}, response_config_properties[
config_domain.IS_IMPROVEMENTS_TAB_ENABLED.name])
self.logout()
def test_cannot_reload_exploration_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot reload an exploration in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'reload_exploration',
'exploration_id': '2'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_load_new_structures_data_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot load new structures data in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_structures_data'
}, csrf_token=csrf_token)
self.logout()
def test_non_admins_cannot_load_new_structures_data(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
assert_raises_regexp = self.assertRaisesRegexp(
Exception, 'User does not have enough rights to generate data.')
with assert_raises_regexp:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_structures_data'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_generate_dummy_skill_data_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot generate dummy skills in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_skill_data'
}, csrf_token=csrf_token)
self.logout()
def test_non_admins_cannot_generate_dummy_skill_data(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
assert_raises_regexp = self.assertRaisesRegexp(
Exception, 'User does not have enough rights to generate data.')
with assert_raises_regexp:
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_skill_data'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_reload_collection_in_production_mode(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
assert_raises_regexp_context_manager = self.assertRaisesRegexp(
Exception, 'Cannot reload a collection in production.')
with assert_raises_regexp_context_manager, prod_mode_swap:
self.post_json(
'/adminhandler', {
'action': 'reload_collection',
'collection_id': '2'
}, csrf_token=csrf_token)
self.logout()
def test_reload_collection(self):
observed_log_messages = []
def _mock_logging_function(msg, *args):
"""Mocks logging.info()."""
observed_log_messages.append(msg % args)
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
collection_services.load_demo('0')
collection_rights = rights_manager.get_collection_rights('0')
self.assertFalse(collection_rights.community_owned)
with self.swap(logging, 'info', _mock_logging_function):
self.post_json(
'/adminhandler', {
'action': 'reload_collection',
'collection_id': '0'
}, csrf_token=csrf_token)
collection_rights = rights_manager.get_collection_rights('0')
self.assertTrue(collection_rights.community_owned)
self.assertEqual(
observed_log_messages,
[
'[ADMIN] %s reloaded collection 0' % self.admin_id,
'Collection with id 0 was loaded.'
]
)
self.logout()
def test_load_new_structures_data(self):
self.set_admins([self.ADMIN_USERNAME])
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_structures_data'
}, csrf_token=csrf_token)
topic_summaries = topic_services.get_all_topic_summaries()
self.assertEqual(len(topic_summaries), 2)
for summary in topic_summaries:
if summary.name == 'Dummy Topic 1':
topic_id = summary.id
story_id = (
topic_fetchers.get_topic_by_id(
topic_id).canonical_story_references[0].story_id)
self.assertIsNotNone(
story_fetchers.get_story_by_id(story_id, strict=False))
skill_summaries = skill_services.get_all_skill_summaries()
self.assertEqual(len(skill_summaries), 3)
questions, _, _ = (
question_fetchers.get_questions_and_skill_descriptions_by_skill_ids(
10, [
skill_summaries[0].id, skill_summaries[1].id,
skill_summaries[2].id], '')
)
self.assertEqual(len(questions), 3)
# Testing that there are 3 hindi translation opportunities
# available on the Contributor Dashboard. Hindi was picked arbitrarily,
# any language code other than english (what the dummy explorations
# were written in) can be tested here.
translation_opportunities, _, _ = (
opportunity_services.get_translation_opportunities('hi', None))
self.assertEqual(len(translation_opportunities), 3)
self.logout()
def test_generate_dummy_skill_and_questions_data(self):
self.set_admins([self.ADMIN_USERNAME])
self.login(self.ADMIN_EMAIL, is_super_admin=True)
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'generate_dummy_new_skill_data'
}, csrf_token=csrf_token)
skill_summaries = skill_services.get_all_skill_summaries()
self.assertEqual(len(skill_summaries), 1)
questions, _, _ = (
question_fetchers.get_questions_and_skill_descriptions_by_skill_ids(
20, [skill_summaries[0].id], '')
)
self.assertEqual(len(questions), 15)
self.logout()
    def test_flush_migration_bot_contributions_action(self):
        """Checks that the 'flush_migration_bot_contribution_data' action
        empties the migration bot's created/edited exploration id lists.
        """
        created_exploration_ids = ['exp_1', 'exp_2']
        edited_exploration_ids = ['exp_3', 'exp_4']
        user_services.create_user_contributions(
            feconf.MIGRATION_BOT_USER_ID, created_exploration_ids,
            edited_exploration_ids)
        # Sanity-check the fixture before flushing.
        migration_bot_contributions_model = (
            user_services.get_user_contributions(feconf.MIGRATION_BOT_USER_ID))
        self.assertEqual(
            migration_bot_contributions_model.created_exploration_ids,
            created_exploration_ids)
        self.assertEqual(
            migration_bot_contributions_model.edited_exploration_ids,
            edited_exploration_ids)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            '/adminhandler', {
                'action': 'flush_migration_bot_contribution_data'
            }, csrf_token=csrf_token)
        # After the flush both lists must be empty.
        migration_bot_contributions_model = (
            user_services.get_user_contributions(feconf.MIGRATION_BOT_USER_ID))
        self.assertEqual(
            migration_bot_contributions_model.created_exploration_ids, [])
        self.assertEqual(
            migration_bot_contributions_model.edited_exploration_ids, [])
    def test_regenerate_topic_related_opportunities_action(self):
        """Checks that 'regenerate_topic_related_opportunities' rebuilds the
        exploration opportunity models for a topic: the count stays at 1 but
        the model is recreated (newer creation time).
        """
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        topic_id = 'topic'
        story_id = 'story'
        # Build a published exploration linked to a topic via a story node,
        # which produces exactly one opportunity model.
        self.save_new_valid_exploration(
            '0', owner_id, title='title',
            end_state_name='End State')
        self.publish_exploration(owner_id, '0')
        topic = topic_domain.Topic.create_default_topic(
            topic_id, 'topic', 'abbrev', 'description')
        topic_services.save_new_topic(owner_id, topic)
        story = story_domain.Story.create_default_story(
            story_id, 'A story', 'Description', topic_id, 'story')
        story_services.save_new_story(owner_id, story)
        topic_services.add_canonical_story(
            owner_id, topic_id, story_id)
        story_services.update_story(
            owner_id, story_id, [story_domain.StoryChange({
                'cmd': 'add_story_node',
                'node_id': 'node_1',
                'title': 'Node1',
            }), story_domain.StoryChange({
                'cmd': 'update_story_node_property',
                'property_name': 'exploration_id',
                'node_id': 'node_1',
                'old_value': None,
                'new_value': '0'
            })], 'Changes.')
        all_opportunity_models = list(
            opportunity_models.ExplorationOpportunitySummaryModel.get_all())
        self.assertEqual(len(all_opportunity_models), 1)
        old_creation_time = all_opportunity_models[0].created_on
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        result = self.post_json(
            '/adminhandler', {
                'action': 'regenerate_topic_related_opportunities',
                'topic_id': 'topic'
            }, csrf_token=csrf_token)
        self.assertEqual(
            result, {
                'opportunities_count': 1
            })
        # Regeneration deletes and recreates the model, so the creation
        # timestamp must have moved forward.
        all_opportunity_models = list(
            opportunity_models.ExplorationOpportunitySummaryModel.get_all())
        self.assertEqual(len(all_opportunity_models), 1)
        new_creation_time = all_opportunity_models[0].created_on
        self.assertLess(old_creation_time, new_creation_time)
def test_admin_topics_csv_download_handler(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
response = self.get_custom_response(
'/admintopicscsvdownloadhandler', 'text/csv')
self.assertEqual(
response.headers['Content-Disposition'],
'attachment; filename=topic_similarities.csv')
self.assertIn(
'Architecture,Art,Biology,Business,Chemistry,Computing,Economics,'
'Education,Engineering,Environment,Geography,Government,Hobbies,'
'Languages,Law,Life Skills,Mathematics,Medicine,Music,Philosophy,'
'Physics,Programming,Psychology,Puzzles,Reading,Religion,Sport,'
'Statistics,Welcome',
response.body)
self.logout()
    def test_admin_job_output_handler(self):
        """Checks that '/adminjoboutput' returns None before a job runs,
        and the job's output once it has completed.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        self.save_new_valid_exploration('exp_id', self.admin_id)
        job_id = SampleMapReduceJobManager.create_new()
        SampleMapReduceJobManager.enqueue(job_id)
        self.assertEqual(
            self.count_jobs_in_taskqueue(
                taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
        # The job is enqueued but not processed yet, so no output exists.
        response = self.get_json('/adminjoboutput', params={'job_id': job_id})
        self.assertIsNone(response['output'])
        self.process_and_flush_pending_tasks()
        response = self.get_json('/adminjoboutput', params={'job_id': job_id})
        self.assertEqual(
            SampleMapReduceJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_COMPLETED)
        self.assertEqual(response['output'], ['[u\'sum\', 1]'])
        self.logout()
    def test_revert_config_property(self):
        """Checks that the 'revert_config_property' action restores a config
        property to its default value and logs the admin action.
        """
        observed_log_messages = []
        def _mock_logging_function(msg, *args):
            """Mocks logging.info()."""
            observed_log_messages.append(msg % args)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        # Flip the property away from its default so the revert is visible.
        config_services.set_property(self.admin_id, 'promo_bar_enabled', True)
        self.assertTrue(config_domain.PROMO_BAR_ENABLED.value)
        with self.swap(logging, 'info', _mock_logging_function):
            self.post_json(
                '/adminhandler', {
                    'action': 'revert_config_property',
                    'config_property_id': 'promo_bar_enabled'
                }, csrf_token=csrf_token)
        self.assertFalse(config_domain.PROMO_BAR_ENABLED.value)
        self.assertEqual(
            observed_log_messages,
            ['[ADMIN] %s reverted config property: promo_bar_enabled'
             % self.admin_id])
        self.logout()
def test_start_new_one_off_job(self):
self.login(self.ADMIN_EMAIL, is_super_admin=True)
self.assertEqual(
self.count_jobs_in_taskqueue(
taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 0)
with self.swap(
jobs_registry, 'ONE_OFF_JOB_MANAGERS', [SampleMapReduceJobManager]):
csrf_token = self.get_new_csrf_token()
self.post_json(
'/adminhandler', {
'action': 'start_new_job',
'job_type': 'SampleMapReduceJobManager'
}, csrf_token=csrf_token)
self.assertEqual(
self.count_jobs_in_taskqueue(
taskqueue_services.QUEUE_NAME_ONE_OFF_JOBS), 1)
self.logout()
    def test_cancel_one_off_job(self):
        """Checks that the 'cancel_job' action moves a started one-off job
        to the canceled state.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        job_id = SampleMapReduceJobManager.create_new()
        SampleMapReduceJobManager.enqueue(job_id)
        # Start (but do not finish) the job so it is cancellable.
        self.run_but_do_not_flush_pending_tasks()
        status = SampleMapReduceJobManager.get_status_code(job_id)
        self.assertEqual(status, job_models.STATUS_CODE_STARTED)
        with self.swap(
            jobs_registry, 'ONE_OFF_JOB_MANAGERS', [SampleMapReduceJobManager]):
            self.get_json('/adminhandler')
            csrf_token = self.get_new_csrf_token()
            self.post_json(
                '/adminhandler', {
                    'action': 'cancel_job',
                    'job_id': job_id,
                    'job_type': 'SampleMapReduceJobManager'
                }, csrf_token=csrf_token)
        status = SampleMapReduceJobManager.get_status_code(job_id)
        self.assertEqual(status, job_models.STATUS_CODE_CANCELED)
        self.logout()
    def test_start_computation(self):
        """Checks that the 'start_computation' action transitions a
        continuous computation from IDLE to RUNNING.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id')
        exp_services.save_new_exploration('owner_id', exploration)
        self.assertEqual(
            jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
        with self.swap(
            jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
            [jobs_test.StartExplorationEventCounter]):
            self.get_json('/adminhandler')
            csrf_token = self.get_new_csrf_token()
            self.post_json(
                '/adminhandler', {
                    'action': 'start_computation',
                    'computation_type': 'StartExplorationEventCounter'
                }, csrf_token=csrf_token)
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
        self.logout()
    def test_stop_computation_with_running_jobs(self):
        """Checks that the 'stop_computation' action moves a RUNNING
        continuous computation (with unflushed tasks) back to IDLE.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id')
        exp_services.save_new_exploration('owner_id', exploration)
        self.assertEqual(
            jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
        jobs_test.StartExplorationEventCounter.start_computation()
        # Leave tasks pending so the computation is still mid-run.
        self.run_but_do_not_flush_pending_tasks()
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
        with self.swap(
            jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
            [jobs_test.StartExplorationEventCounter]):
            self.get_json('/adminhandler')
            csrf_token = self.get_new_csrf_token()
            self.post_json(
                '/adminhandler', {
                    'action': 'stop_computation',
                    'computation_type': 'StartExplorationEventCounter'
                }, csrf_token=csrf_token)
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
        self.logout()
    def test_stop_computation_with_finished_jobs(self):
        """Checks that the 'stop_computation' action moves a RUNNING
        continuous computation (whose tasks have all flushed) back to IDLE.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id')
        exp_services.save_new_exploration('owner_id', exploration)
        self.assertEqual(
            jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
        jobs_test.StartExplorationEventCounter.start_computation()
        # Flush everything so the batch jobs have finished.
        self.process_and_flush_pending_tasks()
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
        with self.swap(
            jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
            [jobs_test.StartExplorationEventCounter]):
            self.get_json('/adminhandler')
            csrf_token = self.get_new_csrf_token()
            self.post_json(
                '/adminhandler', {
                    'action': 'stop_computation',
                    'computation_type': 'StartExplorationEventCounter'
                }, csrf_token=csrf_token)
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
        self.logout()
    def test_stop_computation_with_stopped_jobs(self):
        """Checks that the 'stop_computation' action is a no-op (stays IDLE)
        when the computation has already been stopped directly.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id')
        exp_services.save_new_exploration('owner_id', exploration)
        self.assertEqual(
            jobs_test.StartExplorationEventCounter.get_count('exp_id'), 0)
        jobs_test.StartExplorationEventCounter.start_computation()
        self.run_but_do_not_flush_pending_tasks()
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_RUNNING)
        # Stop it out-of-band first, then exercise the handler.
        jobs_test.StartExplorationEventCounter.stop_computation(self.admin_id)
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
        with self.swap(
            jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
            [jobs_test.StartExplorationEventCounter]):
            self.get_json('/adminhandler')
            csrf_token = self.get_new_csrf_token()
            self.post_json(
                '/adminhandler', {
                    'action': 'stop_computation',
                    'computation_type': 'StartExplorationEventCounter'
                }, csrf_token=csrf_token)
        status = jobs_test.StartExplorationEventCounter.get_status_code()
        self.assertEqual(
            status, job_models.CONTINUOUS_COMPUTATION_STATUS_CODE_IDLE)
        self.logout()
    def test_upload_topic_similarities(self):
        """Checks that the 'upload_topic_similarities' action replaces the
        stored topic similarity values with the uploaded CSV data.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        # Baseline similarity values before the upload.
        self.assertEqual(recommendations_services.get_topic_similarity(
            'Art', 'Biology'), 0.1)
        self.assertEqual(recommendations_services.get_topic_similarity(
            'Art', 'Art'), feconf.SAME_TOPIC_SIMILARITY)
        self.assertEqual(recommendations_services.get_topic_similarity(
            'Topic 1', 'Topic 2'), feconf.DEFAULT_TOPIC_SIMILARITY)
        self.assertEqual(recommendations_services.get_topic_similarity(
            'Topic', 'Topic'), feconf.SAME_TOPIC_SIMILARITY)
        # CSV: header row of topic names, then a symmetric similarity matrix.
        topic_similarities_data = (
            'Art,Biology,Chemistry\n'
            '1.0,0.2,0.1\n'
            '0.2,1.0,0.8\n'
            '0.1,0.8,1.0'
        )
        self.post_json(
            '/adminhandler', {
                'action': 'upload_topic_similarities',
                'data': topic_similarities_data
            }, csrf_token=csrf_token)
        # Art/Biology similarity now reflects the uploaded matrix.
        self.assertEqual(recommendations_services.get_topic_similarity(
            'Art', 'Biology'), 0.2)
        self.logout()
class GenerateDummyExplorationsTest(test_utils.GenericTestBase):
    """Test the conditions for generation of dummy explorations."""
    def setUp(self):
        """Signs up the admin user used by every test in this class."""
        super(GenerateDummyExplorationsTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
    def test_generate_count_greater_than_publish_count(self):
        """Generating 10 and publishing 3 should leave 10 explorations,
        3 of them published.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            '/adminhandler', {
                'action': 'generate_dummy_explorations',
                'num_dummy_exps_to_generate': 10,
                'num_dummy_exps_to_publish': 3
            }, csrf_token=csrf_token)
        generated_exps = exp_services.get_all_exploration_summaries()
        published_exps = exp_services.get_recently_published_exp_summaries(5)
        self.assertEqual(len(generated_exps), 10)
        self.assertEqual(len(published_exps), 3)
    def test_generate_count_equal_to_publish_count(self):
        """Generating and publishing the same count (2) is allowed."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            '/adminhandler', {
                'action': 'generate_dummy_explorations',
                'num_dummy_exps_to_generate': 2,
                'num_dummy_exps_to_publish': 2
            }, csrf_token=csrf_token)
        generated_exps = exp_services.get_all_exploration_summaries()
        published_exps = exp_services.get_recently_published_exp_summaries(5)
        self.assertEqual(len(generated_exps), 2)
        self.assertEqual(len(published_exps), 2)
    def test_generate_count_less_than_publish_count(self):
        """Asking to publish more explorations than are generated is a 400
        and must create nothing.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        generated_exps_response = self.post_json(
            '/adminhandler', {
                'action': 'generate_dummy_explorations',
                'num_dummy_exps_to_generate': 2,
                'num_dummy_exps_to_publish': 5
            },
            csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(generated_exps_response['status_code'], 400)
        generated_exps = exp_services.get_all_exploration_summaries()
        published_exps = exp_services.get_recently_published_exp_summaries(5)
        self.assertEqual(len(generated_exps), 0)
        self.assertEqual(len(published_exps), 0)
    def test_handler_raises_error_with_non_int_num_dummy_exps_to_generate(self):
        """A non-numeric generate-count must raise and create nothing."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        with self.assertRaisesRegexp(
            Exception, 'invalid_type is not a number'):
            self.post_json(
                '/adminhandler', {
                    'action': 'generate_dummy_explorations',
                    'num_dummy_exps_to_publish': 1,
                    'num_dummy_exps_to_generate': 'invalid_type'
                }, csrf_token=csrf_token)
        generated_exps = exp_services.get_all_exploration_summaries()
        published_exps = exp_services.get_recently_published_exp_summaries(5)
        self.assertEqual(generated_exps, {})
        self.assertEqual(published_exps, {})
        self.logout()
    def test_handler_raises_error_with_non_int_num_dummy_exps_to_publish(self):
        """A non-numeric publish-count must raise and create nothing."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        with self.assertRaisesRegexp(
            Exception, 'invalid_type is not a number'):
            self.post_json(
                '/adminhandler', {
                    'action': 'generate_dummy_explorations',
                    'num_dummy_exps_to_publish': 'invalid_type',
                    'num_dummy_exps_to_generate': 1
                }, csrf_token=csrf_token)
        generated_exps = exp_services.get_all_exploration_summaries()
        published_exps = exp_services.get_recently_published_exp_summaries(5)
        self.assertEqual(generated_exps, {})
        self.assertEqual(published_exps, {})
        self.logout()
    def test_cannot_generate_dummy_explorations_in_prod_mode(self):
        """Dummy-exploration generation is dev-only; with DEV_MODE swapped
        off the handler must raise and create nothing.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        prod_mode_swap = self.swap(constants, 'DEV_MODE', False)
        assert_raises_regexp_context_manager = self.assertRaisesRegexp(
            Exception, 'Cannot generate dummy explorations in production.')
        with assert_raises_regexp_context_manager, prod_mode_swap:
            self.post_json(
                '/adminhandler', {
                    'action': 'generate_dummy_explorations',
                    'num_dummy_exps_to_generate': 10,
                    'num_dummy_exps_to_publish': 3
                }, csrf_token=csrf_token)
        generated_exps = exp_services.get_all_exploration_summaries()
        published_exps = exp_services.get_recently_published_exp_summaries(5)
        self.assertEqual(generated_exps, {})
        self.assertEqual(published_exps, {})
        self.logout()
class AdminRoleHandlerTest(test_utils.GenericTestBase):
    """Checks the user role handling on the admin page."""
    def setUp(self):
        """Complete the signup process for self.ADMIN_EMAIL."""
        super(AdminRoleHandlerTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.set_admins([self.ADMIN_USERNAME])
    def test_view_and_update_role(self):
        """Checks viewing a user's role by username, updating it to
        moderator, and then viewing users by role.
        """
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        # Check normal user has expected role. Viewing by username.
        response_dict = self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={'filter_criterion': 'username', 'username': 'user1'})
        self.assertEqual(
            response_dict, {'user1': feconf.ROLE_ID_EXPLORATION_EDITOR})
        # Check role correctly gets updated.
        csrf_token = self.get_new_csrf_token()
        response_dict = self.post_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            {'role': feconf.ROLE_ID_MODERATOR, 'username': username},
            csrf_token=csrf_token,
            expected_status_int=200)
        self.assertEqual(response_dict, {})
        # Viewing by role.
        response_dict = self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={
                'filter_criterion': 'role',
                'role': feconf.ROLE_ID_MODERATOR
            })
        self.assertEqual(response_dict, {'user1': feconf.ROLE_ID_MODERATOR})
        self.logout()
    def test_invalid_username_in_filter_criterion_and_update_role(self):
        """Viewing or updating the role of a non-existent user is a 400."""
        username = 'myinvaliduser'
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        # Trying to view role of non-existent user.
        self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={'filter_criterion': 'username', 'username': username},
            expected_status_int=400)
        # Trying to update role of non-existent user.
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            {'role': feconf.ROLE_ID_MODERATOR, 'username': username},
            csrf_token=csrf_token,
            expected_status_int=400)
    def test_cannot_view_role_with_invalid_view_filter_criterion(self):
        """An unknown filter_criterion value is rejected with a 400."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={'filter_criterion': 'invalid', 'username': 'user1'},
            expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid filter criterion to view roles.')
    def test_changing_user_role_from_topic_manager_to_moderator(self):
        """Checks updating a topic manager's role to moderator."""
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        self.set_topic_managers([username])
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response_dict = self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={'filter_criterion': 'username', 'username': username})
        self.assertEqual(
            response_dict, {username: feconf.ROLE_ID_TOPIC_MANAGER})
        # Check role correctly gets updated.
        csrf_token = self.get_new_csrf_token()
        response_dict = self.post_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            {'role': feconf.ROLE_ID_MODERATOR, 'username': username},
            csrf_token=csrf_token)
        self.assertEqual(response_dict, {})
        response_dict = self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={'filter_criterion': 'username', 'username': username})
        self.assertEqual(response_dict, {username: feconf.ROLE_ID_MODERATOR})
        self.logout()
    def test_changing_user_role_from_exploration_editor_to_topic_manager(self):
        """Checks updating an exploration editor's role to topic manager,
        which additionally requires a topic_id in the request.
        """
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        user_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        topic_id = topic_services.get_new_topic_id()
        # A topic must exist for the topic-manager assignment to target.
        self.save_new_topic(
            topic_id, user_id, name='Name',
            abbreviated_name='abbrev', url_fragment='url-fragment',
            description='Description', canonical_story_ids=[],
            additional_story_ids=[], uncategorized_skill_ids=[],
            subtopics=[], next_subtopic_id=1)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response_dict = self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={'filter_criterion': 'username', 'username': username})
        self.assertEqual(
            response_dict, {username: feconf.ROLE_ID_EXPLORATION_EDITOR})
        # Check role correctly gets updated.
        csrf_token = self.get_new_csrf_token()
        response_dict = self.post_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            {'role': feconf.ROLE_ID_TOPIC_MANAGER, 'username': username,
             'topic_id': topic_id}, csrf_token=csrf_token)
        self.assertEqual(response_dict, {})
        response_dict = self.get_json(
            feconf.ADMIN_ROLE_HANDLER_URL,
            params={'filter_criterion': 'username', 'username': username})
        self.assertEqual(
            response_dict, {username: feconf.ROLE_ID_TOPIC_MANAGER})
        self.logout()
class ExplorationsLatexSvgHandlerTest(test_utils.GenericTestBase):
    """Tests for Saving Math SVGs in explorations."""
    def setUp(self):
        """Complete the signup process for self.ADMIN_EMAIL."""
        super(ExplorationsLatexSvgHandlerTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.set_admins([self.ADMIN_USERNAME])
    def test_get_latex_to_svg_mapping(self):
        """Checks that the GET handler returns the exp-id-to-LaTeX-strings
        mapping. Only the first batch of explorations is returned (exp_id3
        is absent from the expected response).
        """
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        multiple_explorations_math_rich_text_info = []
        math_rich_text_info1 = (
            exp_domain.ExplorationMathRichTextInfo(
                'exp_id1', True, ['abc1', 'xyz1']))
        multiple_explorations_math_rich_text_info.append(math_rich_text_info1)
        math_rich_text_info2 = (
            exp_domain.ExplorationMathRichTextInfo(
                'exp_id2', True, ['abc2', 'xyz2']))
        multiple_explorations_math_rich_text_info.append(math_rich_text_info2)
        math_rich_text_info3 = (
            exp_domain.ExplorationMathRichTextInfo(
                'exp_id3', True, ['abc3', 'xyz3']))
        multiple_explorations_math_rich_text_info.append(math_rich_text_info3)
        exp_services.save_multi_exploration_math_rich_text_info_model(
            multiple_explorations_math_rich_text_info)
        response_dict = self.get_json(
            feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
            params={'item_to_fetch': 'exp_id_to_latex_mapping'})
        expected_response = {
            'exp_id1': ['abc1', 'xyz1'],
            'exp_id2': ['abc2', 'xyz2']
        }
        self.assertEqual(
            response_dict,
            {'latex_strings_to_exp_id_mapping': expected_response})
    def test_get_when_invalid_item_to_fetch_item_given(self):
        """An unrecognized 'item_to_fetch' value is rejected with a 400."""
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response_dict = self.get_json(
            feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
            params={'item_to_fetch': 'invalid'},
            expected_status_int=400)
        self.assertIn(
            'Please specify a valid type of item to fetch.',
            response_dict['error'])
    def test_get_number_explorations_left_to_update(self):
        """Checks the count of explorations still needing SVGs (returned as
        a string) after saving three rich-text-info models.
        """
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        multiple_explorations_math_rich_text_info = []
        math_rich_text_info1 = (
            exp_domain.ExplorationMathRichTextInfo(
                'exp_id1', True, ['abc1', 'xyz1']))
        multiple_explorations_math_rich_text_info.append(math_rich_text_info1)
        math_rich_text_info2 = (
            exp_domain.ExplorationMathRichTextInfo(
                'exp_id2', True, ['abc2', 'xyz2']))
        multiple_explorations_math_rich_text_info.append(math_rich_text_info2)
        math_rich_text_info3 = (
            exp_domain.ExplorationMathRichTextInfo(
                'exp_id3', True, ['abc3', 'xyz3']))
        multiple_explorations_math_rich_text_info.append(math_rich_text_info3)
        exp_services.save_multi_exploration_math_rich_text_info_model(
            multiple_explorations_math_rich_text_info)
        response_dict = self.get_json(
            feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
            params={'item_to_fetch': 'number_of_explorations_left_to_update'})
        self.assertEqual(
            response_dict,
            {'number_of_explorations_left_to_update': '3'})
    def test_post_svgs_when_all_values_are_valid(self):
        """Posting SVG files for every LaTeX string in the mapping should
        succeed and report one exploration updated, zero left.
        """
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        editor_id = self.get_user_id_from_email(user_email)
        # Maps exp id -> LaTeX string -> uploaded-file id and dimensions.
        post_data = {
            'exp_id1': {
                '+,+,+,+': {
                    'latexId': 'latex_id1',
                    'dimensions': {
                        'encoded_height_string': '1d429',
                        'encoded_width_string': '1d33',
                        'encoded_vertical_padding_string': '0d241'
                    }
                },
                '\\frac{x}{y}': {
                    'latexId': 'latex_id2',
                    'dimensions': {
                        'encoded_height_string': '1d525',
                        'encoded_width_string': '3d33',
                        'encoded_vertical_padding_string': '0d241'
                    }
                }
            }
        }
        csrf_token = self.get_new_csrf_token()
        svg_file_1 = (
            '<svg xmlns="http://www.w3.org/2000/svg" width="1.33ex" height="1.4'
            '29ex" viewBox="0 -511.5 572.5 615.4" focusable="false" style="vert'
            'ical-align: -0.241ex;"><g stroke="currentColor" fill="currentColo'
            'r" stroke-width="0" transform="matrix(1 0 0 -1 0 0)"><path stroke'
            '-width="1" d="M52 289Q59 331 106 386T222 442Q257 442 2864Q412 404'
            ' 406 402Q368 386 350 336Q290 115 290 78Q290 50 306 38T341 26Q37'
            '8 26 414 59T463 140Q466 150 469 151T485 153H489Q504 153 504 145284'
            ' 52 289Z"/></g></svg>'
        )
        svg_file_2 = (
            '<svg xmlns="http://www.w3.org/2000/svg" width="3.33ex" height="1.5'
            '25ex" viewBox="0 -511.5 572.5 615.4" focusable="false" style="vert'
            'ical-align: -0.241ex;"><g stroke="currentColor" fill="currentColo'
            'r" stroke-width="0" transform="matrix(1 0 0 -1 0 0)"><path stroke'
            '-width="1" d="M52 289Q59 331 106 386T222 442Q257 442 2864Q412 404'
            ' 406 402Q368 386 350 336Q290 115 290 78Q290 50 306 38T341 26Q37'
            '8 26 414 59T463 140Q466 150 469 151T485 153H489Q504 153 504 145284'
            ' 52 289Z"/></g></svg>'
        )
        exploration1 = exp_domain.Exploration.create_default_exploration(
            'exp_id1', title='title1', category='category')
        exp_services.save_new_exploration(editor_id, exploration1)
        exp_models.ExplorationMathRichTextInfoModel(
            id='exp_id1',
            math_images_generation_required=True,
            latex_strings_without_svg=['+,+,+,+', '\\frac{x}{y}'],
            estimated_max_size_of_images_in_bytes=20000).put()
        response_dict = self.post_json(
            feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
            {'latexMapping': post_data},
            csrf_token=csrf_token,
            upload_files=(
                ('latex_id1', 'latex_id1', svg_file_1),
                ('latex_id2', 'latex_id2', svg_file_2), ),
            expected_status_int=200)
        self.assertEqual(
            response_dict,
            {
                'number_of_explorations_updated': '1',
                'number_of_explorations_left_to_update': '0'
            })
        self.logout()
    def test_post_svgs_when_some_images_are_not_supplied(self):
        """Posting a mapping with a missing SVG upload (latex_id2 has no
        file) is a 400 naming the unsupplied LaTeX string.
        """
        user_email = 'user1@example.com'
        username = 'user1'
        self.signup(user_email, username)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        editor_id = self.get_user_id_from_email(user_email)
        post_data = {
            'exp_id1': {
                '+,+,+,+': {
                    'latexId': 'latex_id1',
                    'dimensions': {
                        'encoded_height_string': '1d429',
                        'encoded_width_string': '1d33',
                        'encoded_vertical_padding_string': '0d241'
                    }
                },
                '\\frac{x}{y}': {
                    'latexId': 'latex_id2',
                    'dimensions': {
                        'encoded_height_string': '1d525',
                        'encoded_width_string': '3d33',
                        'encoded_vertical_padding_string': '0d241'
                    }
                }
            }
        }
        # Check role correctly gets updated.
        csrf_token = self.get_new_csrf_token()
        svg_file_1 = (
            '<svg xmlns="http://www.w3.org/2000/svg" width="1.33ex" height="1.4'
            '29ex" viewBox="0 -511.5 572.5 615.4" focusable="false" style="vert'
            'ical-align: -0.241ex;"><g stroke="currentColor" fill="currentColo'
            'r" stroke-width="0" transform="matrix(1 0 0 -1 0 0)"><path stroke'
            '-width="1" d="M52 289Q59 331 106 386T222 442Q257 442 2864Q412 404'
            ' 406 402Q368 386 350 336Q290 115 290 78Q290 50 306 38T341 26Q37'
            '8 26 414 59T463 140Q466 150 469 151T485 153H489Q504 153 504 145284'
            ' 52 289Z"/></g></svg>'
        )
        exploration1 = exp_domain.Exploration.create_default_exploration(
            'exp_id1', title='title1', category='category')
        exp_services.save_new_exploration(editor_id, exploration1)
        response_dict = self.post_json(
            feconf.EXPLORATIONS_LATEX_SVG_HANDLER,
            {'latexMapping': post_data},
            csrf_token=csrf_token,
            upload_files=(
                ('latex_id1', 'latex_id1', svg_file_1),),
            expected_status_int=400)
        self.assertIn(
            'SVG for LaTeX string \\frac{x}{y} in exploration exp_id1 is not '
            'supplied.', response_dict['error'])
        self.logout()
class DataExtractionQueryHandlerTests(test_utils.GenericTestBase):
    """Tests for data extraction handler."""
    # Exploration id shared by all tests in this class.
    EXP_ID = 'exp'
    def setUp(self):
        """Complete the signup process for self.ADMIN_EMAIL."""
        super(DataExtractionQueryHandlerTests, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
        self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
        self.exploration = self.save_new_valid_exploration(
            self.EXP_ID, self.editor_id, end_state_name='End')
        # Record two answers on the init state so extraction has data.
        stats_services.record_answer(
            self.EXP_ID, self.exploration.version,
            self.exploration.init_state_name, 'TextInput',
            stats_domain.SubmittedAnswer(
                'first answer', 'TextInput', 0,
                0, exp_domain.EXPLICIT_CLASSIFICATION, {},
                'a_session_id_val', 1.0))
        stats_services.record_answer(
            self.EXP_ID, self.exploration.version,
            self.exploration.init_state_name, 'TextInput',
            stats_domain.SubmittedAnswer(
                'second answer', 'TextInput', 0,
                0, exp_domain.EXPLICIT_CLASSIFICATION, {},
                'a_session_id_val', 1.0))
    def test_data_extraction_handler(self):
        """Checks that the handler returns all answers when num_answers is
        0, and truncates to num_answers otherwise.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        # Test that it returns all answers when 'num_answers' is 0.
        payload = {
            'exp_id': self.EXP_ID,
            'exp_version': self.exploration.version,
            'state_name': self.exploration.init_state_name,
            'num_answers': 0
        }
        response = self.get_json(
            '/explorationdataextractionhandler', params=payload)
        extracted_answers = response['data']
        self.assertEqual(len(extracted_answers), 2)
        self.assertEqual(extracted_answers[0]['answer'], 'first answer')
        self.assertEqual(extracted_answers[1]['answer'], 'second answer')
        # Make sure that it returns only 'num_answers' number of answers.
        payload = {
            'exp_id': self.EXP_ID,
            'exp_version': self.exploration.version,
            'state_name': self.exploration.init_state_name,
            'num_answers': 1
        }
        response = self.get_json(
            '/explorationdataextractionhandler', params=payload)
        extracted_answers = response['data']
        self.assertEqual(len(extracted_answers), 1)
        self.assertEqual(extracted_answers[0]['answer'], 'first answer')
    def test_that_handler_raises_exception(self):
        """A state name not present in the exploration is a 400."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        payload = {
            'exp_id': self.EXP_ID,
            'exp_version': self.exploration.version,
            'state_name': 'state name',
            'num_answers': 0
        }
        response = self.get_json(
            '/explorationdataextractionhandler', params=payload,
            expected_status_int=400)
        self.assertEqual(
            response['error'],
            'Exploration \'exp\' does not have \'state name\' state.')
    def test_handler_raises_error_with_invalid_exploration_id(self):
        """An unknown exploration id is a 400 naming the missing entity."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        payload = {
            'exp_id': 'invalid_exp_id',
            'state_name': 'state name',
            'exp_version': 1,
            'num_answers': 0
        }
        response = self.get_json(
            '/explorationdataextractionhandler', params=payload,
            expected_status_int=400)
        self.assertEqual(
            response['error'],
            'Entity for exploration with id invalid_exp_id and version 1 not '
            'found.')
    def test_handler_raises_error_with_invalid_exploration_version(self):
        """A version the exploration never reached is a 400."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        payload = {
            'exp_id': self.EXP_ID,
            'state_name': 'state name',
            'exp_version': 10,
            'num_answers': 0
        }
        response = self.get_json(
            '/explorationdataextractionhandler', params=payload,
            expected_status_int=400)
        self.assertEqual(
            response['error'],
            'Entity for exploration with id %s and version 10 not found.'
            % self.EXP_ID)
class ClearSearchIndexTest(test_utils.GenericTestBase):
    """Tests that search index gets cleared."""
    def test_clear_search_index(self):
        """Checks that the 'clear_search_index' admin action empties both
        the exploration and collection search indices.
        """
        # Seed the indices with the demo exploration and collection, and
        # confirm they are searchable beforehand.
        exp_services.load_demo('0')
        result_explorations = search_services.search_explorations(
            'Welcome', 2)[0]
        self.assertEqual(result_explorations, ['0'])
        collection_services.load_demo('0')
        result_collections = search_services.search_collections('Welcome', 2)[0]
        self.assertEqual(result_collections, ['0'])
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        generated_exps_response = self.post_json(
            '/adminhandler', {
                'action': 'clear_search_index'
            },
            csrf_token=csrf_token)
        self.assertEqual(generated_exps_response, {})
        # Both searches must now come back empty.
        result_explorations = search_services.search_explorations(
            'Welcome', 2)[0]
        self.assertEqual(result_explorations, [])
        result_collections = search_services.search_collections('Welcome', 2)[0]
        self.assertEqual(result_collections, [])
class SendDummyMailTest(test_utils.GenericTestBase):
    """Tests for sending test mails to admin."""
    def setUp(self):
        """Signs up the admin user used by the test."""
        super(SendDummyMailTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
    def test_send_dummy_mail(self):
        """Checks that the dummy-mail handler succeeds when emails are
        enabled and returns a 400 when they are disabled.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        with self.swap(feconf, 'CAN_SEND_EMAILS', True):
            generated_response = self.post_json(
                '/senddummymailtoadminhandler', {},
                csrf_token=csrf_token, expected_status_int=200)
            self.assertEqual(generated_response, {})
        with self.swap(feconf, 'CAN_SEND_EMAILS', False):
            generated_response = self.post_json(
                '/senddummymailtoadminhandler', {},
                csrf_token=csrf_token, expected_status_int=400)
            self.assertEqual(
                generated_response['error'], 'This app cannot send emails.')
class UpdateUsernameHandlerTest(test_utils.GenericTestBase):
    """Tests for the handler that lets a super admin change a username."""

    OLD_USERNAME = 'oldUsername'
    NEW_USERNAME = 'newUsername'

    def setUp(self):
        super(UpdateUsernameHandlerTest, self).setUp()
        # Sign up the admin under the old username and open a super-admin
        # session; every test below issues its PUT as this user.
        self.signup(self.ADMIN_EMAIL, self.OLD_USERNAME)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)

    def test_update_username_with_none_new_username(self):
        """A missing new username is rejected with a 400 error."""
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/updateusernamehandler',
            {
                'old_username': self.OLD_USERNAME,
                'new_username': None},
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid request: A new username must be '
            'specified.')

    def test_update_username_with_none_old_username(self):
        """A missing old username is rejected with a 400 error."""
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/updateusernamehandler',
            {
                'old_username': None,
                'new_username': self.NEW_USERNAME},
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid request: The old username must be '
            'specified.')

    def test_update_username_with_non_string_new_username(self):
        """A non-string new username is rejected with a 400 error."""
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/updateusernamehandler',
            {
                'old_username': self.OLD_USERNAME,
                'new_username': 123},
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            response['error'], 'Expected new username to be a unicode '
            'string, received 123')

    def test_update_username_with_non_string_old_username(self):
        """A non-string old username is rejected with a 400 error."""
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/updateusernamehandler',
            {
                'old_username': 123,
                'new_username': self.NEW_USERNAME},
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            response['error'], 'Expected old username to be a unicode '
            'string, received 123')

    def test_update_username_with_long_new_username(self):
        """A new username over the maximum length is rejected."""
        # One character longer than the allowed maximum.
        long_username = 'a' * (constants.MAX_USERNAME_LENGTH + 1)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/updateusernamehandler',
            {
                'old_username': self.OLD_USERNAME,
                'new_username': long_username},
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            response['error'], 'Expected new username to be less than %s '
            'characters, received %s' % (
                constants.MAX_USERNAME_LENGTH,
                long_username))

    def test_update_username_with_nonexistent_old_username(self):
        """An old username that matches no account is rejected."""
        non_existent_username = 'invalid'
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/updateusernamehandler',
            {
                'old_username': non_existent_username,
                'new_username': self.NEW_USERNAME},
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(response['error'], 'Invalid username: invalid')

    def test_update_username_with_new_username_already_taken(self):
        """A new username equal to an existing one is rejected."""
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/updateusernamehandler',
            {
                'old_username': self.OLD_USERNAME,
                'new_username': self.OLD_USERNAME},
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(response['error'], 'Username already taken.')

    def test_update_username(self):
        """A valid request actually renames the user."""
        user_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        self.put_json(
            '/updateusernamehandler',
            {
                'old_username': self.OLD_USERNAME,
                'new_username': self.NEW_USERNAME},
            csrf_token=csrf_token)
        self.assertEqual(user_services.get_username(user_id), self.NEW_USERNAME)

    def test_update_username_creates_audit_model(self):
        """A successful rename writes a UsernameChangeAuditModel entry."""
        user_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        creation_time_in_millisecs = utils.get_current_time_in_millisecs()
        mock_get_current_time_in_millisecs = lambda: creation_time_in_millisecs
        # Since the UsernameChangeAuditModel's ID is formed from the user ID and
        # a millisecond timestamp we need to make sure that
        # get_current_time_in_millisecs returns the same value as we have saved
        # into current_time_in_millisecs. If we don't force the same value via
        # swap flakes can occur, since as the time flows the saved milliseconds
        # can differ from the milliseconds saved into the
        # UsernameChangeAuditModel's ID.
        with self.swap(
            utils, 'get_current_time_in_millisecs',
            mock_get_current_time_in_millisecs):
            self.put_json(
                '/updateusernamehandler',
                {
                    'old_username': self.OLD_USERNAME,
                    'new_username': self.NEW_USERNAME},
                csrf_token=csrf_token)

            # The audit model must reference the renamed user...
            self.assertTrue(
                audit_models.UsernameChangeAuditModel.has_reference_to_user_id(
                    user_id))

            # ...and be retrievable under its '<user_id>.<millis>' ID with
            # the old and new usernames recorded.
            model_id = '%s.%d' % (user_id, creation_time_in_millisecs)
            username_change_audit_model = (
                audit_models.UsernameChangeAuditModel.get(model_id))

            self.assertEqual(username_change_audit_model.committer_id, user_id)
            self.assertEqual(
                username_change_audit_model.old_username, self.OLD_USERNAME)
            self.assertEqual(
                username_change_audit_model.new_username, self.NEW_USERNAME)
class AddContributionReviewerHandlerTest(test_utils.GenericTestBase):
    """Tests related to add reviewers for contributor's
    suggestion/application.
    """

    TRANSLATION_REVIEWER_EMAIL = 'translationreviewer@example.com'
    VOICEOVER_REVIEWER_EMAIL = 'voiceoverreviewer@example.com'
    QUESTION_REVIEWER_EMAIL = 'questionreviewer@example.com'

    def setUp(self):
        super(AddContributionReviewerHandlerTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        # One prospective reviewer account per review category.
        self.signup(self.TRANSLATION_REVIEWER_EMAIL, 'translator')
        self.signup(self.VOICEOVER_REVIEWER_EMAIL, 'voiceartist')
        self.signup(self.QUESTION_REVIEWER_EMAIL, 'question')
        self.translation_reviewer_id = self.get_user_id_from_email(
            self.TRANSLATION_REVIEWER_EMAIL)
        self.voiceover_reviewer_id = self.get_user_id_from_email(
            self.VOICEOVER_REVIEWER_EMAIL)
        self.question_reviewer_id = self.get_user_id_from_email(
            self.QUESTION_REVIEWER_EMAIL)

    def test_add_reviewer_with_invalid_username_raise_error(self):
        """An unknown username is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'invalid',
                'review_category': 'translation',
                'language_code': 'en'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid username: invalid')

    def test_add_translation_reviewer(self):
        """A valid request grants translation-review rights for the
        requested language.
        """
        self.assertFalse(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()

        self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'translator',
                'review_category': 'translation',
                'language_code': 'hi'
            }, csrf_token=csrf_token)

        self.assertTrue(user_services.can_review_translation_suggestions(
            self.translation_reviewer_id, language_code='hi'))

    def test_add_translation_reviewer_in_invalid_language_raise_error(self):
        """An unrecognized language code is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'translator',
                'review_category': 'translation',
                'language_code': 'invalid'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid language_code: invalid')

    def test_assigning_same_language_for_translation_review_raise_error(self):
        """Granting the same language twice is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        self.assertFalse(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))
        csrf_token = self.get_new_csrf_token()
        # First grant succeeds.
        self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'translator',
                'review_category': 'translation',
                'language_code': 'hi'
            }, csrf_token=csrf_token)
        self.assertTrue(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))
        # Second, identical grant fails.
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'translator',
                'review_category': 'translation',
                'language_code': 'hi'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'],
            'User translator already has rights to review translation in '
            'language code hi')

    def test_add_voiceover_reviewer(self):
        """A valid request grants voiceover-review rights for the
        requested language.
        """
        self.assertFalse(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'voiceartist',
                'review_category': 'voiceover',
                'language_code': 'hi'
            }, csrf_token=csrf_token)

        self.assertTrue(user_services.can_review_voiceover_applications(
            self.voiceover_reviewer_id, language_code='hi'))

    def test_add_voiceover_reviewer_in_invalid_language(self):
        """An unrecognized language code is rejected and grants nothing."""
        self.assertFalse(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'voiceartist',
                'review_category': 'voiceover',
                'language_code': 'invalid'
            }, csrf_token=csrf_token, expected_status_int=400)

        self.assertEqual(
            response['error'], 'Invalid language_code: invalid')
        self.assertFalse(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))

    def test_assigning_same_language_for_voiceover_review_raise_error(self):
        """Granting the same language twice is rejected with a 400 error."""
        self.assertFalse(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        # First grant succeeds.
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'voiceartist',
                'review_category': 'voiceover',
                'language_code': 'hi'
            }, csrf_token=csrf_token)
        self.assertTrue(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))

        # Second, identical grant fails.
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'voiceartist',
                'review_category': 'voiceover',
                'language_code': 'hi'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'],
            'User voiceartist already has rights to review voiceover in '
            'language code hi')

    def test_add_question_reviewer(self):
        """A valid request grants question-review rights."""
        self.assertFalse(user_services.can_review_question_suggestions(
            self.question_reviewer_id))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'question',
                'review_category': 'question'
            }, csrf_token=csrf_token)

        self.assertTrue(user_services.can_review_question_suggestions(
            self.question_reviewer_id))

    def test_assigning_same_user_as_question_reviewer_raise_error(self):
        """Granting question rights twice is rejected with a 400 error."""
        self.assertFalse(user_services.can_review_question_suggestions(
            self.question_reviewer_id))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        # First grant succeeds.
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'question',
                'review_category': 'question'
            }, csrf_token=csrf_token)
        self.assertTrue(user_services.can_review_question_suggestions(
            self.question_reviewer_id))

        # Second, identical grant fails.
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'question',
                'review_category': 'question'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'],
            'User question already has rights to review question.')

    def test_add_reviewer_for_invalid_review_category_raise_error(self):
        """An unrecognized review category is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.post_json(
            '/addcontributionreviewerhandler', {
                'username': 'question',
                'review_category': 'invalid'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid review_category: invalid')
class RemoveContributionReviewerHandlerTest(test_utils.GenericTestBase):
    """Tests related to remove reviewers from contributor dashboard page."""

    TRANSLATION_REVIEWER_EMAIL = 'translationreviewer@example.com'
    VOICEOVER_REVIEWER_EMAIL = 'voiceoverreviewer@example.com'
    QUESTION_REVIEWER_EMAIL = 'questionreviewer@example.com'

    def setUp(self):
        super(RemoveContributionReviewerHandlerTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        # One reviewer account per review category.
        self.signup(self.TRANSLATION_REVIEWER_EMAIL, 'translator')
        self.signup(self.VOICEOVER_REVIEWER_EMAIL, 'voiceartist')
        self.signup(self.QUESTION_REVIEWER_EMAIL, 'question')
        self.translation_reviewer_id = self.get_user_id_from_email(
            self.TRANSLATION_REVIEWER_EMAIL)
        self.voiceover_reviewer_id = self.get_user_id_from_email(
            self.VOICEOVER_REVIEWER_EMAIL)
        self.question_reviewer_id = self.get_user_id_from_email(
            self.QUESTION_REVIEWER_EMAIL)

    # Renamed from test_add_reviewer_without_username_raise_error: this
    # class exercises the *remove* handler; the old name was a copy-paste
    # leftover from AddContributionReviewerHandlerTest.
    def test_remove_reviewer_without_username_raise_error(self):
        """A request without a username is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'removal_type': 'all'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(response['error'], 'Missing username param')

    # Renamed from test_add_reviewer_with_invalid_username_raise_error
    # (copy-paste leftover; see note above).
    def test_remove_reviewer_with_invalid_username_raise_error(self):
        """An unknown username is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'invalid',
                'removal_type': 'all'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid username: invalid')

    def test_remove_translation_reviewer(self):
        """Removing a specific translation language revokes that right."""
        self.assertFalse(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))
        user_services.allow_user_to_review_translation_in_language(
            self.translation_reviewer_id, 'hi')
        self.assertTrue(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))

        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'translator',
                'removal_type': 'specific',
                'review_category': 'translation',
                'language_code': 'hi'
            }, csrf_token=csrf_token)

        self.assertFalse(user_services.can_review_translation_suggestions(
            self.translation_reviewer_id, language_code='hi'))

    def test_remove_translation_reviewer_in_invalid_language_raise_error(self):
        """An unrecognized language code is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'translator',
                'removal_type': 'specific',
                'review_category': 'translation',
                'language_code': 'invalid'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid language_code: invalid')

    def test_remove_unassigned_translation_reviewer_raise_error(self):
        """Revoking a right the user never had is rejected."""
        self.assertFalse(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'translator',
                'removal_type': 'specific',
                'review_category': 'translation',
                'language_code': 'hi'
            }, csrf_token=csrf_token, expected_status_int=400)

        self.assertEqual(
            response['error'],
            'translator does not have rights to review translation in language '
            'hi.')

    def test_remove_voiceover_reviewer(self):
        """Removing a specific voiceover language revokes that right."""
        self.assertFalse(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))
        user_services.allow_user_to_review_voiceover_in_language(
            self.voiceover_reviewer_id, 'hi')
        self.assertTrue(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))

        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'voiceartist',
                'removal_type': 'specific',
                'review_category': 'voiceover',
                'language_code': 'hi'
            }, csrf_token=csrf_token)

        # Fixed: the final check previously asserted on
        # translation_reviewer_id, which never had voiceover rights, so it
        # passed vacuously. The removal target is the voiceover reviewer.
        self.assertFalse(user_services.can_review_voiceover_applications(
            self.voiceover_reviewer_id, language_code='hi'))

    def test_remove_voiceover_reviewer_in_invalid_language_raise_error(self):
        """An unrecognized language code is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'voiceartist',
                'removal_type': 'specific',
                'review_category': 'voiceover',
                'language_code': 'invalid'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid language_code: invalid')

    def test_remove_unassigned_voiceover_reviewer_raise_error(self):
        """Revoking a right the user never had is rejected."""
        # Fixed: the precondition previously checked translation_reviewer_id;
        # the request below targets the voiceover reviewer ('voiceartist').
        self.assertFalse(
            user_services.can_review_voiceover_applications(
                self.voiceover_reviewer_id, language_code='hi'))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'voiceartist',
                'removal_type': 'specific',
                'review_category': 'voiceover',
                'language_code': 'hi'
            }, csrf_token=csrf_token, expected_status_int=400)

        self.assertEqual(
            response['error'],
            'voiceartist does not have rights to review voiceover in language '
            'hi.')

    def test_remove_question_reviewer(self):
        """Removing the question category revokes question-review rights."""
        user_services.allow_user_to_review_question(self.question_reviewer_id)
        self.assertTrue(user_services.can_review_question_suggestions(
            self.question_reviewer_id))

        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'question',
                'removal_type': 'specific',
                'review_category': 'question'
            }, csrf_token=csrf_token)

        self.assertFalse(user_services.can_review_question_suggestions(
            self.question_reviewer_id))

    def test_removing_unassigned_question_reviewer_raise_error(self):
        """Revoking question rights the user never had is rejected."""
        self.assertFalse(user_services.can_review_question_suggestions(
            self.question_reviewer_id))
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'question',
                'removal_type': 'specific',
                'review_category': 'question'
            }, csrf_token=csrf_token, expected_status_int=400)

        self.assertEqual(
            response['error'],
            'question does not have rights to review question.')

    def test_remove_reviewer_for_invalid_review_category_raise_error(self):
        """An unrecognized review category is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'question',
                'removal_type': 'specific',
                'review_category': 'invalid'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid review_category: invalid')

    def test_remove_reviewer_for_invalid_removal_type_raise_error(self):
        """An unrecognized removal type is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        response = self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'question',
                'removal_type': 'invalid'
            }, csrf_token=csrf_token, expected_status_int=400)
        self.assertEqual(
            response['error'], 'Invalid removal_type: invalid')

    def test_remove_reviewer_from_all_reviewable_items(self):
        """removal_type 'all' revokes every review right at once."""
        # Grant all three categories to the same user.
        user_services.allow_user_to_review_question(
            self.translation_reviewer_id)
        self.assertTrue(user_services.can_review_question_suggestions(
            self.translation_reviewer_id))

        user_services.allow_user_to_review_voiceover_in_language(
            self.translation_reviewer_id, 'hi')
        self.assertTrue(
            user_services.can_review_voiceover_applications(
                self.translation_reviewer_id, language_code='hi'))

        user_services.allow_user_to_review_translation_in_language(
            self.translation_reviewer_id, 'hi')
        self.assertTrue(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))

        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        csrf_token = self.get_new_csrf_token()
        self.put_json(
            '/removecontributionreviewerhandler', {
                'username': 'translator',
                'removal_type': 'all'
            }, csrf_token=csrf_token)

        # All three rights should now be gone.
        self.assertFalse(user_services.can_review_question_suggestions(
            self.translation_reviewer_id))
        self.assertFalse(
            user_services.can_review_voiceover_applications(
                self.translation_reviewer_id, language_code='hi'))
        self.assertFalse(
            user_services.can_review_translation_suggestions(
                self.translation_reviewer_id, language_code='hi'))
class ContributionReviewersListHandlerTest(test_utils.GenericTestBase):
    """Tests ContributionReviewersListHandler."""

    TRANSLATION_REVIEWER_EMAIL = 'translationreviewer@example.com'
    VOICEOVER_REVIEWER_EMAIL = 'voiceoverreviewer@example.com'
    QUESTION_REVIEWER_EMAIL = 'questionreviewer@example.com'

    def setUp(self):
        super(ContributionReviewersListHandlerTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        # One reviewer account per review category.
        self.signup(self.TRANSLATION_REVIEWER_EMAIL, 'translator')
        self.signup(self.VOICEOVER_REVIEWER_EMAIL, 'voiceartist')
        self.signup(self.QUESTION_REVIEWER_EMAIL, 'question')
        self.translation_reviewer_id = self.get_user_id_from_email(
            self.TRANSLATION_REVIEWER_EMAIL)
        self.voiceover_reviewer_id = self.get_user_id_from_email(
            self.VOICEOVER_REVIEWER_EMAIL)
        self.question_reviewer_id = self.get_user_id_from_email(
            self.QUESTION_REVIEWER_EMAIL)

    def test_check_contribution_reviewer_by_translation_reviewer_role(self):
        """Lists every user with translation-review rights for a language."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        user_services.allow_user_to_review_translation_in_language(
            self.translation_reviewer_id, 'hi')
        user_services.allow_user_to_review_translation_in_language(
            self.voiceover_reviewer_id, 'hi')
        response = self.get_json(
            '/getcontributionreviewershandler', params={
                'review_category': 'translation',
                'language_code': 'hi'
            })

        # assertIn gives clearer failure messages than assertTrue(x in y).
        self.assertEqual(len(response['usernames']), 2)
        self.assertIn('translator', response['usernames'])
        self.assertIn('voiceartist', response['usernames'])

    def test_check_contribution_reviewer_by_voiceover_reviewer_role(self):
        """Lists every user with voiceover-review rights for a language."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        user_services.allow_user_to_review_voiceover_in_language(
            self.translation_reviewer_id, 'hi')
        user_services.allow_user_to_review_voiceover_in_language(
            self.voiceover_reviewer_id, 'hi')
        response = self.get_json(
            '/getcontributionreviewershandler', params={
                'review_category': 'voiceover',
                'language_code': 'hi'
            })

        self.assertEqual(len(response['usernames']), 2)
        self.assertIn('translator', response['usernames'])
        self.assertIn('voiceartist', response['usernames'])

    def test_check_contribution_reviewer_by_question_reviewer_role(self):
        """Lists every user with question-review rights."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        user_services.allow_user_to_review_question(self.question_reviewer_id)
        user_services.allow_user_to_review_question(self.voiceover_reviewer_id)
        response = self.get_json(
            '/getcontributionreviewershandler', params={
                'review_category': 'question'
            })

        self.assertEqual(len(response['usernames']), 2)
        self.assertIn('question', response['usernames'])
        self.assertIn('voiceartist', response['usernames'])

    def test_check_contribution_reviewer_with_invalid_language_code_raise_error(
            self):
        """An unrecognized language code is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.get_json(
            '/getcontributionreviewershandler', params={
                'review_category': 'voiceover',
                'language_code': 'invalid'
            }, expected_status_int=400)

        self.assertEqual(response['error'], 'Invalid language_code: invalid')
        self.logout()

    def test_check_contribution_reviewer_with_invalid_review_category_raise_error( # pylint: disable=line-too-long
            self):
        """An unrecognized review category is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.get_json(
            '/getcontributionreviewershandler', params={
                'review_category': 'invalid',
                'language_code': 'hi'
            }, expected_status_int=400)

        self.assertEqual(response['error'], 'Invalid review_category: invalid')
        self.logout()
class ContributionReviewerRightsDataHandlerTest(test_utils.GenericTestBase):
    """Tests ContributionReviewerRightsDataHandler."""

    REVIEWER_EMAIL = 'reviewer@example.com'

    def setUp(self):
        super(ContributionReviewerRightsDataHandlerTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.signup(self.REVIEWER_EMAIL, 'reviewer')
        self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)

    def test_check_contribution_reviewer_rights(self):
        """The handler reports a user's review rights before and after
        they are granted.
        """
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        # A fresh user has no review rights in any category.
        response = self.get_json(
            '/contributionreviewerrightsdatahandler', params={
                'username': 'reviewer'
            })
        self.assertEqual(
            response['can_review_translation_for_language_codes'], [])
        self.assertEqual(
            response['can_review_voiceover_for_language_codes'], [])
        self.assertEqual(response['can_review_questions'], False)

        # Grant all three categories, then re-query.
        user_services.allow_user_to_review_question(self.reviewer_id)
        user_services.allow_user_to_review_voiceover_in_language(
            self.reviewer_id, 'hi')
        user_services.allow_user_to_review_translation_in_language(
            self.reviewer_id, 'hi')

        response = self.get_json(
            '/contributionreviewerrightsdatahandler', params={
                'username': 'reviewer'
            })
        self.assertEqual(
            response['can_review_translation_for_language_codes'], ['hi'])
        self.assertEqual(
            response['can_review_voiceover_for_language_codes'], ['hi'])
        self.assertEqual(response['can_review_questions'], True)

    def test_check_contribution_reviewer_rights_invalid_username(self):
        """An unknown username is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.get_json(
            '/contributionreviewerrightsdatahandler', params={
                'username': 'invalid'
            }, expected_status_int=400)

        self.assertEqual(response['error'], 'Invalid username: invalid')
        self.logout()

    def test_check_contribution_reviewer_rights_without_username(self):
        """A request without a username is rejected with a 400 error."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.get_json(
            '/contributionreviewerrightsdatahandler', params={},
            expected_status_int=400)

        self.assertEqual(response['error'], 'Missing username param')
        self.logout()
| 39.835751 | 114 | 0.646983 |
305ea83c120c251ef5dfaebf4eb25262b3accd29 | 29,232 | py | Python | numpyro/distributions/discrete.py | quattro/numpyro | b7b6e937297ea47c55760446134f84fc82936a9d | [
"Apache-2.0"
] | null | null | null | numpyro/distributions/discrete.py | quattro/numpyro | b7b6e937297ea47c55760446134f84fc82936a9d | [
"Apache-2.0"
] | null | null | null | numpyro/distributions/discrete.py | quattro/numpyro | b7b6e937297ea47c55760446134f84fc82936a9d | [
"Apache-2.0"
] | null | null | null | # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
# The implementation largely follows the design in PyTorch's `torch.distributions`
#
# Copyright (c) 2016- Facebook, Inc (Adam Paszke)
# Copyright (c) 2014- Facebook, Inc (Soumith Chintala)
# Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert)
# Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu)
# Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu)
# Copyright (c) 2011-2013 NYU (Clement Farabet)
# Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston)
# Copyright (c) 2006 Idiap Research Institute (Samy Bengio)
# Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz)
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import warnings
import numpy as np
import jax
from jax import lax
from jax.nn import softmax, softplus
import jax.numpy as jnp
from jax.ops import index_add
import jax.random as random
from jax.scipy.special import expit, gammaincc, gammaln, logsumexp, xlog1py, xlogy
from numpyro.distributions import constraints, transforms
from numpyro.distributions.distribution import Distribution
from numpyro.distributions.util import (
binary_cross_entropy_with_logits,
binomial,
categorical,
clamp_probs,
is_prng_key,
lazy_property,
multinomial,
promote_shapes,
validate_sample,
)
from numpyro.util import not_jax_tracer
def _to_probs_bernoulli(logits):
return expit(logits)
def _to_logits_bernoulli(probs):
    """Convert Bernoulli probabilities to logits.

    Probabilities are clamped away from exactly 0 and 1 first so that both
    logarithms stay finite.
    """
    clamped = clamp_probs(probs)
    return jnp.log(clamped) - jnp.log1p(-clamped)
def _to_probs_multinom(logits):
return softmax(logits, axis=-1)
def _to_logits_multinom(probs):
minval = jnp.finfo(jnp.result_type(probs)).min
return jnp.clip(jnp.log(probs), a_min=minval)
class BernoulliProbs(Distribution):
    """Bernoulli distribution parameterized by the success probability
    ``probs`` (must lie in the unit interval).
    """

    arg_constraints = {"probs": constraints.unit_interval}
    support = constraints.boolean
    has_enumerate_support = True

    def __init__(self, probs, validate_args=None):
        self.probs = probs
        # The batch shape mirrors the parameter's shape; events are scalar.
        super(BernoulliProbs, self).__init__(
            batch_shape=jnp.shape(self.probs), validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        assert is_prng_key(key)
        samples = random.bernoulli(
            key, self.probs, shape=sample_shape + self.batch_shape
        )
        # random.bernoulli yields booleans; cast to the default int dtype.
        return samples.astype(jnp.result_type(samples, int))

    @validate_sample
    def log_prob(self, value):
        # Clamp probs away from exactly 0/1 so the logs inside xlogy /
        # xlog1py stay finite; a zero coefficient contributes exactly 0.
        ps_clamped = clamp_probs(self.probs)
        return xlogy(value, ps_clamped) + xlog1py(1 - value, -ps_clamped)

    @lazy_property
    def logits(self):
        # Computed lazily: most uses of this class never need logits.
        return _to_logits_bernoulli(self.probs)

    @property
    def mean(self):
        return self.probs

    @property
    def variance(self):
        return self.probs * (1 - self.probs)

    def enumerate_support(self, expand=True):
        # The support {0, 1} along a new leading enumeration axis.
        values = jnp.arange(2).reshape((-1,) + (1,) * len(self.batch_shape))
        if expand:
            values = jnp.broadcast_to(values, values.shape[:1] + self.batch_shape)
        return values
class BernoulliLogits(Distribution):
    """Bernoulli distribution parameterized by log-odds ``logits``."""

    arg_constraints = {"logits": constraints.real}
    support = constraints.boolean
    has_enumerate_support = True

    def __init__(self, logits=None, validate_args=None):
        # NOTE(review): `logits` defaults to None (unlike BernoulliProbs's
        # required `probs`), but passing None would fail in jnp.shape below
        # — presumably kept for signature symmetry; confirm intended.
        self.logits = logits
        super(BernoulliLogits, self).__init__(
            batch_shape=jnp.shape(self.logits), validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        assert is_prng_key(key)
        # Sampling goes through the derived `probs` (lazy property below).
        samples = random.bernoulli(
            key, self.probs, shape=sample_shape + self.batch_shape
        )
        return samples.astype(jnp.result_type(samples, int))

    @validate_sample
    def log_prob(self, value):
        # log p(value) = -BCE(logits, value).
        return -binary_cross_entropy_with_logits(self.logits, value)

    @lazy_property
    def probs(self):
        return _to_probs_bernoulli(self.logits)

    @property
    def mean(self):
        return self.probs

    @property
    def variance(self):
        return self.probs * (1 - self.probs)

    def enumerate_support(self, expand=True):
        # The support {0, 1} along a new leading enumeration axis.
        values = jnp.arange(2).reshape((-1,) + (1,) * len(self.batch_shape))
        if expand:
            values = jnp.broadcast_to(values, values.shape[:1] + self.batch_shape)
        return values
def Bernoulli(probs=None, logits=None, validate_args=None):
    """Factory returning a Bernoulli distribution.

    Dispatches to :class:`BernoulliProbs` when ``probs`` is given, otherwise
    to :class:`BernoulliLogits`; raises ``ValueError`` if neither parameter
    is provided. When both are given, ``probs`` wins.
    """
    if probs is None and logits is None:
        raise ValueError("One of `probs` or `logits` must be specified.")
    if probs is not None:
        return BernoulliProbs(probs, validate_args=validate_args)
    return BernoulliLogits(logits, validate_args=validate_args)
class BinomialProbs(Distribution):
    """Binomial distribution parameterized by success probability ``probs``
    and number of trials ``total_count``.
    """

    arg_constraints = {
        "probs": constraints.unit_interval,
        "total_count": constraints.nonnegative_integer,
    }
    has_enumerate_support = True

    def __init__(self, probs, total_count=1, validate_args=None):
        # Broadcast the two parameters against each other.
        self.probs, self.total_count = promote_shapes(probs, total_count)
        batch_shape = lax.broadcast_shapes(jnp.shape(probs), jnp.shape(total_count))
        super(BinomialProbs, self).__init__(
            batch_shape=batch_shape, validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        assert is_prng_key(key)
        return binomial(
            key, self.probs, n=self.total_count, shape=sample_shape + self.batch_shape
        )

    @validate_sample
    def log_prob(self, value):
        # log C(n, k) computed via log-gamma:
        # lgamma(n+1) - lgamma(k+1) - lgamma(n-k+1).
        log_factorial_n = gammaln(self.total_count + 1)
        log_factorial_k = gammaln(value + 1)
        log_factorial_nmk = gammaln(self.total_count - value + 1)
        # xlogy/xlog1py contribute exactly 0 for a zero coefficient, which
        # keeps the endpoints (value == 0 or value == n) finite.
        return (
            log_factorial_n
            - log_factorial_k
            - log_factorial_nmk
            + xlogy(value, self.probs)
            + xlog1py(self.total_count - value, -self.probs)
        )

    @lazy_property
    def logits(self):
        return _to_logits_bernoulli(self.probs)

    @property
    def mean(self):
        return jnp.broadcast_to(self.total_count * self.probs, self.batch_shape)

    @property
    def variance(self):
        return jnp.broadcast_to(
            self.total_count * self.probs * (1 - self.probs), self.batch_shape
        )

    @constraints.dependent_property(is_discrete=True, event_dim=0)
    def support(self):
        # The support depends on total_count, hence a dependent property.
        return constraints.integer_interval(0, self.total_count)

    def enumerate_support(self, expand=True):
        # Enumeration only makes sense for a single (homogeneous)
        # total_count across the batch; with concrete (non-traced) values
        # this is checked eagerly.
        if not_jax_tracer(self.total_count):
            total_count = np.amax(self.total_count)
            # NB: the error can't be raised if inhomogeneous issue happens when tracing
            if np.amin(self.total_count) != total_count:
                raise NotImplementedError(
                    "Inhomogeneous total count not supported" " by `enumerate_support`."
                )
        else:
            total_count = jnp.amax(self.total_count)
        # Values 0..n along a new leading enumeration axis.
        values = jnp.arange(total_count + 1).reshape(
            (-1,) + (1,) * len(self.batch_shape)
        )
        if expand:
            values = jnp.broadcast_to(values, values.shape[:1] + self.batch_shape)
        return values
class BinomialLogits(Distribution):
    """Binomial distribution parameterized by log-odds ``logits``.

    ``total_count`` is the number of Bernoulli trials; the parameters are
    broadcast against each other to form the batch shape.
    """

    arg_constraints = {
        "logits": constraints.real,
        "total_count": constraints.nonnegative_integer,
    }
    has_enumerate_support = True
    # Enumeration does not depend on the parameterization; reuse the probs
    # implementation.
    enumerate_support = BinomialProbs.enumerate_support

    def __init__(self, logits, total_count=1, validate_args=None):
        self.logits, self.total_count = promote_shapes(logits, total_count)
        batch_shape = lax.broadcast_shapes(jnp.shape(logits), jnp.shape(total_count))
        super(BinomialLogits, self).__init__(
            batch_shape=batch_shape, validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        # Sampling goes through the (lazily converted) probs parameterization.
        assert is_prng_key(key)
        return binomial(
            key, self.probs, n=self.total_count, shape=sample_shape + self.batch_shape
        )

    @validate_sample
    def log_prob(self, value):
        """Log pmf in the logits parameterization.

        ``normalize_term`` is n * softplus(logits) - log(n!), computed via the
        overflow-safe split log(1 + e^l) = max(l, 0) + log1p(e^{-|l|}).
        """
        log_factorial_n = gammaln(self.total_count + 1)
        log_factorial_k = gammaln(value + 1)
        log_factorial_nmk = gammaln(self.total_count - value + 1)
        normalize_term = (
            self.total_count * jnp.clip(self.logits, 0)
            + xlog1py(self.total_count, jnp.exp(-jnp.abs(self.logits)))
            - log_factorial_n
        )
        return (
            value * self.logits - log_factorial_k - log_factorial_nmk - normalize_term
        )

    @lazy_property
    def probs(self):
        # Cached logits -> probs conversion (helper defined elsewhere in file).
        return _to_probs_bernoulli(self.logits)

    @property
    def mean(self):
        """Mean n*p, broadcast to the batch shape."""
        return jnp.broadcast_to(self.total_count * self.probs, self.batch_shape)

    @property
    def variance(self):
        """Variance n*p*(1-p), broadcast to the batch shape."""
        return jnp.broadcast_to(
            self.total_count * self.probs * (1 - self.probs), self.batch_shape
        )

    @constraints.dependent_property(is_discrete=True, event_dim=0)
    def support(self):
        return constraints.integer_interval(0, self.total_count)
def Binomial(total_count=1, probs=None, logits=None, validate_args=None):
    """Factory dispatching to the probs- or logits-parameterized Binomial.

    ``probs`` takes precedence when both parameterizations are supplied.
    """
    if probs is None and logits is None:
        raise ValueError("One of `probs` or `logits` must be specified.")
    if probs is not None:
        return BinomialProbs(probs, total_count, validate_args=validate_args)
    return BinomialLogits(logits, total_count, validate_args=validate_args)
class CategoricalProbs(Distribution):
    """Categorical distribution over {0, ..., K-1} given event probabilities.

    The trailing axis of ``probs`` indexes categories; leading axes are batch
    dimensions.
    """

    arg_constraints = {"probs": constraints.simplex}
    has_enumerate_support = True

    def __init__(self, probs, validate_args=None):
        if jnp.ndim(probs) < 1:
            raise ValueError("`probs` parameter must be at least one-dimensional.")
        self.probs = probs
        super(CategoricalProbs, self).__init__(
            batch_shape=jnp.shape(self.probs)[:-1], validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        assert is_prng_key(key)
        return categorical(key, self.probs, shape=sample_shape + self.batch_shape)

    @validate_sample
    def log_prob(self, value):
        """Gather the log probability at each integer ``value`` index."""
        batch_shape = lax.broadcast_shapes(jnp.shape(value), self.batch_shape)
        # Append a singleton axis so the values can index the category axis.
        value = jnp.expand_dims(value, axis=-1)
        value = jnp.broadcast_to(value, batch_shape + (1,))
        logits = self.logits
        log_pmf = jnp.broadcast_to(logits, batch_shape + jnp.shape(logits)[-1:])
        return jnp.take_along_axis(log_pmf, value, axis=-1)[..., 0]

    @lazy_property
    def logits(self):
        # Cached probs -> normalized logits conversion (helper defined elsewhere).
        return _to_logits_multinom(self.probs)

    @property
    def mean(self):
        # The mean of an unordered categorical is undefined; reported as NaN.
        return jnp.full(self.batch_shape, jnp.nan, dtype=jnp.result_type(self.probs))

    @property
    def variance(self):
        # Likewise undefined; NaN per batch element.
        return jnp.full(self.batch_shape, jnp.nan, dtype=jnp.result_type(self.probs))

    @constraints.dependent_property(is_discrete=True, event_dim=0)
    def support(self):
        return constraints.integer_interval(0, jnp.shape(self.probs)[-1] - 1)

    def enumerate_support(self, expand=True):
        """Enumerate all K categories along a new leading axis."""
        values = jnp.arange(self.probs.shape[-1]).reshape(
            (-1,) + (1,) * len(self.batch_shape)
        )
        if expand:
            values = jnp.broadcast_to(values, values.shape[:1] + self.batch_shape)
        return values
class CategoricalLogits(Distribution):
    """Categorical distribution over {0, ..., K-1} given unnormalized logits.

    The trailing axis of ``logits`` indexes categories; leading axes are batch
    dimensions.
    """

    arg_constraints = {"logits": constraints.real_vector}
    has_enumerate_support = True

    def __init__(self, logits, validate_args=None):
        if jnp.ndim(logits) < 1:
            raise ValueError("`logits` parameter must be at least one-dimensional.")
        self.logits = logits
        super(CategoricalLogits, self).__init__(
            batch_shape=jnp.shape(logits)[:-1], validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        # jax.random.categorical accepts unnormalized logits directly.
        assert is_prng_key(key)
        return random.categorical(
            key, self.logits, shape=sample_shape + self.batch_shape
        )

    @validate_sample
    def log_prob(self, value):
        """Normalize the logits with logsumexp, then gather at ``value``."""
        batch_shape = lax.broadcast_shapes(jnp.shape(value), self.batch_shape)
        value = jnp.expand_dims(value, -1)
        value = jnp.broadcast_to(value, batch_shape + (1,))
        log_pmf = self.logits - logsumexp(self.logits, axis=-1, keepdims=True)
        log_pmf = jnp.broadcast_to(log_pmf, batch_shape + jnp.shape(log_pmf)[-1:])
        return jnp.take_along_axis(log_pmf, value, -1)[..., 0]

    @lazy_property
    def probs(self):
        # Cached logits -> probs (softmax-style) conversion; helper defined
        # elsewhere in this file.
        return _to_probs_multinom(self.logits)

    @property
    def mean(self):
        # The mean of an unordered categorical is undefined; reported as NaN.
        return jnp.full(self.batch_shape, jnp.nan, dtype=jnp.result_type(self.logits))

    @property
    def variance(self):
        # Likewise undefined; NaN per batch element.
        return jnp.full(self.batch_shape, jnp.nan, dtype=jnp.result_type(self.logits))

    @constraints.dependent_property(is_discrete=True, event_dim=0)
    def support(self):
        return constraints.integer_interval(0, jnp.shape(self.logits)[-1] - 1)

    def enumerate_support(self, expand=True):
        """Enumerate all K categories along a new leading axis."""
        values = jnp.arange(self.logits.shape[-1]).reshape(
            (-1,) + (1,) * len(self.batch_shape)
        )
        if expand:
            values = jnp.broadcast_to(values, values.shape[:1] + self.batch_shape)
        return values
def Categorical(probs=None, logits=None, validate_args=None):
    """Factory dispatching to the probs- or logits-parameterized Categorical.

    ``probs`` takes precedence when both parameterizations are supplied.
    """
    if probs is None and logits is None:
        raise ValueError("One of `probs` or `logits` must be specified.")
    if probs is not None:
        return CategoricalProbs(probs, validate_args=validate_args)
    return CategoricalLogits(logits, validate_args=validate_args)
class OrderedLogistic(CategoricalProbs):
    """
    A categorical distribution with ordered outcomes.

    **References:**

    1. *Stan Functions Reference, v2.20 section 12.6*,
       Stan Development Team

    :param numpy.ndarray predictor: prediction in real domain; typically this is output
        of a linear model.
    :param numpy.ndarray cutpoints: positions in real domain to separate categories.
    """

    arg_constraints = {
        "predictor": constraints.real,
        "cutpoints": constraints.ordered_vector,
    }

    def __init__(self, predictor, cutpoints, validate_args=None):
        # Temporarily give ``predictor`` a trailing singleton axis so it
        # broadcasts against the cutpoint vector, then strip it afterwards.
        if jnp.ndim(predictor) == 0:
            (predictor,) = promote_shapes(predictor, shape=(1,))
        else:
            predictor = predictor[..., None]
        predictor, self.cutpoints = promote_shapes(predictor, cutpoints)
        self.predictor = predictor[..., 0]
        # Category probabilities come from the inverse ordered transform of
        # the cutpoints, anchored at the predictor.
        probs = transforms.SimplexToOrderedTransform(self.predictor).inv(self.cutpoints)
        super(OrderedLogistic, self).__init__(probs, validate_args=validate_args)

    @staticmethod
    def infer_shapes(predictor, cutpoints):
        # Operates on shape tuples (not arrays); the trailing cutpoint axis is
        # consumed to produce scalar events.
        batch_shape = lax.broadcast_shapes(predictor, cutpoints[:-1])
        event_shape = ()
        return batch_shape, event_shape
class PRNGIdentity(Distribution):
    """
    Distribution over :func:`~jax.random.PRNGKey`. This can be used to
    draw a batch of :func:`~jax.random.PRNGKey` using the :class:`~numpyro.handlers.seed`
    handler. Only `sample` method is supported.

    .. deprecated:: use ``numpyro.prng_key()`` instead (see the warning
       emitted by the constructor).
    """

    def __init__(self):
        warnings.warn(
            "PRNGIdentity distribution is deprecated. To get a random "
            "PRNG key, you can use `numpyro.prng_key()` instead.",
            FutureWarning,
        )
        # Each event is one raw PRNG key, represented as a pair of words.
        super(PRNGIdentity, self).__init__(event_shape=(2,))

    def sample(self, key, sample_shape=()):
        # Split the key into prod(sample_shape) sub-keys and reshape to
        # sample_shape + (2,). np.prod(()) == 1.0, hence the int32 cast.
        return jnp.reshape(
            random.split(key, np.prod(sample_shape).astype(np.int32)),
            sample_shape + self.event_shape,
        )
class MultinomialProbs(Distribution):
    """Multinomial distribution parameterized by event probabilities.

    The trailing axis of ``probs`` is the event (category) axis; each sample
    is a vector of per-category counts summing to ``total_count``.
    """

    arg_constraints = {
        "probs": constraints.simplex,
        "total_count": constraints.nonnegative_integer,
    }

    def __init__(self, probs, total_count=1, validate_args=None):
        if jnp.ndim(probs) < 1:
            raise ValueError("`probs` parameter must be at least one-dimensional.")
        batch_shape, event_shape = self.infer_shapes(
            jnp.shape(probs), jnp.shape(total_count)
        )
        self.probs = promote_shapes(probs, shape=batch_shape + jnp.shape(probs)[-1:])[0]
        self.total_count = promote_shapes(total_count, shape=batch_shape)[0]
        super(MultinomialProbs, self).__init__(
            batch_shape=batch_shape,
            event_shape=event_shape,
            validate_args=validate_args,
        )

    def sample(self, key, sample_shape=()):
        """Draw count vectors of shape sample_shape + batch_shape + event_shape."""
        assert is_prng_key(key)
        return multinomial(
            key, self.probs, self.total_count, shape=sample_shape + self.batch_shape
        )

    @validate_sample
    def log_prob(self, value):
        """Log pmf: log(n!) + sum_i [value_i * log(p_i) - log(value_i!)].

        ``xlogy`` treats 0*log(0) as 0 so zero-probability/zero-count cells
        stay finite. NOTE: the redundant manual ``self._validate_sample``
        call was removed -- the ``@validate_sample`` decorator already
        validates, matching the sibling distributions in this file.
        """
        return gammaln(self.total_count + 1) + jnp.sum(
            xlogy(value, self.probs) - gammaln(value + 1), axis=-1
        )

    @lazy_property
    def logits(self):
        # Cached probs -> logits conversion (helper defined elsewhere in file).
        return _to_logits_multinom(self.probs)

    @property
    def mean(self):
        """Per-category expected counts: n * p."""
        return self.probs * jnp.expand_dims(self.total_count, -1)

    @property
    def variance(self):
        """Per-category marginal variances: n * p * (1 - p)."""
        return jnp.expand_dims(self.total_count, -1) * self.probs * (1 - self.probs)

    @constraints.dependent_property(is_discrete=True, event_dim=1)
    def support(self):
        return constraints.multinomial(self.total_count)

    @staticmethod
    def infer_shapes(probs, total_count):
        # Operates on shape tuples: the last probs axis becomes the event shape.
        batch_shape = lax.broadcast_shapes(probs[:-1], total_count)
        event_shape = probs[-1:]
        return batch_shape, event_shape
class MultinomialLogits(Distribution):
    """Multinomial distribution parameterized by unnormalized ``logits``.

    The trailing axis of ``logits`` is the event (category) axis; each sample
    is a vector of per-category counts summing to ``total_count``.
    """

    arg_constraints = {
        "logits": constraints.real_vector,
        "total_count": constraints.nonnegative_integer,
    }

    def __init__(self, logits, total_count=1, validate_args=None):
        if jnp.ndim(logits) < 1:
            raise ValueError("`logits` parameter must be at least one-dimensional.")
        batch_shape, event_shape = self.infer_shapes(
            jnp.shape(logits), jnp.shape(total_count)
        )
        self.logits = promote_shapes(
            logits, shape=batch_shape + jnp.shape(logits)[-1:]
        )[0]
        self.total_count = promote_shapes(total_count, shape=batch_shape)[0]
        super(MultinomialLogits, self).__init__(
            batch_shape=batch_shape,
            event_shape=event_shape,
            validate_args=validate_args,
        )

    def sample(self, key, sample_shape=()):
        # Sampling goes through the (lazily converted) probs parameterization.
        assert is_prng_key(key)
        return multinomial(
            key, self.probs, self.total_count, shape=sample_shape + self.batch_shape
        )

    @validate_sample
    def log_prob(self, value):
        """Log pmf in the logits parameterization.

        ``normalize_term`` folds the softmax normalizer (n * logsumexp) and
        -log(n!) together so the main term is a plain inner product.
        NOTE: the redundant manual ``self._validate_sample`` call was
        removed -- the ``@validate_sample`` decorator already validates,
        matching the sibling distributions in this file.
        """
        normalize_term = self.total_count * logsumexp(self.logits, axis=-1) - gammaln(
            self.total_count + 1
        )
        return (
            jnp.sum(value * self.logits - gammaln(value + 1), axis=-1) - normalize_term
        )

    @lazy_property
    def probs(self):
        # Cached logits -> probs conversion (helper defined elsewhere in file).
        return _to_probs_multinom(self.logits)

    @property
    def mean(self):
        """Per-category expected counts: n * p."""
        return jnp.expand_dims(self.total_count, -1) * self.probs

    @property
    def variance(self):
        """Per-category marginal variances: n * p * (1 - p)."""
        return jnp.expand_dims(self.total_count, -1) * self.probs * (1 - self.probs)

    @constraints.dependent_property(is_discrete=True, event_dim=1)
    def support(self):
        return constraints.multinomial(self.total_count)

    @staticmethod
    def infer_shapes(logits, total_count):
        # Operates on shape tuples: the last logits axis becomes the event shape.
        batch_shape = lax.broadcast_shapes(logits[:-1], total_count)
        event_shape = logits[-1:]
        return batch_shape, event_shape
def Multinomial(total_count=1, probs=None, logits=None, validate_args=None):
    """Factory dispatching to the probs- or logits-parameterized Multinomial.

    ``probs`` takes precedence when both parameterizations are supplied.
    """
    if probs is None and logits is None:
        raise ValueError("One of `probs` or `logits` must be specified.")
    if probs is not None:
        return MultinomialProbs(probs, total_count, validate_args=validate_args)
    return MultinomialLogits(logits, total_count, validate_args=validate_args)
class Poisson(Distribution):
    r"""
    Creates a Poisson distribution parameterized by rate, the rate parameter.

    Samples are nonnegative integers, with a pmf given by

    .. math::
        \mathrm{rate}^k \frac{e^{-\mathrm{rate}}}{k!}

    :param numpy.ndarray rate: The rate parameter
    :param bool is_sparse: Whether to assume value is mostly zero when computing
        :meth:`log_prob`, which can speed up computation when data is sparse.
    """

    arg_constraints = {"rate": constraints.positive}
    support = constraints.nonnegative_integer

    def __init__(self, rate, *, is_sparse=False, validate_args=None):
        self.rate = rate
        self.is_sparse = is_sparse
        super(Poisson, self).__init__(jnp.shape(rate), validate_args=validate_args)

    def sample(self, key, sample_shape=()):
        """Draw Poisson samples of shape ``sample_shape + batch_shape``."""
        assert is_prng_key(key)
        return random.poisson(key, self.rate, shape=sample_shape + self.batch_shape)

    @validate_sample
    def log_prob(self, value):
        """Log pmf: value * log(rate) - log(value!) - rate.

        NOTE: the redundant manual ``self._validate_sample`` call was
        removed -- the ``@validate_sample`` decorator already validates,
        matching the sibling distributions in this file.
        """
        value = jax.device_get(value)
        if (
            self.is_sparse
            and not isinstance(value, jax.core.Tracer)
            and jnp.size(value) > 1
        ):
            # Sparse path: evaluate log(rate)*k - log(k!) only at nonzero
            # entries and scatter-add onto the dense ``-rate`` baseline
            # (the k == 0 pmf contributes exactly -rate).
            shape = lax.broadcast_shapes(self.batch_shape, jnp.shape(value))
            rate = jnp.broadcast_to(self.rate, shape).reshape(-1)
            value = jnp.broadcast_to(value, shape).reshape(-1)
            nonzero = value > 0
            sparse_value = value[nonzero]
            sparse_rate = rate[nonzero]
            # ``index_add`` comes from this module's imports (older JAX API).
            return index_add(
                -rate,
                nonzero,
                jnp.log(sparse_rate) * sparse_value - gammaln(sparse_value + 1),
            ).reshape(shape)
        return (jnp.log(self.rate) * value) - gammaln(value + 1) - self.rate

    @property
    def mean(self):
        return self.rate

    @property
    def variance(self):
        # For a Poisson distribution the variance equals the rate.
        return self.rate

    def cdf(self, value):
        # P(X <= value) = Q(floor(value) + 1, rate), the regularized upper
        # incomplete gamma function.
        k = jnp.floor(value) + 1
        return gammaincc(k, self.rate)
class ZeroInflatedProbs(Distribution):
    """Mixture of a point mass at zero (weight ``gate``) and a discrete base
    distribution (weight ``1 - gate``)."""

    arg_constraints = {"gate": constraints.unit_interval}

    def __init__(self, base_dist, gate, *, validate_args=None):
        batch_shape = lax.broadcast_shapes(jnp.shape(gate), base_dist.batch_shape)
        (self.gate,) = promote_shapes(gate, shape=batch_shape)
        # Zero inflation only makes sense for scalar discrete distributions.
        assert base_dist.is_discrete
        if base_dist.event_shape:
            raise ValueError(
                "ZeroInflatedProbs expected empty base_dist.event_shape but got {}".format(
                    base_dist.event_shape
                )
            )
        # XXX: we might need to promote parameters of base_dist but let's keep
        # this simplified for now
        self.base_dist = base_dist.expand(batch_shape)
        super(ZeroInflatedProbs, self).__init__(
            batch_shape, validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        """Sample the base distribution, then zero out entries chosen by the gate."""
        assert is_prng_key(key)
        key_bern, key_base = random.split(key)
        shape = sample_shape + self.batch_shape
        mask = random.bernoulli(key_bern, self.gate, shape)
        samples = self.base_dist(rng_key=key_base, sample_shape=sample_shape)
        return jnp.where(mask, 0, samples)

    @validate_sample
    def log_prob(self, value):
        # log[(1 - gate) * base_pmf(value)]; at value == 0 the extra gate
        # mass is added: log(gate + (1 - gate) * base_pmf(0)).
        log_prob = jnp.log1p(-self.gate) + self.base_dist.log_prob(value)
        return jnp.where(value == 0, jnp.log(self.gate + jnp.exp(log_prob)), log_prob)

    @constraints.dependent_property(is_discrete=True, event_dim=0)
    def support(self):
        return self.base_dist.support

    @lazy_property
    def mean(self):
        """Mixture mean: (1 - gate) * E[base]."""
        return (1 - self.gate) * self.base_dist.mean

    @lazy_property
    def variance(self):
        # E[X^2] = (1 - gate) * (mean^2 + var) of the base, minus the
        # mixture mean squared.
        return (1 - self.gate) * (
            self.base_dist.mean ** 2 + self.base_dist.variance
        ) - self.mean ** 2
class ZeroInflatedLogits(ZeroInflatedProbs):
    """Zero-inflated wrapper parameterized by gate log-odds instead of probs."""

    arg_constraints = {"gate_logits": constraints.real}

    def __init__(self, base_dist, gate_logits, *, validate_args=None):
        # Convert once to probs for the parent; keep the logits for the
        # numerically stable log_prob below.
        gate = _to_probs_bernoulli(gate_logits)
        batch_shape = lax.broadcast_shapes(jnp.shape(gate), base_dist.batch_shape)
        (self.gate_logits,) = promote_shapes(gate_logits, shape=batch_shape)
        super().__init__(base_dist, gate, validate_args=validate_args)

    @validate_sample
    def log_prob(self, value):
        # With log(gate) = -softplus(-logits) and
        # log(1 - gate) = -logits + log(gate):
        #   base log-prob + log(1 - gate) == log_prob_minus_log_gate + log_gate.
        log_prob_minus_log_gate = -self.gate_logits + self.base_dist.log_prob(value)
        log_gate = -softplus(-self.gate_logits)
        log_prob = log_prob_minus_log_gate + log_gate
        # At zero the total mass is gate + (1 - gate) * p0, i.e.
        # log_gate + log1p(exp(log_prob - log_gate)), expressed via softplus.
        zero_log_prob = softplus(log_prob_minus_log_gate) + log_gate
        return jnp.where(value == 0, zero_log_prob, log_prob)
def ZeroInflatedDistribution(
    base_dist, *, gate=None, gate_logits=None, validate_args=None
):
    """
    Generic Zero Inflated distribution.

    :param Distribution base_dist: the base distribution.
    :param numpy.ndarray gate: probability of extra zeros given via a Bernoulli distribution.
    :param numpy.ndarray gate_logits: logits of extra zeros given via a Bernoulli distribution.
    """
    has_gate = gate is not None
    if has_gate == (gate_logits is not None):
        raise ValueError(
            "Either `gate` or `gate_logits` must be specified, but not both."
        )
    if has_gate:
        return ZeroInflatedProbs(base_dist, gate, validate_args=validate_args)
    return ZeroInflatedLogits(base_dist, gate_logits, validate_args=validate_args)
class ZeroInflatedPoisson(ZeroInflatedProbs):
    """
    A Zero Inflated Poisson distribution.

    :param numpy.ndarray gate: probability of extra zeros.
    :param numpy.ndarray rate: rate of Poisson distribution.
    """

    arg_constraints = {"gate": constraints.unit_interval, "rate": constraints.positive}
    support = constraints.nonnegative_integer

    # TODO: resolve inconsistent parameter order w.r.t. Pyro
    # and support `gate_logits` argument
    def __init__(self, gate, rate=1.0, validate_args=None):
        # Broadcast ``rate`` against ``gate``; the gate itself is promoted by
        # the parent constructor.
        _, self.rate = promote_shapes(gate, rate)
        super().__init__(Poisson(self.rate), gate, validate_args=validate_args)
class GeometricProbs(Distribution):
    """Geometric distribution (number of failures before the first success)
    parameterized by success probability ``probs``; pmf (1 - p)^k * p."""

    arg_constraints = {"probs": constraints.unit_interval}
    support = constraints.nonnegative_integer

    def __init__(self, probs, validate_args=None):
        self.probs = probs
        super(GeometricProbs, self).__init__(
            batch_shape=jnp.shape(self.probs), validate_args=validate_args
        )

    def sample(self, key, sample_shape=()):
        """Inverse-CDF sampling: floor(log(1 - U) / log(1 - p))."""
        assert is_prng_key(key)
        probs = self.probs
        dtype = jnp.result_type(probs)
        shape = sample_shape + self.batch_shape
        u = random.uniform(key, shape, dtype)
        return jnp.floor(jnp.log1p(-u) / jnp.log1p(-probs))

    @validate_sample
    def log_prob(self, value):
        """Log pmf: value * log(1 - p) + log(p).

        ``probs`` is zeroed where (p == 1, value == 0) only to avoid the
        0 * log(0) = NaN in the first term; the trailing log term must use the
        *unmodified* ``self.probs`` so that log_prob(0) == 0 when p == 1.
        (Bug fix: the original applied ``jnp.log`` to the modified probs,
        yielding -inf there; this now matches torch.distributions.Geometric.)
        """
        probs = jnp.where((self.probs == 1) & (value == 0), 0, self.probs)
        return value * jnp.log1p(-probs) + jnp.log(self.probs)

    @lazy_property
    def logits(self):
        # Cached probs -> logits conversion (helper defined elsewhere in file).
        return _to_logits_bernoulli(self.probs)

    @property
    def mean(self):
        """E[X] = (1 - p) / p."""
        return 1.0 / self.probs - 1.0

    @property
    def variance(self):
        """Var[X] = (1 - p) / p**2."""
        return (1.0 / self.probs - 1.0) / self.probs
class GeometricLogits(Distribution):
    """Geometric distribution parameterized by success log-odds ``logits``."""

    arg_constraints = {"logits": constraints.real}
    support = constraints.nonnegative_integer

    def __init__(self, logits, validate_args=None):
        self.logits = logits
        super(GeometricLogits, self).__init__(
            batch_shape=jnp.shape(self.logits), validate_args=validate_args
        )

    @lazy_property
    def probs(self):
        # Cached logits -> probs conversion (helper defined elsewhere in file).
        return _to_probs_bernoulli(self.logits)

    def sample(self, key, sample_shape=()):
        # Inverse-CDF sampling; log(1 - p) == -softplus(logits).
        assert is_prng_key(key)
        logits = self.logits
        dtype = jnp.result_type(logits)
        shape = sample_shape + self.batch_shape
        u = random.uniform(key, shape, dtype)
        return jnp.floor(jnp.log1p(-u) / -softplus(logits))

    @validate_sample
    def log_prob(self, value):
        # value*log(1-p) + log(p), using log(p) = logits - softplus(logits)
        # and log(1-p) = -softplus(logits), collected into one softplus term.
        return (-value - 1) * softplus(self.logits) + self.logits

    @property
    def mean(self):
        """E[X] = (1 - p) / p."""
        return 1.0 / self.probs - 1.0

    @property
    def variance(self):
        """Var[X] = (1 - p) / p**2."""
        return (1.0 / self.probs - 1.0) / self.probs
def Geometric(probs=None, logits=None, validate_args=None):
    """Factory dispatching to the probs- or logits-parameterized Geometric.

    ``probs`` takes precedence when both parameterizations are supplied.
    """
    if probs is None and logits is None:
        raise ValueError("One of `probs` or `logits` must be specified.")
    if probs is not None:
        return GeometricProbs(probs, validate_args=validate_args)
    return GeometricLogits(logits, validate_args=validate_args)
| 34.883055 | 108 | 0.663896 |
f0b4af9ec5e2b348dd2e62e3b3ab2d6b6991c3b4 | 255 | py | Python | scripts/slack_messages.py | nguyenanhtuan1008/Machine-Learning | 15eb109c0704f607af229d171aebe42ab84b9892 | [
"MIT"
] | null | null | null | scripts/slack_messages.py | nguyenanhtuan1008/Machine-Learning | 15eb109c0704f607af229d171aebe42ab84b9892 | [
"MIT"
] | null | null | null | scripts/slack_messages.py | nguyenanhtuan1008/Machine-Learning | 15eb109c0704f607af229d171aebe42ab84b9892 | [
"MIT"
] | null | null | null | import requests
import json
import os
# Post a fixed test message to a Slack incoming webhook.
data = {
    "text": "hi, this is a test"
}
# SECURITY NOTE(review): a live-looking Slack webhook URL is hard-coded here;
# it should be rotated and loaded from the environment (see commented line).
webhook = 'https://hooks.slack.com/services/6ZHCF86A/B011ATP3S2J/ycN6xM2SkxgN99WJ1Zv1y5Nm'
# webhook = os.getenv("webhook_slack")
requests.post(webhook, json.dumps(data))
251ed4eee5877dfae9e88103f631b3150193af33 | 113 | py | Python | office365/sharepoint/webs/web_info_creation_information.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | 544 | 2016-08-04T17:10:16.000Z | 2022-03-31T07:17:20.000Z | office365/sharepoint/webs/web_info_creation_information.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | 438 | 2016-10-11T12:24:22.000Z | 2022-03-31T19:30:35.000Z | office365/sharepoint/webs/web_info_creation_information.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | 202 | 2016-08-22T19:29:40.000Z | 2022-03-30T20:26:15.000Z | from office365.runtime.client_value import ClientValue
class WebInfoCreationInformation(ClientValue):
    """Empty marker subclass of ``ClientValue``; declares no extra properties."""
    pass
| 18.833333 | 54 | 0.840708 |
b08badc8d11ef2e437ea5b86c90c95e8aa253a36 | 3,971 | py | Python | grr/core/grr_response_core/lib/parsers/chrome_history_test.py | tsehori/grr | 048506f22f74642bfe61749069a45ddf496fdab3 | [
"Apache-2.0"
] | 1 | 2021-07-01T01:43:06.000Z | 2021-07-01T01:43:06.000Z | grr/core/grr_response_core/lib/parsers/chrome_history_test.py | tsehori/grr | 048506f22f74642bfe61749069a45ddf496fdab3 | [
"Apache-2.0"
] | 44 | 2021-05-14T22:49:24.000Z | 2022-03-13T21:54:02.000Z | grr/core/grr_response_core/lib/parsers/chrome_history_test.py | tsehori/grr | 048506f22f74642bfe61749069a45ddf496fdab3 | [
"Apache-2.0"
] | 1 | 2020-06-25T14:25:54.000Z | 2020-06-25T14:25:54.000Z | #!/usr/bin/env python
# Lint as: python3
# Copyright 2011 Google Inc. All Rights Reserved.
"""Tests for grr.parsers.chrome_history."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
import io
import os
from absl import app
from grr_response_core.lib.parsers import chrome_history
from grr_response_core.lib.util import temp
from grr.test_lib import test_lib
class ChromeHistoryTest(test_lib.GRRBaseTest):
  """Test parsing of chrome history files."""

  @staticmethod
  def _ToDatetime(timestamp):
    """Converts a microsecond epoch timestamp into a datetime.

    Values that are not numeric (e.g. already-converted datetimes) are
    returned unchanged, mirroring the original inline try/except logic that
    was duplicated across tests.
    """
    try:
      return datetime.datetime(1970, 1, 1) + datetime.timedelta(
          microseconds=timestamp)
    except (TypeError, ValueError):
      return timestamp

  def _ParseHistoryFile(self, filename):
    """Parses a test-data history file into a list of entries."""
    history_file = os.path.join(self.base_path, "parser_test", filename)
    with io.open(history_file, mode="rb") as history_filedesc:
      history = chrome_history.ChromeParser()
      return list(history.Parse(history_file, history_filedesc))

  def _AssertTimeOrdered(self, entries):
    """Checks that entries are sorted by their timestamp column."""
    time_results = [x[0] for x in entries]
    self.assertEqual(time_results, sorted(time_results))

  def testBasicParsing(self):
    """Test we can parse a standard file."""
    entries = self._ParseHistoryFile("History2")

    dt1 = self._ToDatetime(entries[0][0])
    dt2 = self._ToDatetime(entries[-1][0])

    # Check that our results are properly time ordered.
    self._AssertTimeOrdered(entries)
    self.assertEqual(str(dt1), "2013-05-03 15:11:26.556635")
    self.assertStartsWith(entries[0][2],
                          "https://www.google.ch/search?q=why+you+shouldn")

    self.assertEqual(str(dt2), "2013-05-03 15:11:39.763984")
    self.assertStartsWith(entries[-1][2], "http://www.test.ch/")

    self.assertLen(entries, 4)

  def testTimeOrderingDownload(self):
    """Test we can correctly time order downloads and visits."""
    entries = self._ParseHistoryFile("History3")
    # Check that our results are properly time ordered.
    self._AssertTimeOrdered(entries)
    self.assertLen(entries, 23)

  def testBasicParsingOldFormat(self):
    """Test we can parse a standard file."""
    entries = self._ParseHistoryFile("History")

    dt1 = self._ToDatetime(entries[0][0])
    dt2 = self._ToDatetime(entries[-1][0])

    # Check that our results are properly time ordered.
    self._AssertTimeOrdered(entries)
    self.assertEqual(str(dt1), "2011-04-07 12:03:11")
    self.assertEqual(entries[0][2], "http://start.ubuntu.com/10.04/Google/")

    self.assertEqual(str(dt2), "2011-05-23 08:37:27.061516")
    self.assertStartsWith(
        entries[-1][2], "https://chrome.google.com/webs"
        "tore/detail/mfjkgbjaikamkkojmak"
        "jclmkianficch")

    self.assertLen(entries, 71)

  def testNonSqliteDatabase(self):
    """A non-SQLite input yields no results instead of raising."""
    with temp.AutoTempFilePath(suffix="-journal") as filepath:
      with io.open(filepath, "wb") as filedesc:
        filedesc.write(b"foobar")
      with io.open(filepath, "rb") as filedesc:
        # This should not fail, but return an empty list of results.
        results = list(chrome_history.ChromeParser().Parse(filepath, filedesc))
        self.assertEmpty(results)
def main(argv):
  """Runs the test suite via the GRR test runner."""
  test_lib.main(argv)


if __name__ == "__main__":
  app.run(main)
| 33.369748 | 79 | 0.691765 |
9d205fb63a1701f4ce7ec86a057723d55fc8bc28 | 2,571 | py | Python | django_markdown_newsletter/views.py | SamyMe/django-markdown-newsletter | 4b0ad63d703afcba4d561b6f69b761d0609d1ed9 | [
"MIT"
] | null | null | null | django_markdown_newsletter/views.py | SamyMe/django-markdown-newsletter | 4b0ad63d703afcba4d561b6f69b761d0609d1ed9 | [
"MIT"
] | null | null | null | django_markdown_newsletter/views.py | SamyMe/django-markdown-newsletter | 4b0ad63d703afcba4d561b6f69b761d0609d1ed9 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from .models import Subscribe
from .forms import NewsletterForm, SubscribeForm
import sendmail
from markdown import markdown
from django.http import HttpResponseRedirect
def newsletter(request):
    """Admin view: compose a newsletter e-mail and send it to subscribers.

    Requires an authenticated user; anonymous visitors are redirected home.
    """
    if request.user.is_authenticated():
        form=NewsletterForm(request.POST, request.FILES)
        # Target either one named newsletter's subscribers or everyone.
        if "newsletter" in request.POST :
            destination=Subscribe.objects.filter(newsletter=request.POST.get("newsletter"))
        else :
            destination=Subscribe.objects.only("email") #values("email")
        addresses=[]
        for i in destination:
            # NOTE(review): relies on Subscribe.__str__ returning the e-mail
            # address -- confirm against the model definition.
            addresses.append(str(i))
        context={'form':form, 'newsletters':Subscribe.objects.values('newsletter').distinct()}
        if "envoyer" in request.POST:  # "envoyer" is French for "send"
            email=form.save(commit=False,)
            email.save()
            try:
                # Send with the uploaded attachment when present ...
                sendmail.sendmail(addresses,str(email.subject),markdown(str(email.body)), str(email.attachement.path))
            except:
                # ... otherwise fall back to a plain send.
                # NOTE(review): this bare except also swallows real send errors.
                sendmail.sendmail(addresses,str(email.subject),markdown(str(email.body)))
        return render(request,'newsletter.html',context)
    else:
        return HttpResponseRedirect('/')
def subscribe_specific(request):
    """Handles subscribe/unsubscribe POSTs for a specific newsletter.

    Fixes relative to the original:
    - the action flag was looked up on ``form.POST`` (Django forms have no
      ``POST`` attribute -> AttributeError); it lives on ``request.POST``;
    - ``context`` was never defined, so the final ``render`` always raised
      NameError; it is now initialized with the bound form;
    - an unused ``ip = get_ip(request)`` local was removed (its value was
      never stored or passed on).
    """
    form = SubscribeForm(request.POST or None)
    context = {"form": form}
    if form.is_valid():
        if 'subscribe' in request.POST:
            new_subscribe = form.save(commit=False)
            new_subscribe.save()
        if 'unsubscribe' in request.POST:
            # Unsubscription for specific newsletters is not implemented yet.
            pass
    return render(request, "subscribed.html", context)
def subscribe_default(request):
    """Subscribes/unsubscribes an e-mail address to the "default" newsletter.

    NOTE(review): the whole body is wrapped in a bare try/except that
    redirects to "/" on *any* error, which hides real bugs (e.g. the
    IndexError when the filtered queryset below is empty).
    """
    try:
        form = SubscribeForm(request.POST or None )
        context={ "subscribe" : True }
        if 'subscribe' in request.POST:
            if form.is_valid():
                new_subscribe=form.save(commit=False)
                ip=get_ip(request)  # NOTE(review): computed but never stored
                new_subscribe.save()
                context={ "subscribe" : True }
            else:
                # Invalid form usually means the address already exists:
                # move that subscriber back to the "default" newsletter.
                subscriber=Subscribe.objects.filter(email=request.POST.get("email"))[0]
                if subscriber:
                    subscriber.newsletter="default"
                    subscriber.save()
                    context={"subscribe": True}
        elif 'unsubscribe' in request.POST:
            # Soft-delete: flag the row rather than removing it.
            subscriber=Subscribe.objects.filter(email=request.POST.get("email"))[0]
            print subscriber  # NOTE(review): Python 2 debug print; remove
            subscriber.newsletter="deleted"
            subscriber.save()
            context={ "unsubscribe" : True }
        return render(request, "subscribed.html", context)
    except:
        return HttpResponseRedirect('/')
def get_ip(request):
    """Best-effort extraction of the client IP from a Django request.

    Prefers the first entry of the ``X-Forwarded-For`` header (the original
    client when behind a proxy) and falls back to ``REMOTE_ADDR``. Returns
    ``""`` when the request object carries no usable metadata.
    """
    try:
        x_forward = request.META.get("HTTP_X_FORWARDED_FOR")
        if x_forward:
            ip = x_forward.split(",")[0]
        else:
            ip = request.META.get("REMOTE_ADDR")
    except AttributeError:
        # Was a bare ``except:``; only a missing/odd META attribute is an
        # expected failure here -- anything else should surface.
        ip = ""
    return ip
| 27.351064 | 106 | 0.676391 |
32ec29844662439ceec0bc03eb5b3e2fe8755b06 | 5,090 | py | Python | src/azure-cli-core/azure/cli/core/commands/client_factory.py | henrypan/azure-cli | 8de0ab5216ed3dc700546ae9a3c485710322376b | [
"MIT"
] | null | null | null | src/azure-cli-core/azure/cli/core/commands/client_factory.py | henrypan/azure-cli | 8de0ab5216ed3dc700546ae9a3c485710322376b | [
"MIT"
] | 2 | 2021-03-25T21:38:56.000Z | 2021-11-15T17:46:45.000Z | src/azure-cli-core/azure/cli/core/commands/client_factory.py | Visual-Studio-China/azure-cli-int | 48c7c7f371a0ecc4ebfd4dcfdc72764beddf5c31 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
from azure.cli.core import __version__ as core_version
from azure.cli.core._profile import Profile, CLOUD
import azure.cli.core._debug as _debug
import azure.cli.core.azlogging as azlogging
from azure.cli.core.util import CLIError
from azure.cli.core.application import APPLICATION
from azure.cli.core.profiles._shared import get_client_class
from azure.cli.core.profiles import get_api_version, get_sdk, ResourceType
logger = azlogging.get_az_logger(__name__)
UA_AGENT = "AZURECLI/{}".format(core_version)
ENV_ADDITIONAL_USER_AGENT = 'AZURE_HTTP_USER_AGENT'
def get_mgmt_service_client(client_or_resource_type, subscription_id=None, api_version=None,
                            **kwargs):
    """Returns a configured ARM management client.

    Accepts either a ResourceType (resolved to its versioned client class and
    default API version) or a client class directly; extra kwargs are passed
    through to the client constructor.
    """
    if isinstance(client_or_resource_type, ResourceType):
        # Get the versioned client
        client_type = get_client_class(client_or_resource_type)
        api_version = api_version or get_api_version(client_or_resource_type)
    else:
        # Get the non-versioned client
        client_type = client_or_resource_type
    client, _ = _get_mgmt_service_client(client_type, subscription_id=subscription_id,
                                         api_version=api_version, **kwargs)
    return client
def get_subscription_service_client(client_type):
    """Returns a (client, subscription_id) tuple for a subscription-unbound client."""
    return _get_mgmt_service_client(client_type, False)
def configure_common_settings(client):
    """Applies CLI-wide settings to an SDK *client*.

    Adjusts SSL verification for debugging, appends the CLI user agents
    (including any agent from the AZURE_HTTP_USER_AGENT environment
    variable), stamps session headers and the current command name, and
    disables SDK-generated request ids when the session pins one.
    """
    client = _debug.change_ssl_cert_verification(client)
    client.config.add_user_agent(UA_AGENT)
    try:
        client.config.add_user_agent(os.environ[ENV_ADDITIONAL_USER_AGENT])
    except KeyError:
        pass
    for header, value in APPLICATION.session['headers'].items():
        # We are working with the autorest team to expose the add_header
        # functionality of the generated client to avoid having to access
        # private members
        client._client.add_header(header, value)  # pylint: disable=protected-access
    # Tag completer-driven requests so the service can distinguish them.
    command_name_suffix = ';completer-request' if APPLICATION.session['completer_active'] else ''
    client._client.add_header('CommandName',  # pylint: disable=protected-access
                              "{}{}".format(APPLICATION.session['command'], command_name_suffix))
    # Only auto-generate request ids when the session has not supplied one.
    client.config.generate_client_request_id = \
        'x-ms-client-request-id' not in APPLICATION.session['headers']
def _get_mgmt_service_client(client_type, subscription_bound=True, subscription_id=None,
                             api_version=None, base_url_bound=True, **kwargs):
    """Instantiates *client_type* with the CLI's login credentials.

    Returns a ``(client, subscription_id)`` tuple. ``subscription_bound``
    controls whether the subscription id is passed as a positional
    constructor argument; ``base_url_bound`` whether the current cloud's
    resource-manager endpoint is supplied as ``base_url``.
    """
    logger.debug('Getting management service client client_type=%s', client_type.__name__)
    profile = Profile()
    cred, subscription_id, _ = profile.get_login_credentials(subscription_id=subscription_id)
    client_kwargs = {}
    if base_url_bound:
        client_kwargs = {'base_url': CLOUD.endpoints.resource_manager}
    if api_version:
        client_kwargs['api_version'] = api_version
    if kwargs:
        client_kwargs.update(kwargs)
    if subscription_bound:
        client = client_type(cred, subscription_id, **client_kwargs)
    else:
        client = client_type(cred, **client_kwargs)
    configure_common_settings(client)
    return (client, subscription_id)
def get_data_service_client(service_type, account_name, account_key, connection_string=None,  # pylint: disable=too-many-arguments
                            sas_token=None, endpoint_suffix=None):
    """Builds an Azure Storage data-plane client of the given *service_type*.

    Re-raises the SDK's "missing info" ValueError as-is (so callers can show
    the SDK message) and converts any other ValueError into a generic
    CLIError about connection parameters.
    """
    logger.debug('Getting data service client service_type=%s', service_type.__name__)
    try:
        client_kwargs = {'account_name': account_name,
                         'account_key': account_key,
                         'connection_string': connection_string,
                         'sas_token': sas_token}
        if endpoint_suffix:
            client_kwargs['endpoint_suffix'] = endpoint_suffix
        client = service_type(**client_kwargs)
    except ValueError as exc:
        _ERROR_STORAGE_MISSING_INFO = \
            get_sdk(ResourceType.DATA_STORAGE, '_error#_ERROR_STORAGE_MISSING_INFO')
        # Distinguish "not enough credentials supplied" from other ValueErrors
        # by substring-matching the SDK's canned message.
        if _ERROR_STORAGE_MISSING_INFO in str(exc):
            raise ValueError(exc)
        else:
            raise CLIError('Unable to obtain data client. Check your connection parameters.')
    # TODO: enable Fiddler
    client.request_callback = _add_headers
    return client
def get_subscription_id():
    """Returns the subscription id associated with the current CLI login."""
    _, subscription_id, _ = Profile().get_login_credentials()
    return subscription_id
def _add_headers(request):
agents = [request.headers['User-Agent'], UA_AGENT]
try:
agents.append(os.environ[ENV_ADDITIONAL_USER_AGENT])
except KeyError:
pass
request.headers['User-Agent'] = ' '.join(agents)
try:
request.headers.update(APPLICATION.session['headers'])
except KeyError:
pass
| 40.07874 | 130 | 0.684086 |
fe96be2a417e342d752008d8c7433f2deed77808 | 778 | py | Python | Pywikibot/category/category.py | Ketho/WowpediaDoc | 4c9388157f3311e1be4c4f8bc5983dd28ab171c4 | [
"MIT"
] | null | null | null | Pywikibot/category/category.py | Ketho/WowpediaDoc | 4c9388157f3311e1be4c4f8bc5983dd28ab171c4 | [
"MIT"
] | null | null | null | Pywikibot/category/category.py | Ketho/WowpediaDoc | 4c9388157f3311e1be4c4f8bc5983dd28ab171c4 | [
"MIT"
] | null | null | null | from importlib import util
import pywikibot
# Shared site handle: English-language Wowpedia wiki.
site = pywikibot.Site("en", "wowpedia")
def load_file_as_module(name, location):
    """Load the Python source file at *location* and return it as a module
    registered under the module name *name*."""
    spec = util.spec_from_file_location(name, location)
    loaded = util.module_from_spec(spec)
    spec.loader.exec_module(loaded)
    return loaded
# Load the shared namespace table; assumes namespace.py defines a
# ``namespaces`` iterable of (fileName, systemName, systemNamespace)
# triples — TODO confirm against wowpedia/category/namespace.py.
mod = load_file_as_module("mymodule", "wowpedia/category/namespace.py")
# Category page template: sort key first, then an external-links section
# rendered by the "API namespaces" Lua module ({{{{ / }}}} are literal
# braces escaped for str.format).
fs = """[[Category:API namespaces|{!s}]]
===External links===
{{{{#invoke:API namespaces|main|filename={!s}|system={!s}}}}}"""
# Create/update one category page per namespace that has a system namespace.
for v in mod.namespaces:
    fileName, systemName, sytemNamespace = v[0], v[1], v[2]
    if sytemNamespace:
        page = pywikibot.Page(site, "Category:API_namespaces/"+sytemNamespace)
        # Sort key drops the "C_" prefix so categories sort by the bare name.
        page.text = fs.format(sytemNamespace.replace("C_", ""), fileName, systemName)
        page.save(summary="Category sortkey")
print("done")
| 29.923077 | 79 | 0.732648 |
ca97813f702760b23eea59dfe38a3eed9f791e4a | 2,560 | py | Python | aliyun-python-sdk-mts/aliyunsdkmts/request/v20140618/UpdateMediaWorkflowTriggerModeRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 1,001 | 2015-07-24T01:32:41.000Z | 2022-03-25T01:28:18.000Z | aliyun-python-sdk-mts/aliyunsdkmts/request/v20140618/UpdateMediaWorkflowTriggerModeRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 363 | 2015-10-20T03:15:00.000Z | 2022-03-08T12:26:19.000Z | aliyun-python-sdk-mts/aliyunsdkmts/request/v20140618/UpdateMediaWorkflowTriggerModeRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 682 | 2015-09-22T07:19:02.000Z | 2022-03-22T09:51:46.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkmts.endpoint import endpoint_data
class UpdateMediaWorkflowTriggerModeRequest(RpcRequest):
	"""RPC request wrapper for the MTS (2014-06-18) ``UpdateMediaWorkflowTriggerMode`` operation."""

	def __init__(self):
		RpcRequest.__init__(self, 'Mts', '2014-06-18', 'UpdateMediaWorkflowTriggerMode','mts')
		self.set_method('POST')
		# Wire up endpoint-resolution data when the installed core SDK exposes it.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_ResourceOwnerId(self):
		"""Return the ``ResourceOwnerId`` query parameter (Long)."""
		params = self.get_query_params()
		return params.get('ResourceOwnerId')

	def set_ResourceOwnerId(self, ResourceOwnerId):
		"""Set the ``ResourceOwnerId`` query parameter (Long)."""
		self.add_query_param('ResourceOwnerId', ResourceOwnerId)

	def get_MediaWorkflowId(self):
		"""Return the ``MediaWorkflowId`` query parameter (String)."""
		params = self.get_query_params()
		return params.get('MediaWorkflowId')

	def set_MediaWorkflowId(self, MediaWorkflowId):
		"""Set the ``MediaWorkflowId`` query parameter (String)."""
		self.add_query_param('MediaWorkflowId', MediaWorkflowId)

	def get_ResourceOwnerAccount(self):
		"""Return the ``ResourceOwnerAccount`` query parameter (String)."""
		params = self.get_query_params()
		return params.get('ResourceOwnerAccount')

	def set_ResourceOwnerAccount(self, ResourceOwnerAccount):
		"""Set the ``ResourceOwnerAccount`` query parameter (String)."""
		self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

	def get_OwnerAccount(self):
		"""Return the ``OwnerAccount`` query parameter (String)."""
		params = self.get_query_params()
		return params.get('OwnerAccount')

	def set_OwnerAccount(self, OwnerAccount):
		"""Set the ``OwnerAccount`` query parameter (String)."""
		self.add_query_param('OwnerAccount', OwnerAccount)

	def get_OwnerId(self):
		"""Return the ``OwnerId`` query parameter (Long)."""
		params = self.get_query_params()
		return params.get('OwnerId')

	def set_OwnerId(self, OwnerId):
		"""Set the ``OwnerId`` query parameter (Long)."""
		self.add_query_param('OwnerId', OwnerId)

	def get_TriggerMode(self):
		"""Return the ``TriggerMode`` query parameter (String)."""
		params = self.get_query_params()
		return params.get('TriggerMode')

	def set_TriggerMode(self, TriggerMode):
		"""Set the ``TriggerMode`` query parameter (String)."""
		self.add_query_param('TriggerMode', TriggerMode)
| 40 | 89 | 0.769531 |
9caa2f63b9650989f8037f646bb7080bffa6f1cf | 12,459 | py | Python | TA-zscaler-api/bin/input_module_zscaler_zpa_configurations.py | LetMeR00t/TA-zscaler-api | 9a2cee3954bf75a814bb057cf36eb2b2b4c093f6 | [
"MIT"
] | 4 | 2022-03-04T11:11:30.000Z | 2022-03-07T09:55:07.000Z | TA-zscaler-api/bin/input_module_zscaler_zpa_configurations.py | LetMeR00t/TA-zscaler-api | 9a2cee3954bf75a814bb057cf36eb2b2b4c093f6 | [
"MIT"
] | null | null | null | TA-zscaler-api/bin/input_module_zscaler_zpa_configurations.py | LetMeR00t/TA-zscaler-api | 9a2cee3954bf75a814bb057cf36eb2b2b4c093f6 | [
"MIT"
] | null | null | null |
# encoding = utf-8
import os
import sys
import time
import datetime
import json
import hashlib
# Import custom librairies
from pyzscaler import ZPA
import restfly
# Module-level state shared with write_to_splunk(); both are set once per
# run at the top of collect_events().
INPUT_UID = None  # short sha256 digest identifying one collection run
ZSCALER_INSTANCE = None  # name of the Splunk input stanza / Zscaler instance
'''
IMPORTANT
Edit only the validate_input and collect_events functions.
Do not edit any other part in this file.
This file is generated only once when creating the modular input.
'''
'''
# For advanced users, if you want to create single instance mod input, uncomment this method.
def use_single_instance_mode():
return True
'''
def validate_input(helper, definition):
    """Validate the input stanza configuration.

    Logs an error and exits the process with status 1 when the mandatory
    ``client_account`` parameter is missing; returns None when valid.
    """
    # Non-idiomatic `if(...)`, the dead trailing `pass` and the commented-out
    # template example were removed; behavior is unchanged.
    if definition.parameters.get('client_account') is None:
        helper.log_error("[ZPA-E-NO_CLIENT_ACCOUNT] No client account was provided")
        sys.exit(1)
def collect_events(helper, ew):
    """Collect ZPA configuration objects from the Zscaler API and index them.

    Reads the input stanza to determine which configuration item families to
    pull (``items``), authenticates against ZPA with the configured client
    account and per-instance customer id, and writes each retrieved object to
    Splunk via write_to_splunk(). API failures are logged and mapped to
    distinct non-zero exit codes so they can be told apart in the logs.

    Fixes vs the generated template: the two ``log()`` calls in the
    "service_edges" section previously referenced the stale (and possibly
    undefined) ``list_groups`` variable instead of ``all_data``, and the
    generic ``except Exception`` handler accessed ``e.msg`` which only exists
    on restfly errors (raising AttributeError and masking the real error).
    """
    helper.log_info("[ZPA-I-START-COLLECT] Start to recover configuration events from Zscaler ZPA")

    global ZSCALER_INSTANCE
    global INPUT_UID

    # Set the Zscaler instance name from the (single) input stanza name
    ZSCALER_INSTANCE = list(helper.get_input_stanza().keys())[0]

    # Calculate a unique ID for this collection run so its events share a source
    INPUT_UID = hashlib.sha256(str(datetime.datetime.now()).encode()).hexdigest()[:8]

    # Get information about the Splunk input
    opt_instance = helper.get_arg('instance')
    opt_items = helper.get_arg('items')

    # Get credentials for Zscaler
    client = helper.get_arg('client_account')
    customer_id = helper.get_global_setting("instance_"+str(opt_instance)+"_zpa_customer_id")

    if customer_id is None or customer_id == "":
        helper.log_error("[ZPA-E-CUSTOMER_ID_NULL] No Customer ID was provided for instance n°"+str(opt_instance)+", check your configuration")
        sys.exit(1)

    # Item families that map 1:1 to a simple "list" call on a pyzscaler endpoint
    ITEMS_MAP = {
        "app_segments": "list_segments",
        "certificates": "list_browser_access",
        "cloud_connector_groups": "list_groups",
        "connector_groups": "list_groups",
        "connectors": "list_connectors",
        "idp": "list_idps",
        "machine_groups": "list_groups",
        "posture_profiles": "list_profiles",
        "saml_attributes": "list_attributes",
        "server_groups": "list_groups",
        "servers": "list_servers",
        "trusted_networks": "list_networks"
    }

    # Instantiate the ZPA object with given inputs
    try:
        zpa = ZPA(client_id=client["username"], client_secret=client["password"], customer_id=customer_id)
    except restfly.errors.UnauthorizedError as e:
        helper.log_error("[ZPA-E-BAD_CREDENTIALS] 🔴 Your request is not correct and was rejected by Zscaler: "+str(e.msg.replace("\"","'")))
        sys.exit(10)

    helper.log_debug("[ZPA-D-ZPA_OBJECT] Zscaler ZPA connection object is created successfully")

    try:
        # Get items (simple methods)
        for item in opt_items:
            if item in ITEMS_MAP:
                function = ITEMS_MAP[item]
                all_data = getattr(getattr(zpa,item),function)()
                for data in all_data:
                    write_to_splunk(helper, ew, item, data)
                log(helper, item, all_data)

        # Get segment groups if specified (more complex, as we can have big segment groups)
        if "segment_groups" in opt_items:
            for data in zpa.segment_groups.list_groups():
                if "applications" in data:
                    # Flatten: emit one event per application of the group
                    applications = data["applications"]
                    del data["applications"]
                    for app in applications:
                        data["application"] = app
                        write_to_splunk(helper, ew, "segment_groups:"+str(data["id"]), data)
                        log(helper, "segment_groups", data)
                else:
                    write_to_splunk(helper, ew, "segment_groups:"+str(data["id"]), data)
                    log(helper, "segment_groups", data)

        # Get policies if specified (more complex)
        if "policies" in opt_items:
            for policy_name in ["access","timeout","client_forwarding","siem"]:
                policy = zpa.policies.get_policy(policy_name)
                write_to_splunk(helper, ew, "policies", policy)
                log(helper, "policies", policy)
                # The "siem" policy has no rules endpoint
                if policy_name != "siem":
                    all_data = zpa.policies.list_rules(policy_name)
                    for rule in all_data:
                        write_to_splunk(helper, ew, "policies:rules", rule)
                        log(helper, "policies:rules", all_data)

        # Get provisioning if specified (more complex)
        if "provisioning" in opt_items:
            for key in ["connector","service_edge"]:
                provisioning = zpa.provisioning.list_provisioning_keys(key)
                if provisioning != []:
                    write_to_splunk(helper, ew, "provisioning", provisioning)
                    log(helper, "provisioning", provisioning)

        # Get SCIM attributes if specified (more complex)
        if "scim_attributes" in opt_items:
            for idp in zpa.idp.list_idps():
                list_attributes = zpa.scim_attributes.list_attributes_by_idp(idp["id"])
                if list_attributes != []:
                    write_to_splunk(helper, ew, "scim_attributes", list_attributes)
                    log(helper, "scim_attributes", list_attributes)

        # Get SCIM groups if specified (more complex)
        if "scim_groups" in opt_items:
            for idp in zpa.idp.list_idps():
                list_groups = zpa.scim_groups.list_groups(idp["id"])
                if list_groups != []:
                    write_to_splunk(helper, ew, "scim_groups", list_groups)
                    log(helper, "scim_groups", list_groups)

        # Get service edges if specified (more complex)
        if "service_edges" in opt_items:
            all_data = zpa.service_edges.list_service_edges()
            for service_edges in all_data:
                write_to_splunk(helper, ew, "service_edges", service_edges)
                log(helper, "service_edges", all_data)
            all_data = zpa.service_edges.list_service_edge_groups()
            for service_edge_groups in all_data:
                write_to_splunk(helper, ew, "service_edge_groups", service_edge_groups)
                log(helper, "service_edge_groups", all_data)

    except restfly.errors.BadRequestError as e:
        helper.log_error("[ZPA-E-BAD_REQUEST] 🔴 Your request is not correct and was rejected by Zscaler: "+str(e.msg.replace("\"","'")))
        sys.exit(15)
    except restfly.errors.ForbiddenError as e:
        helper.log_error("[ZPA-E-FORBIDDEN_REQUEST] 🔴 Your request is forbidden and was rejected by Zscaler: "+str(e.msg.replace("\"","'")))
        sys.exit(16)
    except restfly.errors.TooManyRequestsError as e:
        helper.log_error("[ZPA-E-TOO_MANY_REQUESTS] 🔴 Too many requests were performed to the Zscaler API: "+str(e.msg.replace("\"","'")))
        sys.exit(17)
    except Exception as e:
        # Generic exceptions carry no ``msg`` attribute: stringify the exception itself
        helper.log_error("[ZPA-E-HTTP_ERROR] 🔴 An HTTP error occured: "+str(e).replace("\"","'"))
        sys.exit(20)

    helper.log_info("[ZPA-I-END-COLLECT] 🟢 Events from Zscaler ZPA ("+str(opt_items)+") are recovered")
def write_to_splunk(helper, ew, item, data):
    """Forward one collected object to Splunk as a JSON event.

    The event source encodes the Zscaler instance, the per-run UID and the
    item type so searches can distinguish collection runs.
    """
    source = "zpa:" + ZSCALER_INSTANCE + ":" + INPUT_UID + ":" + item
    event = helper.new_event(source=source,
                             index=helper.get_output_index(),
                             sourcetype=helper.get_sourcetype(),
                             data=json.dumps(data))
    ew.write_event(event)
def log(helper, item, all_data):
    """Debug-log the data collected for *item*, or note that nothing was found.

    The original condition ``len(all_data)>0 and all_data!=[]`` was redundant;
    plain truthiness is equivalent for the lists/dicts passed in.
    """
    if all_data:
        helper.log_debug("[ZPA-D-EVENTS_WRITTEN] Events are written for "+item+" to the index "+helper.get_output_index()+": "+str(all_data))
    else:
        helper.log_debug("[ZPA-D-NO_EVENT_FOUND] No event found for "+item)
| 43.715789 | 175 | 0.658078 |
b8a1361d8282e0beae13093f3872a6a825a9219e | 460 | py | Python | baby_book/accounts/migrations/0009_auto_20201205_1509.py | Martin-Atanasov/Python_Web_Framework_Project | 2d37f9ab10fbaea40aa8ec4cd690dc24178389e3 | [
"MIT"
] | null | null | null | baby_book/accounts/migrations/0009_auto_20201205_1509.py | Martin-Atanasov/Python_Web_Framework_Project | 2d37f9ab10fbaea40aa8ec4cd690dc24178389e3 | [
"MIT"
] | null | null | null | baby_book/accounts/migrations/0009_auto_20201205_1509.py | Martin-Atanasov/Python_Web_Framework_Project | 2d37f9ab10fbaea40aa8ec4cd690dc24178389e3 | [
"MIT"
] | 1 | 2020-12-07T12:26:23.000Z | 2020-12-07T12:26:23.000Z | # Generated by Django 3.1.3 on 2020-12-05 15:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``UserProfile.profile_picture`` to an
    ImageField with a default placeholder image."""

    # Must apply after 0008 so the field being altered already exists.
    dependencies = [
        ('accounts', '0008_auto_20201205_1502'),
    ]

    operations = [
        migrations.AlterField(
            model_name='userprofile',
            name='profile_picture',
            # NOTE(review): the default is an absolute /media/... path while
            # upload_to is relative ("media/users") — looks inconsistent;
            # confirm against MEDIA_ROOT/MEDIA_URL settings.
            field=models.ImageField(default='/media/users/profile_pic_default.png', upload_to='media/users'),
        ),
    ]
| 24.210526 | 109 | 0.636957 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.