body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
f267a534baf18b5786167650bff2a2878b09f2efc7bd67855dac351eaa89742c
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_version(self, mocked_error): 'Test mapping a bad OpenStack numerical versions to code name' openstack.get_os_codename_version('2014.5.5') expected_err = 'Could not determine OpenStack codename for version 2014.5.5' mocked_error.assert_called_with(expected_err)
Test mapping a bad OpenStack numerical versions to code name
tests/contrib/openstack/test_openstack_utils.py
test_os_codename_from_bad_version
AurelienLourot/charm-helpers
15
python
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_version(self, mocked_error): openstack.get_os_codename_version('2014.5.5') expected_err = 'Could not determine OpenStack codename for version 2014.5.5' mocked_error.assert_called_with(expected_err)
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_version(self, mocked_error): openstack.get_os_codename_version('2014.5.5') expected_err = 'Could not determine OpenStack codename for version 2014.5.5' mocked_error.assert_called_with(expected_err)<|docstring|>Test mapping a bad OpenStack numerical versions to code name<|endoftext|>
6309423cd005073ccd0cb2604b72db7b2a6d82cb7ed785044d5304dbeeb8857d
def test_os_version_from_codename(self): 'Test mapping a OpenStack codename to numerical version' self.assertEquals(openstack.get_os_version_codename('folsom'), '2012.2')
Test mapping a OpenStack codename to numerical version
tests/contrib/openstack/test_openstack_utils.py
test_os_version_from_codename
AurelienLourot/charm-helpers
15
python
def test_os_version_from_codename(self): self.assertEquals(openstack.get_os_version_codename('folsom'), '2012.2')
def test_os_version_from_codename(self): self.assertEquals(openstack.get_os_version_codename('folsom'), '2012.2')<|docstring|>Test mapping a OpenStack codename to numerical version<|endoftext|>
55ac8cbaab173b68b7375e852a024940cc63b29b39c4cf85ef57af4c3d2e1fed
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_bad_codename(self, mocked_error): 'Test mapping a bad OpenStack codename to numerical version' openstack.get_os_version_codename('foo') expected_err = 'Could not derive OpenStack version for codename: foo' mocked_error.assert_called_with(expected_err)
Test mapping a bad OpenStack codename to numerical version
tests/contrib/openstack/test_openstack_utils.py
test_os_version_from_bad_codename
AurelienLourot/charm-helpers
15
python
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_bad_codename(self, mocked_error): openstack.get_os_version_codename('foo') expected_err = 'Could not derive OpenStack version for codename: foo' mocked_error.assert_called_with(expected_err)
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_bad_codename(self, mocked_error): openstack.get_os_version_codename('foo') expected_err = 'Could not derive OpenStack version for codename: foo' mocked_error.assert_called_with(expected_err)<|docstring|>Test mapping a bad OpenStack codename to numerical version<|endoftext|>
44c651b00276a788f2c9360bf1f4423551d417a769e5bb41977565dbacd06192
def test_os_version_swift_from_codename(self): 'Test mapping a swift codename to numerical version' self.assertEquals(openstack.get_os_version_codename_swift('liberty'), '2.5.0')
Test mapping a swift codename to numerical version
tests/contrib/openstack/test_openstack_utils.py
test_os_version_swift_from_codename
AurelienLourot/charm-helpers
15
python
def test_os_version_swift_from_codename(self): self.assertEquals(openstack.get_os_version_codename_swift('liberty'), '2.5.0')
def test_os_version_swift_from_codename(self): self.assertEquals(openstack.get_os_version_codename_swift('liberty'), '2.5.0')<|docstring|>Test mapping a swift codename to numerical version<|endoftext|>
59bbc8956d12cebd4df1a68fb4db458c6993b686896a58397cb30952e5c52cad
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_swift_from_bad_codename(self, mocked_error): 'Test mapping a bad swift codename to numerical version' openstack.get_os_version_codename_swift('foo') expected_err = 'Could not derive swift version for codename: foo' mocked_error.assert_called_with(expected_err)
Test mapping a bad swift codename to numerical version
tests/contrib/openstack/test_openstack_utils.py
test_os_version_swift_from_bad_codename
AurelienLourot/charm-helpers
15
python
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_swift_from_bad_codename(self, mocked_error): openstack.get_os_version_codename_swift('foo') expected_err = 'Could not derive swift version for codename: foo' mocked_error.assert_called_with(expected_err)
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_swift_from_bad_codename(self, mocked_error): openstack.get_os_version_codename_swift('foo') expected_err = 'Could not derive swift version for codename: foo' mocked_error.assert_called_with(expected_err)<|docstring|>Test mapping a bad swift codename to numerical version<|endoftext|>
951f1de04ab74629da5bb5701718b4c9abffc6d6a266c00e9c0349df69fe2297
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_package(self, mock_snap_install_requested, mock_get_installed_os_version): 'Test deriving OpenStack codename from an installed package' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() for (pkg, vers) in six.iteritems(FAKE_REPO): if pkg.startswith('bad-'): continue if ('pkg_vers' not in vers): continue self.assertEquals(openstack.get_os_codename_package(pkg), vers['os_release'])
Test deriving OpenStack codename from an installed package
tests/contrib/openstack/test_openstack_utils.py
test_os_codename_from_package
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_package(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() for (pkg, vers) in six.iteritems(FAKE_REPO): if pkg.startswith('bad-'): continue if ('pkg_vers' not in vers): continue self.assertEquals(openstack.get_os_codename_package(pkg), vers['os_release'])
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_package(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() for (pkg, vers) in six.iteritems(FAKE_REPO): if pkg.startswith('bad-'): continue if ('pkg_vers' not in vers): continue self.assertEquals(openstack.get_os_codename_package(pkg), vers['os_release'])<|docstring|>Test deriving OpenStack codename from an installed package<|endoftext|>
eeacabe46aac5b11a489e7bf7c6cf09f8afaa17bbf2a3bf79d21f77c5c4d1d83
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_package_version(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): 'Test deriving OpenStack codename for a poorly versioned package' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() openstack.get_os_codename_package('bad-version') _e = 'Could not determine OpenStack codename for version 2200.1' mocked_error.assert_called_with(_e)
Test deriving OpenStack codename for a poorly versioned package
tests/contrib/openstack/test_openstack_utils.py
test_os_codename_from_bad_package_version
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_package_version(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() openstack.get_os_codename_package('bad-version') _e = 'Could not determine OpenStack codename for version 2200.1' mocked_error.assert_called_with(_e)
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_package_version(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() openstack.get_os_codename_package('bad-version') _e = 'Could not determine OpenStack codename for version 2200.1' mocked_error.assert_called_with(_e)<|docstring|>Test deriving OpenStack codename for a poorly versioned package<|endoftext|>
d07b4aadeb02eaa54a4031cc5b770d5a054fa402a31c7ae6b8e11546d4c0fe66
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): 'Test deriving OpenStack codename from an unavailable package' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_codename_package('foo') except Exception: pass e = 'Could not determine version of package with no installation candidate: foo' mocked_error.assert_called_with(e)
Test deriving OpenStack codename from an unavailable package
tests/contrib/openstack/test_openstack_utils.py
test_os_codename_from_bad_package
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_codename_package('foo') except Exception: pass e = 'Could not determine version of package with no installation candidate: foo' mocked_error.assert_called_with(e)
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_bad_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_codename_package('foo') except Exception: pass e = 'Could not determine version of package with no installation candidate: foo' mocked_error.assert_called_with(e)<|docstring|>Test deriving OpenStack codename from an unavailable package<|endoftext|>
ab89b1608dd86c539be9b3450ebd375cec204f41ab0581d73dc89c1f407e779f
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_bad_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): 'Test OpenStack codename from an unavailable package is non-fatal' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_codename_package('foo', fatal=False))
Test OpenStack codename from an unavailable package is non-fatal
tests/contrib/openstack/test_openstack_utils.py
test_os_codename_from_bad_package_nonfatal
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_bad_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_codename_package('foo', fatal=False))
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_bad_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_codename_package('foo', fatal=False))<|docstring|>Test OpenStack codename from an unavailable package is non-fatal<|endoftext|>
3ddea9e6d79655df0beff906d1f35d953b20fbd6895db6c858d39fc44a07f1c2
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_uninstalled_package(self, mock_error, mock_snap_install_requested, mock_get_installed_os_version): 'Test OpenStack codename from an available but uninstalled pkg' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_codename_package('cinder-common', fatal=True) except Exception: pass e = 'Could not determine version of uninstalled package: cinder-common' mock_error.assert_called_with(e)
Test OpenStack codename from an available but uninstalled pkg
tests/contrib/openstack/test_openstack_utils.py
test_os_codename_from_uninstalled_package
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_uninstalled_package(self, mock_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_codename_package('cinder-common', fatal=True) except Exception: pass e = 'Could not determine version of uninstalled package: cinder-common' mock_error.assert_called_with(e)
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_codename_from_uninstalled_package(self, mock_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_codename_package('cinder-common', fatal=True) except Exception: pass e = 'Could not determine version of uninstalled package: cinder-common' mock_error.assert_called_with(e)<|docstring|>Test OpenStack codename from an available but uninstalled pkg<|endoftext|>
952de803e42fc6a1d446dd7a423526e60087781cac5d976517c0a6b22ba19771
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_uninstalled_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): 'Test OpenStack codename from avail uninstalled pkg is non fatal' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_codename_package('cinder-common', fatal=False))
Test OpenStack codename from avail uninstalled pkg is non fatal
tests/contrib/openstack/test_openstack_utils.py
test_os_codename_from_uninstalled_package_nonfatal
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_uninstalled_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_codename_package('cinder-common', fatal=False))
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_codename_from_uninstalled_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_codename_package('cinder-common', fatal=False))<|docstring|>Test OpenStack codename from avail uninstalled pkg is non fatal<|endoftext|>
f863f976c799b89a1c84650fabc13b03ac05181b2b04a0191db262878a51e3d6
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): 'Test deriving OpenStack version from an installed package' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() for (pkg, vers) in six.iteritems(FAKE_REPO): if pkg.startswith('bad-'): continue if ('pkg_vers' not in vers): continue self.assertEquals(openstack.get_os_version_package(pkg), vers['os_version'])
Test deriving OpenStack version from an installed package
tests/contrib/openstack/test_openstack_utils.py
test_os_version_from_package
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() for (pkg, vers) in six.iteritems(FAKE_REPO): if pkg.startswith('bad-'): continue if ('pkg_vers' not in vers): continue self.assertEquals(openstack.get_os_version_package(pkg), vers['os_version'])
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() for (pkg, vers) in six.iteritems(FAKE_REPO): if pkg.startswith('bad-'): continue if ('pkg_vers' not in vers): continue self.assertEquals(openstack.get_os_version_package(pkg), vers['os_version'])<|docstring|>Test deriving OpenStack version from an installed package<|endoftext|>
79585d11caddf55377a9a813c31e9d3be8379e69dbb21a45809a3e8e8e1d6e90
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_bad_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): 'Test deriving OpenStack version from an uninstalled package' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_version_package('foo') except Exception: pass e = 'Could not determine version of package with no installation candidate: foo' mocked_error.assert_called_with(e)
Test deriving OpenStack version from an uninstalled package
tests/contrib/openstack/test_openstack_utils.py
test_os_version_from_bad_package
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_bad_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_version_package('foo') except Exception: pass e = 'Could not determine version of package with no installation candidate: foo' mocked_error.assert_called_with(e)
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') @patch('charmhelpers.contrib.openstack.utils.error_out') def test_os_version_from_bad_package(self, mocked_error, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() try: openstack.get_os_version_package('foo') except Exception: pass e = 'Could not determine version of package with no installation candidate: foo' mocked_error.assert_called_with(e)<|docstring|>Test deriving OpenStack version from an uninstalled package<|endoftext|>
c100144896b7fc9d46661445e345775af18dd7af4e7662268e12b9db093b2361
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_version_from_bad_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): 'Test OpenStack version from an uninstalled package is non-fatal' mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_version_package('foo', fatal=False))
Test OpenStack version from an uninstalled package is non-fatal
tests/contrib/openstack/test_openstack_utils.py
test_os_version_from_bad_package_nonfatal
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_version_from_bad_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_version_package('foo', fatal=False))
@patch.object(openstack, 'get_installed_os_version') @patch.object(openstack, 'snap_install_requested') def test_os_version_from_bad_package_nonfatal(self, mock_snap_install_requested, mock_get_installed_os_version): mock_snap_install_requested.return_value = False mock_get_installed_os_version.return_value = None with patch.object(openstack, 'apt_cache') as cache: cache.return_value = self._apt_cache() self.assertEquals(None, openstack.get_os_version_package('foo', fatal=False))<|docstring|>Test OpenStack version from an uninstalled package is non-fatal<|endoftext|>
29bb9801b81f2d3589160a777ea199b88f7440dcfca8602237c2b3e61c6af42d
@patch.object(openstack, 'juju_log') @patch('sys.exit') def test_error_out(self, mocked_exit, juju_log): 'Test erroring out' openstack.error_out('Everything broke.') _log = 'FATAL ERROR: Everything broke.' juju_log.assert_called_with(_log, level='ERROR') mocked_exit.assert_called_with(1)
Test erroring out
tests/contrib/openstack/test_openstack_utils.py
test_error_out
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'juju_log') @patch('sys.exit') def test_error_out(self, mocked_exit, juju_log): openstack.error_out('Everything broke.') _log = 'FATAL ERROR: Everything broke.' juju_log.assert_called_with(_log, level='ERROR') mocked_exit.assert_called_with(1)
@patch.object(openstack, 'juju_log') @patch('sys.exit') def test_error_out(self, mocked_exit, juju_log): openstack.error_out('Everything broke.') _log = 'FATAL ERROR: Everything broke.' juju_log.assert_called_with(_log, level='ERROR') mocked_exit.assert_called_with(1)<|docstring|>Test erroring out<|endoftext|>
14eb25827fda10ca9f00e56e744ed5ee6d397edaae0bde953419d21d4ccf2d40
def test_configure_install_source_distro(self): 'Test configuring installation from distro' self.assertIsNone(openstack.configure_installation_source('distro'))
Test configuring installation from distro
tests/contrib/openstack/test_openstack_utils.py
test_configure_install_source_distro
AurelienLourot/charm-helpers
15
python
def test_configure_install_source_distro(self): self.assertIsNone(openstack.configure_installation_source('distro'))
def test_configure_install_source_distro(self): self.assertIsNone(openstack.configure_installation_source('distro'))<|docstring|>Test configuring installation from distro<|endoftext|>
89080243927bb7f4fbd922fa8e0def52f39b0b3f7680a45b5d6188cd085db65d
def test_configure_install_source_ppa(self): 'Test configuring installation source from PPA' with patch('subprocess.check_call') as mock: src = 'ppa:gandelman-a/openstack' openstack.configure_installation_source(src) ex_cmd = ['add-apt-repository', '--yes', 'ppa:gandelman-a/openstack'] mock.assert_called_with(ex_cmd, env={})
Test configuring installation source from PPA
tests/contrib/openstack/test_openstack_utils.py
test_configure_install_source_ppa
AurelienLourot/charm-helpers
15
python
def test_configure_install_source_ppa(self): with patch('subprocess.check_call') as mock: src = 'ppa:gandelman-a/openstack' openstack.configure_installation_source(src) ex_cmd = ['add-apt-repository', '--yes', 'ppa:gandelman-a/openstack'] mock.assert_called_with(ex_cmd, env={})
def test_configure_install_source_ppa(self): with patch('subprocess.check_call') as mock: src = 'ppa:gandelman-a/openstack' openstack.configure_installation_source(src) ex_cmd = ['add-apt-repository', '--yes', 'ppa:gandelman-a/openstack'] mock.assert_called_with(ex_cmd, env={})<|docstring|>Test configuring installation source from PPA<|endoftext|>
58466a71b402f64a5730f185e45cd681792623099c7f0709578a907f660e4f79
@patch('subprocess.check_call') @patch.object(fetch, 'import_key') def test_configure_install_source_deb_url(self, _import, _spcc): 'Test configuring installation source from deb repo url' src = 'deb http://ubuntu-cloud.archive.canonical.com/ubuntu precise-havana main|KEYID' openstack.configure_installation_source(src) _import.assert_called_with('KEYID') _spcc.assert_called_once_with(['add-apt-repository', '--yes', 'deb http://ubuntu-cloud.archive.canonical.com/ubuntu precise-havana main'], env={})
Test configuring installation source from deb repo url
tests/contrib/openstack/test_openstack_utils.py
test_configure_install_source_deb_url
AurelienLourot/charm-helpers
15
python
@patch('subprocess.check_call') @patch.object(fetch, 'import_key') def test_configure_install_source_deb_url(self, _import, _spcc): src = 'deb http://ubuntu-cloud.archive.canonical.com/ubuntu precise-havana main|KEYID' openstack.configure_installation_source(src) _import.assert_called_with('KEYID') _spcc.assert_called_once_with(['add-apt-repository', '--yes', 'deb http://ubuntu-cloud.archive.canonical.com/ubuntu precise-havana main'], env={})
@patch('subprocess.check_call') @patch.object(fetch, 'import_key') def test_configure_install_source_deb_url(self, _import, _spcc): src = 'deb http://ubuntu-cloud.archive.canonical.com/ubuntu precise-havana main|KEYID' openstack.configure_installation_source(src) _import.assert_called_with('KEYID') _spcc.assert_called_once_with(['add-apt-repository', '--yes', 'deb http://ubuntu-cloud.archive.canonical.com/ubuntu precise-havana main'], env={})<|docstring|>Test configuring installation source from deb repo url<|endoftext|>
ddc73c108e74ffea9a4ceec7c2762c42a037c3e6d7e89c8a7138d77cc586fba1
@patch.object(fetch, 'get_distrib_codename') @patch(builtin_open) @patch('subprocess.check_call') def test_configure_install_source_distro_proposed(self, _spcc, _open, _lsb): 'Test configuring installation source from deb repo url' _lsb.return_value = FAKE_CODENAME _file = MagicMock(spec=io.FileIO) _open.return_value = _file openstack.configure_installation_source('distro-proposed') _file.__enter__().write.assert_called_once_with('# Proposed\ndeb http://archive.ubuntu.com/ubuntu precise-proposed main universe multiverse restricted\n') src = 'deb http://archive.ubuntu.com/ubuntu/ precise-proposed restricted main multiverse universe' openstack.configure_installation_source(src) _spcc.assert_called_once_with(['add-apt-repository', '--yes', 'deb http://archive.ubuntu.com/ubuntu/ precise-proposed restricted main multiverse universe'], env={})
Test configuring installation source from deb repo url
tests/contrib/openstack/test_openstack_utils.py
test_configure_install_source_distro_proposed
AurelienLourot/charm-helpers
15
python
@patch.object(fetch, 'get_distrib_codename') @patch(builtin_open) @patch('subprocess.check_call') def test_configure_install_source_distro_proposed(self, _spcc, _open, _lsb): _lsb.return_value = FAKE_CODENAME _file = MagicMock(spec=io.FileIO) _open.return_value = _file openstack.configure_installation_source('distro-proposed') _file.__enter__().write.assert_called_once_with('# Proposed\ndeb http://archive.ubuntu.com/ubuntu precise-proposed main universe multiverse restricted\n') src = 'deb http://archive.ubuntu.com/ubuntu/ precise-proposed restricted main multiverse universe' openstack.configure_installation_source(src) _spcc.assert_called_once_with(['add-apt-repository', '--yes', 'deb http://archive.ubuntu.com/ubuntu/ precise-proposed restricted main multiverse universe'], env={})
@patch.object(fetch, 'get_distrib_codename') @patch(builtin_open) @patch('subprocess.check_call') def test_configure_install_source_distro_proposed(self, _spcc, _open, _lsb): _lsb.return_value = FAKE_CODENAME _file = MagicMock(spec=io.FileIO) _open.return_value = _file openstack.configure_installation_source('distro-proposed') _file.__enter__().write.assert_called_once_with('# Proposed\ndeb http://archive.ubuntu.com/ubuntu precise-proposed main universe multiverse restricted\n') src = 'deb http://archive.ubuntu.com/ubuntu/ precise-proposed restricted main multiverse universe' openstack.configure_installation_source(src) _spcc.assert_called_once_with(['add-apt-repository', '--yes', 'deb http://archive.ubuntu.com/ubuntu/ precise-proposed restricted main multiverse universe'], env={})<|docstring|>Test configuring installation source from deb repo url<|endoftext|>
07c9f088209a0fc376f1d5cc3504a699dc64dbf75969b093c2c499db2ed03f07
@patch.object(fetch, 'get_distrib_codename') def test_configure_install_source_uca_staging(self, _lsb): 'Test configuring installation source from UCA staging sources' _lsb.return_value = FAKE_CODENAME with patch('subprocess.check_call') as _subp: src = 'cloud:precise-folsom/staging' openstack.configure_installation_source(src) cmd = ['add-apt-repository', '-y', 'ppa:ubuntu-cloud-archive/folsom-staging'] _subp.assert_called_with(cmd, env={})
Test configuring installation source from UCA staging sources
tests/contrib/openstack/test_openstack_utils.py
test_configure_install_source_uca_staging
AurelienLourot/charm-helpers
15
python
@patch.object(fetch, 'get_distrib_codename') def test_configure_install_source_uca_staging(self, _lsb): _lsb.return_value = FAKE_CODENAME with patch('subprocess.check_call') as _subp: src = 'cloud:precise-folsom/staging' openstack.configure_installation_source(src) cmd = ['add-apt-repository', '-y', 'ppa:ubuntu-cloud-archive/folsom-staging'] _subp.assert_called_with(cmd, env={})
@patch.object(fetch, 'get_distrib_codename') def test_configure_install_source_uca_staging(self, _lsb): _lsb.return_value = FAKE_CODENAME with patch('subprocess.check_call') as _subp: src = 'cloud:precise-folsom/staging' openstack.configure_installation_source(src) cmd = ['add-apt-repository', '-y', 'ppa:ubuntu-cloud-archive/folsom-staging'] _subp.assert_called_with(cmd, env={})<|docstring|>Test configuring installation source from UCA staging sources<|endoftext|>
e4b3d3c839f10de2da721fd216c95bd9c317f7fc29040faa53cec7d42a1964d7
@patch(builtin_open) @patch.object(fetch, 'apt_install') @patch.object(fetch, 'get_distrib_codename') @patch.object(fetch, 'filter_installed_packages') def test_configure_install_source_uca_repos(self, _fip, _lsb, _install, _open): 'Test configuring installation source from UCA sources' _lsb.return_value = FAKE_CODENAME _file = MagicMock(spec=io.FileIO) _open.return_value = _file _fip.side_effect = (lambda x: x) for (src, url) in UCA_SOURCES: actual_url = '# Ubuntu Cloud Archive\n{}\n'.format(url) openstack.configure_installation_source(src) _install.assert_called_with(['ubuntu-cloud-keyring'], fatal=True) _open.assert_called_with('/etc/apt/sources.list.d/cloud-archive.list', 'w') _file.__enter__().write.assert_called_with(actual_url)
Test configuring installation source from UCA sources
tests/contrib/openstack/test_openstack_utils.py
test_configure_install_source_uca_repos
AurelienLourot/charm-helpers
15
python
@patch(builtin_open) @patch.object(fetch, 'apt_install') @patch.object(fetch, 'get_distrib_codename') @patch.object(fetch, 'filter_installed_packages') def test_configure_install_source_uca_repos(self, _fip, _lsb, _install, _open): _lsb.return_value = FAKE_CODENAME _file = MagicMock(spec=io.FileIO) _open.return_value = _file _fip.side_effect = (lambda x: x) for (src, url) in UCA_SOURCES: actual_url = '# Ubuntu Cloud Archive\n{}\n'.format(url) openstack.configure_installation_source(src) _install.assert_called_with(['ubuntu-cloud-keyring'], fatal=True) _open.assert_called_with('/etc/apt/sources.list.d/cloud-archive.list', 'w') _file.__enter__().write.assert_called_with(actual_url)
@patch(builtin_open) @patch.object(fetch, 'apt_install') @patch.object(fetch, 'get_distrib_codename') @patch.object(fetch, 'filter_installed_packages') def test_configure_install_source_uca_repos(self, _fip, _lsb, _install, _open): _lsb.return_value = FAKE_CODENAME _file = MagicMock(spec=io.FileIO) _open.return_value = _file _fip.side_effect = (lambda x: x) for (src, url) in UCA_SOURCES: actual_url = '# Ubuntu Cloud Archive\n{}\n'.format(url) openstack.configure_installation_source(src) _install.assert_called_with(['ubuntu-cloud-keyring'], fatal=True) _open.assert_called_with('/etc/apt/sources.list.d/cloud-archive.list', 'w') _file.__enter__().write.assert_called_with(actual_url)<|docstring|>Test configuring installation source from UCA sources<|endoftext|>
cc5aa46813ec1ac9fcfdc58a2f44f5e0234c147224cf5723de155ab9ae1e2bf8
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_configure_install_source_bad_uca(self, mocked_error): 'Test configuring installation source from bad UCA source' try: openstack.configure_installation_source('cloud:foo-bar') except Exception: pass _e = 'Invalid Cloud Archive release specified: foo-bar on this Ubuntuversion' _s = mocked_error.call_args[0][0] self.assertTrue(_s.startswith(_e))
Test configuring installation source from bad UCA source
tests/contrib/openstack/test_openstack_utils.py
test_configure_install_source_bad_uca
AurelienLourot/charm-helpers
15
python
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_configure_install_source_bad_uca(self, mocked_error): try: openstack.configure_installation_source('cloud:foo-bar') except Exception: pass _e = 'Invalid Cloud Archive release specified: foo-bar on this Ubuntuversion' _s = mocked_error.call_args[0][0] self.assertTrue(_s.startswith(_e))
@patch('charmhelpers.contrib.openstack.utils.error_out') def test_configure_install_source_bad_uca(self, mocked_error): try: openstack.configure_installation_source('cloud:foo-bar') except Exception: pass _e = 'Invalid Cloud Archive release specified: foo-bar on this Ubuntuversion' _s = mocked_error.call_args[0][0] self.assertTrue(_s.startswith(_e))<|docstring|>Test configuring installation source from bad UCA source<|endoftext|>
c404aab7fa44609f0c81a3482cdc504bac8824d311a2c62acfa9cfb85fd668d3
@patch('os.mkdir') @patch('os.path.exists') @patch('charmhelpers.contrib.openstack.utils.charm_dir') @patch(builtin_open) def test_save_scriptrc(self, _open, _charm_dir, _exists, _mkdir): 'Test generation of scriptrc from environment' scriptrc = ['#!/bin/bash\n', 'export setting1=foo\n', 'export setting2=bar\n'] _file = MagicMock(spec=io.FileIO) _open.return_value = _file _charm_dir.return_value = '/var/lib/juju/units/testing-foo-0/charm' _exists.return_value = False os.environ['JUJU_UNIT_NAME'] = 'testing-foo/0' openstack.save_script_rc(setting1='foo', setting2='bar') rcdir = '/var/lib/juju/units/testing-foo-0/charm/scripts' _mkdir.assert_called_with(rcdir) expected_f = '/var/lib/juju/units/testing-foo-0/charm/scripts/scriptrc' _open.assert_called_with(expected_f, 'wt') _mkdir.assert_called_with(os.path.dirname(expected_f)) _file.__enter__().write.assert_has_calls(list((call(line) for line in scriptrc)), any_order=True)
Test generation of scriptrc from environment
tests/contrib/openstack/test_openstack_utils.py
test_save_scriptrc
AurelienLourot/charm-helpers
15
python
@patch('os.mkdir') @patch('os.path.exists') @patch('charmhelpers.contrib.openstack.utils.charm_dir') @patch(builtin_open) def test_save_scriptrc(self, _open, _charm_dir, _exists, _mkdir): scriptrc = ['#!/bin/bash\n', 'export setting1=foo\n', 'export setting2=bar\n'] _file = MagicMock(spec=io.FileIO) _open.return_value = _file _charm_dir.return_value = '/var/lib/juju/units/testing-foo-0/charm' _exists.return_value = False os.environ['JUJU_UNIT_NAME'] = 'testing-foo/0' openstack.save_script_rc(setting1='foo', setting2='bar') rcdir = '/var/lib/juju/units/testing-foo-0/charm/scripts' _mkdir.assert_called_with(rcdir) expected_f = '/var/lib/juju/units/testing-foo-0/charm/scripts/scriptrc' _open.assert_called_with(expected_f, 'wt') _mkdir.assert_called_with(os.path.dirname(expected_f)) _file.__enter__().write.assert_has_calls(list((call(line) for line in scriptrc)), any_order=True)
@patch('os.mkdir') @patch('os.path.exists') @patch('charmhelpers.contrib.openstack.utils.charm_dir') @patch(builtin_open) def test_save_scriptrc(self, _open, _charm_dir, _exists, _mkdir): scriptrc = ['#!/bin/bash\n', 'export setting1=foo\n', 'export setting2=bar\n'] _file = MagicMock(spec=io.FileIO) _open.return_value = _file _charm_dir.return_value = '/var/lib/juju/units/testing-foo-0/charm' _exists.return_value = False os.environ['JUJU_UNIT_NAME'] = 'testing-foo/0' openstack.save_script_rc(setting1='foo', setting2='bar') rcdir = '/var/lib/juju/units/testing-foo-0/charm/scripts' _mkdir.assert_called_with(rcdir) expected_f = '/var/lib/juju/units/testing-foo-0/charm/scripts/scriptrc' _open.assert_called_with(expected_f, 'wt') _mkdir.assert_called_with(os.path.dirname(expected_f)) _file.__enter__().write.assert_has_calls(list((call(line) for line in scriptrc)), any_order=True)<|docstring|>Test generation of scriptrc from environment<|endoftext|>
d5a43635834becd44f96b379ba2943b23fdfc86a6c4cb8073992b2dbb2b8e845
@patch.object(openstack, 'lsb_release') @patch.object(openstack, 'get_os_version_package') @patch.object(openstack, 'get_os_version_codename_swift') @patch.object(openstack, 'config') def test_openstack_upgrade_detection_true(self, config, vers_swift, vers_pkg, lsb): 'Test it detects when an openstack package has available upgrade' lsb.return_value = FAKE_RELEASE config.return_value = 'cloud:precise-havana' vers_pkg.return_value = '2013.1.1' self.assertTrue(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '2013.2~b1' self.assertTrue(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '1.9.0' vers_swift.return_value = '2.5.0' self.assertTrue(openstack.openstack_upgrade_available('swift-proxy')) vers_pkg.return_value = '2.5.0' vers_swift.return_value = '2.10.0' self.assertTrue(openstack.openstack_upgrade_available('swift-proxy'))
Test it detects when an openstack package has available upgrade
tests/contrib/openstack/test_openstack_utils.py
test_openstack_upgrade_detection_true
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'lsb_release') @patch.object(openstack, 'get_os_version_package') @patch.object(openstack, 'get_os_version_codename_swift') @patch.object(openstack, 'config') def test_openstack_upgrade_detection_true(self, config, vers_swift, vers_pkg, lsb): lsb.return_value = FAKE_RELEASE config.return_value = 'cloud:precise-havana' vers_pkg.return_value = '2013.1.1' self.assertTrue(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '2013.2~b1' self.assertTrue(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '1.9.0' vers_swift.return_value = '2.5.0' self.assertTrue(openstack.openstack_upgrade_available('swift-proxy')) vers_pkg.return_value = '2.5.0' vers_swift.return_value = '2.10.0' self.assertTrue(openstack.openstack_upgrade_available('swift-proxy'))
@patch.object(openstack, 'lsb_release') @patch.object(openstack, 'get_os_version_package') @patch.object(openstack, 'get_os_version_codename_swift') @patch.object(openstack, 'config') def test_openstack_upgrade_detection_true(self, config, vers_swift, vers_pkg, lsb): lsb.return_value = FAKE_RELEASE config.return_value = 'cloud:precise-havana' vers_pkg.return_value = '2013.1.1' self.assertTrue(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '2013.2~b1' self.assertTrue(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '1.9.0' vers_swift.return_value = '2.5.0' self.assertTrue(openstack.openstack_upgrade_available('swift-proxy')) vers_pkg.return_value = '2.5.0' vers_swift.return_value = '2.10.0' self.assertTrue(openstack.openstack_upgrade_available('swift-proxy'))<|docstring|>Test it detects when an openstack package has available upgrade<|endoftext|>
42496a4ebbd484d9ffe4bb067d00e32015c381acc5a33c93da7119234d119806
@patch.object(openstack, 'lsb_release') @patch.object(openstack, 'get_os_version_package') @patch.object(openstack, 'config') def test_openstack_upgrade_detection_false(self, config, vers_pkg, lsb): 'Test it detects when an openstack upgrade is not necessary' lsb.return_value = FAKE_RELEASE config.return_value = 'cloud:precise-folsom' vers_pkg.return_value = '2013.1.1' self.assertFalse(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '2013.1~b1' self.assertFalse(openstack.openstack_upgrade_available('nova-common')) config.return_value = 'cloud:precise-havana' vers_pkg.return_value = '1.10.0' self.assertFalse(openstack.openstack_upgrade_available('swift-proxy'))
Test it detects when an openstack upgrade is not necessary
tests/contrib/openstack/test_openstack_utils.py
test_openstack_upgrade_detection_false
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'lsb_release') @patch.object(openstack, 'get_os_version_package') @patch.object(openstack, 'config') def test_openstack_upgrade_detection_false(self, config, vers_pkg, lsb): lsb.return_value = FAKE_RELEASE config.return_value = 'cloud:precise-folsom' vers_pkg.return_value = '2013.1.1' self.assertFalse(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '2013.1~b1' self.assertFalse(openstack.openstack_upgrade_available('nova-common')) config.return_value = 'cloud:precise-havana' vers_pkg.return_value = '1.10.0' self.assertFalse(openstack.openstack_upgrade_available('swift-proxy'))
@patch.object(openstack, 'lsb_release') @patch.object(openstack, 'get_os_version_package') @patch.object(openstack, 'config') def test_openstack_upgrade_detection_false(self, config, vers_pkg, lsb): lsb.return_value = FAKE_RELEASE config.return_value = 'cloud:precise-folsom' vers_pkg.return_value = '2013.1.1' self.assertFalse(openstack.openstack_upgrade_available('nova-common')) vers_pkg.return_value = '2013.1~b1' self.assertFalse(openstack.openstack_upgrade_available('nova-common')) config.return_value = 'cloud:precise-havana' vers_pkg.return_value = '1.10.0' self.assertFalse(openstack.openstack_upgrade_available('swift-proxy'))<|docstring|>Test it detects when an openstack upgrade is not necessary<|endoftext|>
b9f3c8fbd0571db6f0219efddc91d93404755b2e3a943adf7a11e1f5d9f4aa6e
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'error_out') def test_ensure_block_device_bad_config(self, err, is_bd): "Test it doesn't prepare storage with bad config" openstack.ensure_block_device(block_device='none') self.assertTrue(err.called)
Test it doesn't prepare storage with bad config
tests/contrib/openstack/test_openstack_utils.py
test_ensure_block_device_bad_config
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'error_out') def test_ensure_block_device_bad_config(self, err, is_bd): openstack.ensure_block_device(block_device='none') self.assertTrue(err.called)
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'error_out') def test_ensure_block_device_bad_config(self, err, is_bd): openstack.ensure_block_device(block_device='none') self.assertTrue(err.called)<|docstring|>Test it doesn't prepare storage with bad config<|endoftext|>
4c4e402393e4963fe7cd36df72d34c19036d92243dfee2c8d6446df1d50c1207
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'ensure_loopback_device') def test_ensure_block_device_loopback(self, ensure_loopback, is_bd): 'Test it ensures loopback device when checking block device' defsize = openstack.DEFAULT_LOOPBACK_SIZE is_bd.return_value = True ensure_loopback.return_value = '/tmp/cinder.img' result = openstack.ensure_block_device('/tmp/cinder.img') ensure_loopback.assert_called_with('/tmp/cinder.img', defsize) self.assertEquals(result, '/tmp/cinder.img') ensure_loopback.return_value = '/tmp/cinder-2.img' result = openstack.ensure_block_device('/tmp/cinder-2.img|15G') ensure_loopback.assert_called_with('/tmp/cinder-2.img', '15G') self.assertEquals(result, '/tmp/cinder-2.img')
Test it ensures loopback device when checking block device
tests/contrib/openstack/test_openstack_utils.py
test_ensure_block_device_loopback
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'ensure_loopback_device') def test_ensure_block_device_loopback(self, ensure_loopback, is_bd): defsize = openstack.DEFAULT_LOOPBACK_SIZE is_bd.return_value = True ensure_loopback.return_value = '/tmp/cinder.img' result = openstack.ensure_block_device('/tmp/cinder.img') ensure_loopback.assert_called_with('/tmp/cinder.img', defsize) self.assertEquals(result, '/tmp/cinder.img') ensure_loopback.return_value = '/tmp/cinder-2.img' result = openstack.ensure_block_device('/tmp/cinder-2.img|15G') ensure_loopback.assert_called_with('/tmp/cinder-2.img', '15G') self.assertEquals(result, '/tmp/cinder-2.img')
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'ensure_loopback_device') def test_ensure_block_device_loopback(self, ensure_loopback, is_bd): defsize = openstack.DEFAULT_LOOPBACK_SIZE is_bd.return_value = True ensure_loopback.return_value = '/tmp/cinder.img' result = openstack.ensure_block_device('/tmp/cinder.img') ensure_loopback.assert_called_with('/tmp/cinder.img', defsize) self.assertEquals(result, '/tmp/cinder.img') ensure_loopback.return_value = '/tmp/cinder-2.img' result = openstack.ensure_block_device('/tmp/cinder-2.img|15G') ensure_loopback.assert_called_with('/tmp/cinder-2.img', '15G') self.assertEquals(result, '/tmp/cinder-2.img')<|docstring|>Test it ensures loopback device when checking block device<|endoftext|>
7aaa4871eba307d5a713d896bfd177a121bee776d271676f665acd64e4cea3b7
@patch.object(openstack, 'is_block_device') def test_ensure_standard_block_device(self, is_bd): 'Test it looks for storage at both relative and full device path' for dev in ['vdb', '/dev/vdb']: openstack.ensure_block_device(dev) is_bd.assert_called_with('/dev/vdb')
Test it looks for storage at both relative and full device path
tests/contrib/openstack/test_openstack_utils.py
test_ensure_standard_block_device
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'is_block_device') def test_ensure_standard_block_device(self, is_bd): for dev in ['vdb', '/dev/vdb']: openstack.ensure_block_device(dev) is_bd.assert_called_with('/dev/vdb')
@patch.object(openstack, 'is_block_device') def test_ensure_standard_block_device(self, is_bd): for dev in ['vdb', '/dev/vdb']: openstack.ensure_block_device(dev) is_bd.assert_called_with('/dev/vdb')<|docstring|>Test it looks for storage at both relative and full device path<|endoftext|>
c4674ef141391812bf37b4b87dc69717b6e1a9a0af46acd01d33b06f648eb4e6
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'error_out') def test_ensure_nonexistent_block_device(self, error_out, is_bd): 'Test it will not ensure a non-existent block device' is_bd.return_value = False openstack.ensure_block_device(block_device='foo') self.assertTrue(error_out.called)
Test it will not ensure a non-existent block device
tests/contrib/openstack/test_openstack_utils.py
test_ensure_nonexistent_block_device
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'error_out') def test_ensure_nonexistent_block_device(self, error_out, is_bd): is_bd.return_value = False openstack.ensure_block_device(block_device='foo') self.assertTrue(error_out.called)
@patch.object(openstack, 'is_block_device') @patch.object(openstack, 'error_out') def test_ensure_nonexistent_block_device(self, error_out, is_bd): is_bd.return_value = False openstack.ensure_block_device(block_device='foo') self.assertTrue(error_out.called)<|docstring|>Test it will not ensure a non-existent block device<|endoftext|>
ebd8611f31bd81f40e873fa1ef0b027f41a7df8c459bf978ea7c49b675794e0c
@patch.object(openstack, 'juju_log') @patch.object(openstack, 'umount') @patch.object(openstack, 'mounts') @patch.object(openstack, 'zap_disk') @patch.object(openstack, 'is_lvm_physical_volume') def test_clean_storage_unmount(self, is_pv, zap_disk, mounts, umount, log): 'Test it unmounts block device when cleaning storage' is_pv.return_value = False zap_disk.return_value = True mounts.return_value = MOUNTS openstack.clean_storage('/dev/vdb') umount.called_with('/dev/vdb', True)
Test it unmounts block device when cleaning storage
tests/contrib/openstack/test_openstack_utils.py
test_clean_storage_unmount
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'juju_log') @patch.object(openstack, 'umount') @patch.object(openstack, 'mounts') @patch.object(openstack, 'zap_disk') @patch.object(openstack, 'is_lvm_physical_volume') def test_clean_storage_unmount(self, is_pv, zap_disk, mounts, umount, log): is_pv.return_value = False zap_disk.return_value = True mounts.return_value = MOUNTS openstack.clean_storage('/dev/vdb') umount.called_with('/dev/vdb', True)
@patch.object(openstack, 'juju_log') @patch.object(openstack, 'umount') @patch.object(openstack, 'mounts') @patch.object(openstack, 'zap_disk') @patch.object(openstack, 'is_lvm_physical_volume') def test_clean_storage_unmount(self, is_pv, zap_disk, mounts, umount, log): is_pv.return_value = False zap_disk.return_value = True mounts.return_value = MOUNTS openstack.clean_storage('/dev/vdb') umount.called_with('/dev/vdb', True)<|docstring|>Test it unmounts block device when cleaning storage<|endoftext|>
925f432fcd8f5cdc265442e39275b6adbe8a05cfb509132a42cbec45885cb045
@patch.object(openstack, 'juju_log') @patch.object(openstack, 'remove_lvm_physical_volume') @patch.object(openstack, 'deactivate_lvm_volume_group') @patch.object(openstack, 'mounts') @patch.object(openstack, 'is_lvm_physical_volume') def test_clean_storage_lvm_wipe(self, is_pv, mounts, rm_lv, rm_vg, log): 'Test it removes traces of LVM when cleaning storage' mounts.return_value = [] is_pv.return_value = True openstack.clean_storage('/dev/vdb') rm_lv.assert_called_with('/dev/vdb') rm_vg.assert_called_with('/dev/vdb')
Test it removes traces of LVM when cleaning storage
tests/contrib/openstack/test_openstack_utils.py
test_clean_storage_lvm_wipe
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'juju_log') @patch.object(openstack, 'remove_lvm_physical_volume') @patch.object(openstack, 'deactivate_lvm_volume_group') @patch.object(openstack, 'mounts') @patch.object(openstack, 'is_lvm_physical_volume') def test_clean_storage_lvm_wipe(self, is_pv, mounts, rm_lv, rm_vg, log): mounts.return_value = [] is_pv.return_value = True openstack.clean_storage('/dev/vdb') rm_lv.assert_called_with('/dev/vdb') rm_vg.assert_called_with('/dev/vdb')
@patch.object(openstack, 'juju_log') @patch.object(openstack, 'remove_lvm_physical_volume') @patch.object(openstack, 'deactivate_lvm_volume_group') @patch.object(openstack, 'mounts') @patch.object(openstack, 'is_lvm_physical_volume') def test_clean_storage_lvm_wipe(self, is_pv, mounts, rm_lv, rm_vg, log): mounts.return_value = [] is_pv.return_value = True openstack.clean_storage('/dev/vdb') rm_lv.assert_called_with('/dev/vdb') rm_vg.assert_called_with('/dev/vdb')<|docstring|>Test it removes traces of LVM when cleaning storage<|endoftext|>
29072a659a46268debd1b655c99ff4194310290c2f92881eea7b33fb09b726e7
@patch.object(openstack, 'zap_disk') @patch.object(openstack, 'is_lvm_physical_volume') @patch.object(openstack, 'mounts') def test_clean_storage_zap_disk(self, mounts, is_pv, zap_disk): 'It removes traces of LVM when cleaning storage' mounts.return_value = [] is_pv.return_value = False openstack.clean_storage('/dev/vdb') zap_disk.assert_called_with('/dev/vdb')
It removes traces of LVM when cleaning storage
tests/contrib/openstack/test_openstack_utils.py
test_clean_storage_zap_disk
AurelienLourot/charm-helpers
15
python
@patch.object(openstack, 'zap_disk') @patch.object(openstack, 'is_lvm_physical_volume') @patch.object(openstack, 'mounts') def test_clean_storage_zap_disk(self, mounts, is_pv, zap_disk): mounts.return_value = [] is_pv.return_value = False openstack.clean_storage('/dev/vdb') zap_disk.assert_called_with('/dev/vdb')
@patch.object(openstack, 'zap_disk') @patch.object(openstack, 'is_lvm_physical_volume') @patch.object(openstack, 'mounts') def test_clean_storage_zap_disk(self, mounts, is_pv, zap_disk): mounts.return_value = [] is_pv.return_value = False openstack.clean_storage('/dev/vdb') zap_disk.assert_called_with('/dev/vdb')<|docstring|>It removes traces of LVM when cleaning storage<|endoftext|>
a69e66410c1e24197768b69b8d434d45ceaa20902c95cee2fcdcbb4ff507f463
def _read_bib(path_list, db_src): '\n read output data from bibliographic database \n args:\n path_list: a list of paths to read from\n db_src: database source of the bibliographic data\n returns:\n bib_df: a bibliographic dataframe\n ' db_specs = _switch_case_db(db_src) if (len(path_list) == 1): bib_df = pd.read_csv(path_list[0], usecols=db_specs['columns'], dtype=db_specs['dtype'], sep=db_specs['sep'], index_col=False) else: bib_df = pd.concat([pd.read_csv(path, usecols=db_specs['columns'], dtype=db_specs['dtype'], sep=db_specs['sep'], index_col=False) for path in path_list], ignore_index=True) bib_df['db_src'] = db_src bib_df = bib_df.rename(columns=db_specs['new_cols']) return bib_df
read output data from bibliographic database args: path_list: a list of paths to read from db_src: database source of the bibliographic data returns: bib_df: a bibliographic dataframe
bibl_io.py
_read_bib
JohannesNakayama/CitationNetworks
0
python
def _read_bib(path_list, db_src): '\n read output data from bibliographic database \n args:\n path_list: a list of paths to read from\n db_src: database source of the bibliographic data\n returns:\n bib_df: a bibliographic dataframe\n ' db_specs = _switch_case_db(db_src) if (len(path_list) == 1): bib_df = pd.read_csv(path_list[0], usecols=db_specs['columns'], dtype=db_specs['dtype'], sep=db_specs['sep'], index_col=False) else: bib_df = pd.concat([pd.read_csv(path, usecols=db_specs['columns'], dtype=db_specs['dtype'], sep=db_specs['sep'], index_col=False) for path in path_list], ignore_index=True) bib_df['db_src'] = db_src bib_df = bib_df.rename(columns=db_specs['new_cols']) return bib_df
def _read_bib(path_list, db_src): '\n read output data from bibliographic database \n args:\n path_list: a list of paths to read from\n db_src: database source of the bibliographic data\n returns:\n bib_df: a bibliographic dataframe\n ' db_specs = _switch_case_db(db_src) if (len(path_list) == 1): bib_df = pd.read_csv(path_list[0], usecols=db_specs['columns'], dtype=db_specs['dtype'], sep=db_specs['sep'], index_col=False) else: bib_df = pd.concat([pd.read_csv(path, usecols=db_specs['columns'], dtype=db_specs['dtype'], sep=db_specs['sep'], index_col=False) for path in path_list], ignore_index=True) bib_df['db_src'] = db_src bib_df = bib_df.rename(columns=db_specs['new_cols']) return bib_df<|docstring|>read output data from bibliographic database args: path_list: a list of paths to read from db_src: database source of the bibliographic data returns: bib_df: a bibliographic dataframe<|endoftext|>
ebbb2cf12cacbf75d5a88f1754aafcc7e5ad3673500b82dc1166a971ccd271bb
def _convert_bib_to_dict(bib_df, db_src): '\n convert bibliographic dataframe into python dictionary\n args:\n bib_df: a bibliographic dataframe\n db_src: database source of the bibliographic data\n returns:\n bib_dict: a bibliographic dictionary\n ' db_specs = _switch_case_db(db_src) keys = bib_df.columns bib_dict = {} for j in range(bib_df.shape[0]): entry = {} for key in keys: entry[key] = bib_df.loc[(j, key)] entry = _split_columns(entry, db_specs['to_split']) bib_dict[str(uuid.uuid4())] = entry return bib_dict
convert bibliographic dataframe into python dictionary args: bib_df: a bibliographic dataframe db_src: database source of the bibliographic data returns: bib_dict: a bibliographic dictionary
bibl_io.py
_convert_bib_to_dict
JohannesNakayama/CitationNetworks
0
python
def _convert_bib_to_dict(bib_df, db_src): '\n convert bibliographic dataframe into python dictionary\n args:\n bib_df: a bibliographic dataframe\n db_src: database source of the bibliographic data\n returns:\n bib_dict: a bibliographic dictionary\n ' db_specs = _switch_case_db(db_src) keys = bib_df.columns bib_dict = {} for j in range(bib_df.shape[0]): entry = {} for key in keys: entry[key] = bib_df.loc[(j, key)] entry = _split_columns(entry, db_specs['to_split']) bib_dict[str(uuid.uuid4())] = entry return bib_dict
def _convert_bib_to_dict(bib_df, db_src): '\n convert bibliographic dataframe into python dictionary\n args:\n bib_df: a bibliographic dataframe\n db_src: database source of the bibliographic data\n returns:\n bib_dict: a bibliographic dictionary\n ' db_specs = _switch_case_db(db_src) keys = bib_df.columns bib_dict = {} for j in range(bib_df.shape[0]): entry = {} for key in keys: entry[key] = bib_df.loc[(j, key)] entry = _split_columns(entry, db_specs['to_split']) bib_dict[str(uuid.uuid4())] = entry return bib_dict<|docstring|>convert bibliographic dataframe into python dictionary args: bib_df: a bibliographic dataframe db_src: database source of the bibliographic data returns: bib_dict: a bibliographic dictionary<|endoftext|>
d258c2d51c5ad548fd1f33262fa414f0fae5a128ff47a81a4622b26a828d7b6a
def _switch_scopus(): "\n in case db_src='scopus', use these specifications\n args:\n None\n returns:\n wos: the scopus specifics\n " scopus = {'columns': ['Title', 'Authors', 'Author(s) ID', 'Source title', 'Year', 'Volume', 'Issue', 'Cited by', 'References', 'DOI', 'ISBN', 'ISSN', 'CODEN', 'PubMed ID', 'EID'], 'dtype': {'Title': 'str', 'Authors': 'str', 'Author(s) ID': 'str', 'Source title': 'str', 'Year': 'float64', 'Volume': 'str', 'Issue': 'str', 'Cited by': 'float64', 'References': 'str', 'DOI': 'str', 'ISBN': 'str', 'ISSN': 'str', 'CODEN': 'str', 'PubMed ID': 'str', 'EID': 'str'}, 'sep': ',', 'new_cols': {'Title': 'title', 'Authors': 'authors', 'Author(s) ID': 'scopus_author_id', 'Source title': 'source', 'Year': 'year', 'Volume': 'vol', 'Issue': 'issue', 'Cited by': 'cit_count', 'References': 'refs', 'DOI': 'doi', 'ISBN': 'isbn', 'ISSN': 'issn', 'CODEN': 'coden', 'PubMed ID': 'pmid', 'EID': 'eid'}, 'to_split': {'authors': ',', 'scopus_author_id': ';', 'refs': ';', 'isbn': ';'}} return scopus
in case db_src='scopus', use these specifications args: None returns: wos: the scopus specifics
bibl_io.py
_switch_scopus
JohannesNakayama/CitationNetworks
0
python
def _switch_scopus(): "\n in case db_src='scopus', use these specifications\n args:\n None\n returns:\n wos: the scopus specifics\n " scopus = {'columns': ['Title', 'Authors', 'Author(s) ID', 'Source title', 'Year', 'Volume', 'Issue', 'Cited by', 'References', 'DOI', 'ISBN', 'ISSN', 'CODEN', 'PubMed ID', 'EID'], 'dtype': {'Title': 'str', 'Authors': 'str', 'Author(s) ID': 'str', 'Source title': 'str', 'Year': 'float64', 'Volume': 'str', 'Issue': 'str', 'Cited by': 'float64', 'References': 'str', 'DOI': 'str', 'ISBN': 'str', 'ISSN': 'str', 'CODEN': 'str', 'PubMed ID': 'str', 'EID': 'str'}, 'sep': ',', 'new_cols': {'Title': 'title', 'Authors': 'authors', 'Author(s) ID': 'scopus_author_id', 'Source title': 'source', 'Year': 'year', 'Volume': 'vol', 'Issue': 'issue', 'Cited by': 'cit_count', 'References': 'refs', 'DOI': 'doi', 'ISBN': 'isbn', 'ISSN': 'issn', 'CODEN': 'coden', 'PubMed ID': 'pmid', 'EID': 'eid'}, 'to_split': {'authors': ',', 'scopus_author_id': ';', 'refs': ';', 'isbn': ';'}} return scopus
def _switch_scopus(): "\n in case db_src='scopus', use these specifications\n args:\n None\n returns:\n wos: the scopus specifics\n " scopus = {'columns': ['Title', 'Authors', 'Author(s) ID', 'Source title', 'Year', 'Volume', 'Issue', 'Cited by', 'References', 'DOI', 'ISBN', 'ISSN', 'CODEN', 'PubMed ID', 'EID'], 'dtype': {'Title': 'str', 'Authors': 'str', 'Author(s) ID': 'str', 'Source title': 'str', 'Year': 'float64', 'Volume': 'str', 'Issue': 'str', 'Cited by': 'float64', 'References': 'str', 'DOI': 'str', 'ISBN': 'str', 'ISSN': 'str', 'CODEN': 'str', 'PubMed ID': 'str', 'EID': 'str'}, 'sep': ',', 'new_cols': {'Title': 'title', 'Authors': 'authors', 'Author(s) ID': 'scopus_author_id', 'Source title': 'source', 'Year': 'year', 'Volume': 'vol', 'Issue': 'issue', 'Cited by': 'cit_count', 'References': 'refs', 'DOI': 'doi', 'ISBN': 'isbn', 'ISSN': 'issn', 'CODEN': 'coden', 'PubMed ID': 'pmid', 'EID': 'eid'}, 'to_split': {'authors': ',', 'scopus_author_id': ';', 'refs': ';', 'isbn': ';'}} return scopus<|docstring|>in case db_src='scopus', use these specifications args: None returns: wos: the scopus specifics<|endoftext|>
badf8dc27e9e014ce44bd2c887b37acd22296248883aee3313a855b3243b9318
def _switch_wos(): "\n in case db_src='wos', use these specifications\n args:\n None\n returns:\n wos: the wos specifics\n " wos = {'columns': ['TI', 'AU', 'AF', 'RI', 'OI', 'SO', 'PY', 'VL', 'IS', 'TC', 'Z9', 'CR', 'U1', 'U2', 'DI', 'D2', 'BN', 'SN', 'PM', 'J9', 'JI', 'BP', 'EP'], 'dtype': {'TI': 'str', 'AU': 'str', 'AF': 'str', 'RI': 'str', 'OI': 'str', 'SO': 'str', 'PY': 'float64', 'VL': 'str', 'IS': 'str', 'TC': 'float64', 'Z9': 'float64', 'CR': 'str', 'U1': 'float64', 'U2': 'float64', 'DI': 'str', 'D2': 'str', 'BN': 'str', 'SN': 'str', 'PM': 'str', 'J9': 'str', 'JI': 'str', 'BP': 'str', 'EP': 'str'}, 'sep': '\t', 'new_cols': {'TI': 'title', 'AU': 'authors', 'AF': 'authors_full', 'RI': 'researcher_id', 'OI': 'orcid', 'SO': 'source', 'PY': 'year', 'VL': 'vol', 'IS': 'issue', 'TC': 'cit_count', 'Z9': 'cit_count_z9', 'CR': 'refs', 'U1': 'usage_count_180d', 'U2': 'usage_count_2013', 'DI': 'doi', 'D2': 'book_doi', 'BN': 'isbn', 'SN': 'issn', 'PM': 'pmid', 'J9': 'src_abb_29', 'JI': 'src_abb_iso', 'BP': 'start_page', 'EP': 'end_page'}, 'to_split': {'authors': ';', 'authors_full': ';', 'researcher_id': ';', 'orcid': ';', 'refs': ';', 'isbn': ';'}} return wos
in case db_src='wos', use these specifications args: None returns: wos: the wos specifics
bibl_io.py
_switch_wos
JohannesNakayama/CitationNetworks
0
python
def _switch_wos(): "\n in case db_src='wos', use these specifications\n args:\n None\n returns:\n wos: the wos specifics\n " wos = {'columns': ['TI', 'AU', 'AF', 'RI', 'OI', 'SO', 'PY', 'VL', 'IS', 'TC', 'Z9', 'CR', 'U1', 'U2', 'DI', 'D2', 'BN', 'SN', 'PM', 'J9', 'JI', 'BP', 'EP'], 'dtype': {'TI': 'str', 'AU': 'str', 'AF': 'str', 'RI': 'str', 'OI': 'str', 'SO': 'str', 'PY': 'float64', 'VL': 'str', 'IS': 'str', 'TC': 'float64', 'Z9': 'float64', 'CR': 'str', 'U1': 'float64', 'U2': 'float64', 'DI': 'str', 'D2': 'str', 'BN': 'str', 'SN': 'str', 'PM': 'str', 'J9': 'str', 'JI': 'str', 'BP': 'str', 'EP': 'str'}, 'sep': '\t', 'new_cols': {'TI': 'title', 'AU': 'authors', 'AF': 'authors_full', 'RI': 'researcher_id', 'OI': 'orcid', 'SO': 'source', 'PY': 'year', 'VL': 'vol', 'IS': 'issue', 'TC': 'cit_count', 'Z9': 'cit_count_z9', 'CR': 'refs', 'U1': 'usage_count_180d', 'U2': 'usage_count_2013', 'DI': 'doi', 'D2': 'book_doi', 'BN': 'isbn', 'SN': 'issn', 'PM': 'pmid', 'J9': 'src_abb_29', 'JI': 'src_abb_iso', 'BP': 'start_page', 'EP': 'end_page'}, 'to_split': {'authors': ';', 'authors_full': ';', 'researcher_id': ';', 'orcid': ';', 'refs': ';', 'isbn': ';'}} return wos
def _switch_wos(): "\n in case db_src='wos', use these specifications\n args:\n None\n returns:\n wos: the wos specifics\n " wos = {'columns': ['TI', 'AU', 'AF', 'RI', 'OI', 'SO', 'PY', 'VL', 'IS', 'TC', 'Z9', 'CR', 'U1', 'U2', 'DI', 'D2', 'BN', 'SN', 'PM', 'J9', 'JI', 'BP', 'EP'], 'dtype': {'TI': 'str', 'AU': 'str', 'AF': 'str', 'RI': 'str', 'OI': 'str', 'SO': 'str', 'PY': 'float64', 'VL': 'str', 'IS': 'str', 'TC': 'float64', 'Z9': 'float64', 'CR': 'str', 'U1': 'float64', 'U2': 'float64', 'DI': 'str', 'D2': 'str', 'BN': 'str', 'SN': 'str', 'PM': 'str', 'J9': 'str', 'JI': 'str', 'BP': 'str', 'EP': 'str'}, 'sep': '\t', 'new_cols': {'TI': 'title', 'AU': 'authors', 'AF': 'authors_full', 'RI': 'researcher_id', 'OI': 'orcid', 'SO': 'source', 'PY': 'year', 'VL': 'vol', 'IS': 'issue', 'TC': 'cit_count', 'Z9': 'cit_count_z9', 'CR': 'refs', 'U1': 'usage_count_180d', 'U2': 'usage_count_2013', 'DI': 'doi', 'D2': 'book_doi', 'BN': 'isbn', 'SN': 'issn', 'PM': 'pmid', 'J9': 'src_abb_29', 'JI': 'src_abb_iso', 'BP': 'start_page', 'EP': 'end_page'}, 'to_split': {'authors': ';', 'authors_full': ';', 'researcher_id': ';', 'orcid': ';', 'refs': ';', 'isbn': ';'}} return wos<|docstring|>in case db_src='wos', use these specifications args: None returns: wos: the wos specifics<|endoftext|>
679be6f4017e38e1e21c02b16f73840477effa62178d829129059f07801c289d
def _switch_case_db(arg): '\n replacement for switch case \n args:\n arg: the case to execute\n returns:\n func(): the switch function\n ' switcher = {'scopus': _switch_scopus, 'wos': _switch_wos} if (arg not in ['scopus', 'wos']): print((('Expection: Unknown db_source ' + str(arg)) + '\nOutput might not be in proper format')) func = switcher.get(arg, (lambda : [[], {}])) return func()
replacement for switch case args: arg: the case to execute returns: func(): the switch function
bibl_io.py
_switch_case_db
JohannesNakayama/CitationNetworks
0
python
def _switch_case_db(arg): '\n replacement for switch case \n args:\n arg: the case to execute\n returns:\n func(): the switch function\n ' switcher = {'scopus': _switch_scopus, 'wos': _switch_wos} if (arg not in ['scopus', 'wos']): print((('Expection: Unknown db_source ' + str(arg)) + '\nOutput might not be in proper format')) func = switcher.get(arg, (lambda : [[], {}])) return func()
def _switch_case_db(arg): '\n replacement for switch case \n args:\n arg: the case to execute\n returns:\n func(): the switch function\n ' switcher = {'scopus': _switch_scopus, 'wos': _switch_wos} if (arg not in ['scopus', 'wos']): print((('Expection: Unknown db_source ' + str(arg)) + '\nOutput might not be in proper format')) func = switcher.get(arg, (lambda : [[], {}])) return func()<|docstring|>replacement for switch case args: arg: the case to execute returns: func(): the switch function<|endoftext|>
17eb527a0d4e6903882fe25920e7f83b74598b8376a7fedbc6c8642509c232f4
def _split_columns(entry, split_list): '\n split pre-defined dictionary entries along pre-defined separators\n args:\n entry: a bibliography entry\n split_list: a pre-defined list of columns to separate\n returns:\n entry: the split entry\n ' space_stripper = (lambda x: x.rstrip().lstrip()) for (label, separator) in split_list.items(): try: entry[label] = space_stripper(entry[label]).rstrip(';') entry[label] = entry[label].split(separator) entry[label] = list(map(space_stripper, entry[label])) except: continue return entry
split pre-defined dictionary entries along pre-defined separators args: entry: a bibliography entry split_list: a pre-defined list of columns to separate returns: entry: the split entry
bibl_io.py
_split_columns
JohannesNakayama/CitationNetworks
0
python
def _split_columns(entry, split_list): '\n split pre-defined dictionary entries along pre-defined separators\n args:\n entry: a bibliography entry\n split_list: a pre-defined list of columns to separate\n returns:\n entry: the split entry\n ' space_stripper = (lambda x: x.rstrip().lstrip()) for (label, separator) in split_list.items(): try: entry[label] = space_stripper(entry[label]).rstrip(';') entry[label] = entry[label].split(separator) entry[label] = list(map(space_stripper, entry[label])) except: continue return entry
def _split_columns(entry, split_list): '\n split pre-defined dictionary entries along pre-defined separators\n args:\n entry: a bibliography entry\n split_list: a pre-defined list of columns to separate\n returns:\n entry: the split entry\n ' space_stripper = (lambda x: x.rstrip().lstrip()) for (label, separator) in split_list.items(): try: entry[label] = space_stripper(entry[label]).rstrip(';') entry[label] = entry[label].split(separator) entry[label] = list(map(space_stripper, entry[label])) except: continue return entry<|docstring|>split pre-defined dictionary entries along pre-defined separators args: entry: a bibliography entry split_list: a pre-defined list of columns to separate returns: entry: the split entry<|endoftext|>
ea6ccafe87f83b28653b36548c333d71b0977c55101c6138d1b374a7cf59d4d4
def _extract_cit_rel(bib_dict): "\n extract the citation relation \n args:\n bib_dict: a bibliographic dictionary\n returns:\n cit_rel: a list of tuples (x, y) with 'x cites y'\n " cit_rel = [] for key in bib_dict.keys(): doi_at_key = bib_dict[key]['doi'] if (len(str(doi_at_key)) > 8): pass else: continue refs_at_key = bib_dict[key]['refs'] try: for ref_idx in range(len(refs_at_key)): ref_doi = _extract_doi(refs_at_key[ref_idx], bib_dict[key]['db_src']) if (ref_doi == 'NO_DOI'): continue if (ref_doi != doi_at_key): cit_rel.append((ref_doi, doi_at_key)) except: continue return cit_rel
extract the citation relation args: bib_dict: a bibliographic dictionary returns: cit_rel: a list of tuples (x, y) with 'x cites y'
bibl_io.py
_extract_cit_rel
JohannesNakayama/CitationNetworks
0
python
def _extract_cit_rel(bib_dict): "\n extract the citation relation \n args:\n bib_dict: a bibliographic dictionary\n returns:\n cit_rel: a list of tuples (x, y) with 'x cites y'\n " cit_rel = [] for key in bib_dict.keys(): doi_at_key = bib_dict[key]['doi'] if (len(str(doi_at_key)) > 8): pass else: continue refs_at_key = bib_dict[key]['refs'] try: for ref_idx in range(len(refs_at_key)): ref_doi = _extract_doi(refs_at_key[ref_idx], bib_dict[key]['db_src']) if (ref_doi == 'NO_DOI'): continue if (ref_doi != doi_at_key): cit_rel.append((ref_doi, doi_at_key)) except: continue return cit_rel
def _extract_cit_rel(bib_dict): "\n extract the citation relation \n args:\n bib_dict: a bibliographic dictionary\n returns:\n cit_rel: a list of tuples (x, y) with 'x cites y'\n " cit_rel = [] for key in bib_dict.keys(): doi_at_key = bib_dict[key]['doi'] if (len(str(doi_at_key)) > 8): pass else: continue refs_at_key = bib_dict[key]['refs'] try: for ref_idx in range(len(refs_at_key)): ref_doi = _extract_doi(refs_at_key[ref_idx], bib_dict[key]['db_src']) if (ref_doi == 'NO_DOI'): continue if (ref_doi != doi_at_key): cit_rel.append((ref_doi, doi_at_key)) except: continue return cit_rel<|docstring|>extract the citation relation args: bib_dict: a bibliographic dictionary returns: cit_rel: a list of tuples (x, y) with 'x cites y'<|endoftext|>
70163f153cbe3c48490e74122f4fd4262b735d20fbe90a77f74464a4ab3e8767
def _extract_doi(ref_elem, db_src='wos'): "\n extract doi from reference (CAUTION: only works for db_src == 'wos' so far!)\n args:\n ref_elem: a reference\n db_src: database source of the bibliographic data\n returns:\n doi if doi is extractable\n 'NO_DOI' if no doi is extractable\n " if (db_src == 'wos'): regex_doi = re.compile('10.\\d{4,9}/[-._;()/:A-Za-z0-9]+$') try: res = regex_doi.search(ref_elem) doi = res.group(0) return doi except: return 'NO_DOI' else: return 'NO_DOI'
extract doi from reference (CAUTION: only works for db_src == 'wos' so far!) args: ref_elem: a reference db_src: database source of the bibliographic data returns: doi if doi is extractable 'NO_DOI' if no doi is extractable
bibl_io.py
_extract_doi
JohannesNakayama/CitationNetworks
0
python
def _extract_doi(ref_elem, db_src='wos'): "\n extract doi from reference (CAUTION: only works for db_src == 'wos' so far!)\n args:\n ref_elem: a reference\n db_src: database source of the bibliographic data\n returns:\n doi if doi is extractable\n 'NO_DOI' if no doi is extractable\n " if (db_src == 'wos'): regex_doi = re.compile('10.\\d{4,9}/[-._;()/:A-Za-z0-9]+$') try: res = regex_doi.search(ref_elem) doi = res.group(0) return doi except: return 'NO_DOI' else: return 'NO_DOI'
def _extract_doi(ref_elem, db_src='wos'): "\n extract doi from reference (CAUTION: only works for db_src == 'wos' so far!)\n args:\n ref_elem: a reference\n db_src: database source of the bibliographic data\n returns:\n doi if doi is extractable\n 'NO_DOI' if no doi is extractable\n " if (db_src == 'wos'): regex_doi = re.compile('10.\\d{4,9}/[-._;()/:A-Za-z0-9]+$') try: res = regex_doi.search(ref_elem) doi = res.group(0) return doi except: return 'NO_DOI' else: return 'NO_DOI'<|docstring|>extract doi from reference (CAUTION: only works for db_src == 'wos' so far!) args: ref_elem: a reference db_src: database source of the bibliographic data returns: doi if doi is extractable 'NO_DOI' if no doi is extractable<|endoftext|>
cf5f64bb4b90f930e5dbd40f2048e041b1a0eb39fdcf77b4218f688f2ee2a5cd
def _create_citation_network(BibObject, keywords=['citation network'], w_method='nppc'): "\n create a citation network\n args:\n BibObject: Bibliography object\n keywords: list of keywords as graph attributes (for later reference)\n w_method: weighting method (so far only 'nppc')\n returns:\n net: a citation network\n " net = nx.DiGraph(kw=keywords) net.add_edges_from(BibObject.cit_rel) net = _break_cycles(net) net = _compute_edge_weights(net, w_method) return net
create a citation network args: BibObject: Bibliography object keywords: list of keywords as graph attributes (for later reference) w_method: weighting method (so far only 'nppc') returns: net: a citation network
bibl_io.py
_create_citation_network
JohannesNakayama/CitationNetworks
0
python
def _create_citation_network(BibObject, keywords=['citation network'], w_method='nppc'): "\n create a citation network\n args:\n BibObject: Bibliography object\n keywords: list of keywords as graph attributes (for later reference)\n w_method: weighting method (so far only 'nppc')\n returns:\n net: a citation network\n " net = nx.DiGraph(kw=keywords) net.add_edges_from(BibObject.cit_rel) net = _break_cycles(net) net = _compute_edge_weights(net, w_method) return net
def _create_citation_network(BibObject, keywords=['citation network'], w_method='nppc'): "\n create a citation network\n args:\n BibObject: Bibliography object\n keywords: list of keywords as graph attributes (for later reference)\n w_method: weighting method (so far only 'nppc')\n returns:\n net: a citation network\n " net = nx.DiGraph(kw=keywords) net.add_edges_from(BibObject.cit_rel) net = _break_cycles(net) net = _compute_edge_weights(net, w_method) return net<|docstring|>create a citation network args: BibObject: Bibliography object keywords: list of keywords as graph attributes (for later reference) w_method: weighting method (so far only 'nppc') returns: net: a citation network<|endoftext|>
924b9a0795546c333a6c131b4c86c9eace79611edac9144f246d630f0cec3c4e
def _compute_edge_weights(net, method='nppc'): "\n compute the edge weights of a citation network\n args:\n net: a citation network\n method: a weighting method (so far only 'nppc')\n returns:\n net: the net with weighted edges\n " if (method == 'nppc'): con_nodes = _find_connected_nodes(net) for sub in [nx.all_simple_paths(net, source=pair[0], target=pair[1]) for pair in con_nodes]: tmp = net.subgraph({node for path in sub for node in path}) for edge in tmp.edges: try: net[edge[0]][edge[1]]['weight'] += 1 except: net[edge[0]][edge[1]]['weight'] = 1 else: print('This does not seem to be a valid weighting method.') return net
compute the edge weights of a citation network args: net: a citation network method: a weighting method (so far only 'nppc') returns: net: the net with weighted edges
bibl_io.py
_compute_edge_weights
JohannesNakayama/CitationNetworks
0
python
def _compute_edge_weights(net, method='nppc'): "\n compute the edge weights of a citation network\n args:\n net: a citation network\n method: a weighting method (so far only 'nppc')\n returns:\n net: the net with weighted edges\n " if (method == 'nppc'): con_nodes = _find_connected_nodes(net) for sub in [nx.all_simple_paths(net, source=pair[0], target=pair[1]) for pair in con_nodes]: tmp = net.subgraph({node for path in sub for node in path}) for edge in tmp.edges: try: net[edge[0]][edge[1]]['weight'] += 1 except: net[edge[0]][edge[1]]['weight'] = 1 else: print('This does not seem to be a valid weighting method.') return net
def _compute_edge_weights(net, method='nppc'): "\n compute the edge weights of a citation network\n args:\n net: a citation network\n method: a weighting method (so far only 'nppc')\n returns:\n net: the net with weighted edges\n " if (method == 'nppc'): con_nodes = _find_connected_nodes(net) for sub in [nx.all_simple_paths(net, source=pair[0], target=pair[1]) for pair in con_nodes]: tmp = net.subgraph({node for path in sub for node in path}) for edge in tmp.edges: try: net[edge[0]][edge[1]]['weight'] += 1 except: net[edge[0]][edge[1]]['weight'] = 1 else: print('This does not seem to be a valid weighting method.') return net<|docstring|>compute the edge weights of a citation network args: net: a citation network method: a weighting method (so far only 'nppc') returns: net: the net with weighted edges<|endoftext|>
49e3829ca5b75540487f96935d57045d8627a0f723771d3627752a2e2ecc1488
def _find_connected_nodes(net): '\n find all connected nodes (a, b) where there is a path from a to b\n args:\n net: a citation network\n returns:\n con_nodes: a list of tuples with all connected nodes\n ' con_nodes = [(struc[0], i) for node in net.nodes for struc in nx.bfs_successors(net, node) for i in struc[1]] return con_nodes
find all connected nodes (a, b) where there is a path from a to b args: net: a citation network returns: con_nodes: a list of tuples with all connected nodes
bibl_io.py
_find_connected_nodes
JohannesNakayama/CitationNetworks
0
python
def _find_connected_nodes(net): '\n find all connected nodes (a, b) where there is a path from a to b\n args:\n net: a citation network\n returns:\n con_nodes: a list of tuples with all connected nodes\n ' con_nodes = [(struc[0], i) for node in net.nodes for struc in nx.bfs_successors(net, node) for i in struc[1]] return con_nodes
def _find_connected_nodes(net): '\n find all connected nodes (a, b) where there is a path from a to b\n args:\n net: a citation network\n returns:\n con_nodes: a list of tuples with all connected nodes\n ' con_nodes = [(struc[0], i) for node in net.nodes for struc in nx.bfs_successors(net, node) for i in struc[1]] return con_nodes<|docstring|>find all connected nodes (a, b) where there is a path from a to b args: net: a citation network returns: con_nodes: a list of tuples with all connected nodes<|endoftext|>
075ec459e150838956c6c04f2cd268a19690b6807ce168de01a1b92e3fd7a41e
def _extract_out_deg_dist(net): '\n extract the distribution of out-degrees in the citation network for later pruning\n args:\n net: a citation graph\n returns:\n a pandas Dataframe with the frequency distribution of out-degrees\n ' out_deg_list = [net.out_degree(node) for node in net.nodes] freq = ([0] * (max(out_deg_list) + 1)) for i in out_deg_list: freq[i] += 1 data = {'out_degree': list(range((max(out_deg_list) + 1))), 'freq': freq} return pd.DataFrame(data)
extract the distribution of out-degrees in the citation network for later pruning args: net: a citation graph returns: a pandas Dataframe with the frequency distribution of out-degrees
bibl_io.py
_extract_out_deg_dist
JohannesNakayama/CitationNetworks
0
python
def _extract_out_deg_dist(net): '\n extract the distribution of out-degrees in the citation network for later pruning\n args:\n net: a citation graph\n returns:\n a pandas Dataframe with the frequency distribution of out-degrees\n ' out_deg_list = [net.out_degree(node) for node in net.nodes] freq = ([0] * (max(out_deg_list) + 1)) for i in out_deg_list: freq[i] += 1 data = {'out_degree': list(range((max(out_deg_list) + 1))), 'freq': freq} return pd.DataFrame(data)
def _extract_out_deg_dist(net): '\n extract the distribution of out-degrees in the citation network for later pruning\n args:\n net: a citation graph\n returns:\n a pandas Dataframe with the frequency distribution of out-degrees\n ' out_deg_list = [net.out_degree(node) for node in net.nodes] freq = ([0] * (max(out_deg_list) + 1)) for i in out_deg_list: freq[i] += 1 data = {'out_degree': list(range((max(out_deg_list) + 1))), 'freq': freq} return pd.DataFrame(data)<|docstring|>extract the distribution of out-degrees in the citation network for later pruning args: net: a citation graph returns: a pandas Dataframe with the frequency distribution of out-degrees<|endoftext|>
5dc418acb9479ba172f3fbd68e95d91e00ff0f6214122e3b1a0cf4f8a4b2d438
def _enclose_cit_rel(net): '\n add sink and source to a citation network\n args:\n net: an unweighted citation network\n return:\n enclosing: list of edges to add sink and source\n ' try: source_edges = [('source', node, {'weight': 1}) for node in net.nodes if (net.in_degree(node) == 0)] sink_edges = [(node, 'sink', {'weight': 1}) for node in net.nodes if (net.out_degree(node) == 0)] return (source_edges + sink_edges) except: return False
add sink and source to a citation network args: net: an unweighted citation network return: enclosing: list of edges to add sink and source
bibl_io.py
_enclose_cit_rel
JohannesNakayama/CitationNetworks
0
python
def _enclose_cit_rel(net): '\n add sink and source to a citation network\n args:\n net: an unweighted citation network\n return:\n enclosing: list of edges to add sink and source\n ' try: source_edges = [('source', node, {'weight': 1}) for node in net.nodes if (net.in_degree(node) == 0)] sink_edges = [(node, 'sink', {'weight': 1}) for node in net.nodes if (net.out_degree(node) == 0)] return (source_edges + sink_edges) except: return False
def _enclose_cit_rel(net): '\n add sink and source to a citation network\n args:\n net: an unweighted citation network\n return:\n enclosing: list of edges to add sink and source\n ' try: source_edges = [('source', node, {'weight': 1}) for node in net.nodes if (net.in_degree(node) == 0)] sink_edges = [(node, 'sink', {'weight': 1}) for node in net.nodes if (net.out_degree(node) == 0)] return (source_edges + sink_edges) except: return False<|docstring|>add sink and source to a citation network args: net: an unweighted citation network return: enclosing: list of edges to add sink and source<|endoftext|>
45fe9e77c4bd1218aa81a0472872cf330de356ca4595e6e0a04ea7931ba46064
def _break_cycles(net): '\n breaks potential cycles in a citation net\n args:\n net: a citation net\n returns:\n net: the acyclic citation net\n ' flag = True counter = 0 while flag: try: counter += 1 cycles = nx.find_cycle(net) net.remove_edge(cycles[(- 1)][0], cycles[(- 1)][1]) except: if (counter == 1): print('NOTE: {} edge was removed to break cycles'.format(counter)) else: print('NOTE: {} edges were removed to break cycles'.format(counter)) flag = False if (counter >= 100): print('NOTE: There are oddly many cycles. Please check your data to avoid problems in the further process.') flag = False return net
breaks potential cycles in a citation net args: net: a citation net returns: net: the acyclic citation net
bibl_io.py
_break_cycles
JohannesNakayama/CitationNetworks
0
python
def _break_cycles(net): '\n breaks potential cycles in a citation net\n args:\n net: a citation net\n returns:\n net: the acyclic citation net\n ' flag = True counter = 0 while flag: try: counter += 1 cycles = nx.find_cycle(net) net.remove_edge(cycles[(- 1)][0], cycles[(- 1)][1]) except: if (counter == 1): print('NOTE: {} edge was removed to break cycles'.format(counter)) else: print('NOTE: {} edges were removed to break cycles'.format(counter)) flag = False if (counter >= 100): print('NOTE: There are oddly many cycles. Please check your data to avoid problems in the further process.') flag = False return net
def _break_cycles(net): '\n breaks potential cycles in a citation net\n args:\n net: a citation net\n returns:\n net: the acyclic citation net\n ' flag = True counter = 0 while flag: try: counter += 1 cycles = nx.find_cycle(net) net.remove_edge(cycles[(- 1)][0], cycles[(- 1)][1]) except: if (counter == 1): print('NOTE: {} edge was removed to break cycles'.format(counter)) else: print('NOTE: {} edges were removed to break cycles'.format(counter)) flag = False if (counter >= 100): print('NOTE: There are oddly many cycles. Please check your data to avoid problems in the further process.') flag = False return net<|docstring|>breaks potential cycles in a citation net args: net: a citation net returns: net: the acyclic citation net<|endoftext|>
e3837abe759d48e98335cacf4d90dcb238f68b418be8b72c19bace619a479e8c
def _main_path_analysis(CitNetObject, mc, iterations): '\n conduct main path analysis on a citation network\n args:\n CitNetObject: a CitationNetwork object\n mc: minimum citations for start nodes\n iterations: number of iterations to conduct\n returns:\n main_paths: a list of all mined main paths\n main_net: the edges of all main paths in one list\n ' CitNetObject.cit_net.remove_nodes_from(['source', 'sink']) print('NOTE: source and sink have been removed from your citation network') init_net = CitNetObject.cit_net for i in range(iterations): start_nodes = _retrieve_start_nodes(init_net, mc) main_paths = [] main_net = [] for node in start_nodes: counter = 0 flag = True error = False mp = [] cur_nodes = [node] while flag: candidates = [[(e[0], e[1], init_net.edges[(e[0], e[1])]['weight']) for e in init_net.out_edges(cur_node)] for cur_node in cur_nodes] weights = [[t[2] for t in l] for l in candidates] mw = [max(w) for w in weights] max_idx = [[i for (i, j) in enumerate(weights[m]) if (j == mw[m])] for m in range(len(mw))] cur_nodes.clear() for (i, mi) in enumerate(max_idx): next_edges = [candidates[i][j] for j in mi] mp.extend(next_edges) cur_nodes.extend([e[1] for e in next_edges]) cur_nodes = list(dict.fromkeys(cur_nodes)) rm_idx = [] for i in range(len(cur_nodes)): if (init_net.out_degree(cur_nodes[i]) == 0): rm_idx.append(i) for idx in sorted(rm_idx, reverse=True): del cur_nodes[idx] counter += 1 if (not cur_nodes): flag = False if (counter >= 100): print('This takes oddly long. Something must have gone wrong.') error = True flag = False mp = list(dict.fromkeys(mp)) main_paths.append(mp) main_net.extend(mp) if error: print('An error occurred.') break init_net = nx.DiGraph() init_net.add_weighted_edges_from(main_net) return (main_paths, main_net)
conduct main path analysis on a citation network args: CitNetObject: a CitationNetwork object mc: minimum citations for start nodes iterations: number of iterations to conduct returns: main_paths: a list of all mined main paths main_net: the edges of all main paths in one list
bibl_io.py
_main_path_analysis
JohannesNakayama/CitationNetworks
0
python
def _main_path_analysis(CitNetObject, mc, iterations): '\n conduct main path analysis on a citation network\n args:\n CitNetObject: a CitationNetwork object\n mc: minimum citations for start nodes\n iterations: number of iterations to conduct\n returns:\n main_paths: a list of all mined main paths\n main_net: the edges of all main paths in one list\n ' CitNetObject.cit_net.remove_nodes_from(['source', 'sink']) print('NOTE: source and sink have been removed from your citation network') init_net = CitNetObject.cit_net for i in range(iterations): start_nodes = _retrieve_start_nodes(init_net, mc) main_paths = [] main_net = [] for node in start_nodes: counter = 0 flag = True error = False mp = [] cur_nodes = [node] while flag: candidates = [[(e[0], e[1], init_net.edges[(e[0], e[1])]['weight']) for e in init_net.out_edges(cur_node)] for cur_node in cur_nodes] weights = [[t[2] for t in l] for l in candidates] mw = [max(w) for w in weights] max_idx = [[i for (i, j) in enumerate(weights[m]) if (j == mw[m])] for m in range(len(mw))] cur_nodes.clear() for (i, mi) in enumerate(max_idx): next_edges = [candidates[i][j] for j in mi] mp.extend(next_edges) cur_nodes.extend([e[1] for e in next_edges]) cur_nodes = list(dict.fromkeys(cur_nodes)) rm_idx = [] for i in range(len(cur_nodes)): if (init_net.out_degree(cur_nodes[i]) == 0): rm_idx.append(i) for idx in sorted(rm_idx, reverse=True): del cur_nodes[idx] counter += 1 if (not cur_nodes): flag = False if (counter >= 100): print('This takes oddly long. Something must have gone wrong.') error = True flag = False mp = list(dict.fromkeys(mp)) main_paths.append(mp) main_net.extend(mp) if error: print('An error occurred.') break init_net = nx.DiGraph() init_net.add_weighted_edges_from(main_net) return (main_paths, main_net)
def _main_path_analysis(CitNetObject, mc, iterations): '\n conduct main path analysis on a citation network\n args:\n CitNetObject: a CitationNetwork object\n mc: minimum citations for start nodes\n iterations: number of iterations to conduct\n returns:\n main_paths: a list of all mined main paths\n main_net: the edges of all main paths in one list\n ' CitNetObject.cit_net.remove_nodes_from(['source', 'sink']) print('NOTE: source and sink have been removed from your citation network') init_net = CitNetObject.cit_net for i in range(iterations): start_nodes = _retrieve_start_nodes(init_net, mc) main_paths = [] main_net = [] for node in start_nodes: counter = 0 flag = True error = False mp = [] cur_nodes = [node] while flag: candidates = [[(e[0], e[1], init_net.edges[(e[0], e[1])]['weight']) for e in init_net.out_edges(cur_node)] for cur_node in cur_nodes] weights = [[t[2] for t in l] for l in candidates] mw = [max(w) for w in weights] max_idx = [[i for (i, j) in enumerate(weights[m]) if (j == mw[m])] for m in range(len(mw))] cur_nodes.clear() for (i, mi) in enumerate(max_idx): next_edges = [candidates[i][j] for j in mi] mp.extend(next_edges) cur_nodes.extend([e[1] for e in next_edges]) cur_nodes = list(dict.fromkeys(cur_nodes)) rm_idx = [] for i in range(len(cur_nodes)): if (init_net.out_degree(cur_nodes[i]) == 0): rm_idx.append(i) for idx in sorted(rm_idx, reverse=True): del cur_nodes[idx] counter += 1 if (not cur_nodes): flag = False if (counter >= 100): print('This takes oddly long. 
Something must have gone wrong.') error = True flag = False mp = list(dict.fromkeys(mp)) main_paths.append(mp) main_net.extend(mp) if error: print('An error occurred.') break init_net = nx.DiGraph() init_net.add_weighted_edges_from(main_net) return (main_paths, main_net)<|docstring|>conduct main path analysis on a citation network args: CitNetObject: a CitationNetwork object mc: minimum citations for start nodes iterations: number of iterations to conduct returns: main_paths: a list of all mined main paths main_net: the edges of all main paths in one list<|endoftext|>
dde755559a79e0ef398cb37e0a348b09fd44117ab8cc45b18698b151239c18c9
def _retrieve_start_nodes(net, mc): '\n retrieve nodes with in_degree == 0 as starting points for main path analysis\n args:\n net: a CitationNetwork object\n mc: minimum citations\n returns:\n list of start nodes in net\n ' return [node for node in net.nodes if ((net.in_degree(node) == 0) and (net.out_degree(node) > mc))]
retrieve nodes with in_degree == 0 as starting points for main path analysis args: net: a CitationNetwork object mc: minimum citations returns: list of start nodes in net
bibl_io.py
_retrieve_start_nodes
JohannesNakayama/CitationNetworks
0
python
def _retrieve_start_nodes(net, mc): '\n retrieve nodes with in_degree == 0 as starting points for main path analysis\n args:\n net: a CitationNetwork object\n mc: minimum citations\n returns:\n list of start nodes in net\n ' return [node for node in net.nodes if ((net.in_degree(node) == 0) and (net.out_degree(node) > mc))]
def _retrieve_start_nodes(net, mc): '\n retrieve nodes with in_degree == 0 as starting points for main path analysis\n args:\n net: a CitationNetwork object\n mc: minimum citations\n returns:\n list of start nodes in net\n ' return [node for node in net.nodes if ((net.in_degree(node) == 0) and (net.out_degree(node) > mc))]<|docstring|>retrieve nodes with in_degree == 0 as starting points for main path analysis args: net: a CitationNetwork object mc: minimum citations returns: list of start nodes in net<|endoftext|>
47656234ea5eb18ca81da400477a530a28af4a689acbeb2594046f771905ad6f
def export_bib_dict(self, path): '\n write bibliographic dictionary to a json file\n ' with open(path, 'w') as json_file: json.dump(self.bib_dict, json_file)
write bibliographic dictionary to a json file
bibl_io.py
export_bib_dict
JohannesNakayama/CitationNetworks
0
python
def export_bib_dict(self, path): '\n \n ' with open(path, 'w') as json_file: json.dump(self.bib_dict, json_file)
def export_bib_dict(self, path): '\n \n ' with open(path, 'w') as json_file: json.dump(self.bib_dict, json_file)<|docstring|>write bibliographic dictionary to a json file<|endoftext|>
2d98dd39a88915c137abe8745c21cddf31c24b2accd53ae49b37383c3db51600
def add_enclosing(self): '\n add sink and source to citation network\n ' self.cit_net.add_edges_from(_enclose_cit_rel(self.cit_net))
add sink and source to citation network
bibl_io.py
add_enclosing
JohannesNakayama/CitationNetworks
0
python
def add_enclosing(self): '\n \n ' self.cit_net.add_edges_from(_enclose_cit_rel(self.cit_net))
def add_enclosing(self): '\n \n ' self.cit_net.add_edges_from(_enclose_cit_rel(self.cit_net))<|docstring|>add sink and source to citation network<|endoftext|>
d3cc55314ce37ab9132364c67fbc528e94958087cc6a31106c9b65721398da8f
def create_network(self, mw=1): '\n create a main path network\n contains all edges that are part of a main path\n network is pruned with some options\n ' self.main_path_net.add_weighted_edges_from(self.main_net) _rm_list_e = [] for e in self.main_path_net.edges: if (self.main_path_net.get_edge_data(e[0], e[1])['weight'] < mw): _rm_list_e.append(e) self.main_path_net.remove_edges_from(_rm_list_e) _rm_list_n = [] for n in self.main_path_net.nodes: if ((self.main_path_net.in_degree(n) == 0) and (self.main_path_net.out_degree(n) == 0)): _rm_list_n.append(n) self.main_path_net.remove_nodes_from(_rm_list_n)
create a main path network contains all edges that are part of a main path network is pruned with some options
bibl_io.py
create_network
JohannesNakayama/CitationNetworks
0
python
def create_network(self, mw=1): '\n create a main path network\n contains all edges that are part of a main path\n network is pruned with some options\n ' self.main_path_net.add_weighted_edges_from(self.main_net) _rm_list_e = [] for e in self.main_path_net.edges: if (self.main_path_net.get_edge_data(e[0], e[1])['weight'] < mw): _rm_list_e.append(e) self.main_path_net.remove_edges_from(_rm_list_e) _rm_list_n = [] for n in self.main_path_net.nodes: if ((self.main_path_net.in_degree(n) == 0) and (self.main_path_net.out_degree(n) == 0)): _rm_list_n.append(n) self.main_path_net.remove_nodes_from(_rm_list_n)
def create_network(self, mw=1): '\n create a main path network\n contains all edges that are part of a main path\n network is pruned with some options\n ' self.main_path_net.add_weighted_edges_from(self.main_net) _rm_list_e = [] for e in self.main_path_net.edges: if (self.main_path_net.get_edge_data(e[0], e[1])['weight'] < mw): _rm_list_e.append(e) self.main_path_net.remove_edges_from(_rm_list_e) _rm_list_n = [] for n in self.main_path_net.nodes: if ((self.main_path_net.in_degree(n) == 0) and (self.main_path_net.out_degree(n) == 0)): _rm_list_n.append(n) self.main_path_net.remove_nodes_from(_rm_list_n)<|docstring|>create a main path network contains all edges that are part of a main path network is pruned with some options<|endoftext|>
5839fdd3a7a73947f8dec18da27a4d9af7a713adad00381008e7521b9b9c30f9
def plot_mask2D(mask, title='', point_coords=None, figsize=10, point_marker_size=5): '\n Simple plotting tool to show intermediate mask predictions and points \n where PointRend is applied.\n\n Args:\n mask (Tensor): mask prediction of shape HxW\n title (str): title for the plot\n point_coords ((Tensor, Tensor)): x and y point coordinates\n figsize (int): size of the figure to plot\n point_marker_size (int): marker size for points\n ' (H, W) = mask.shape plt.figure(figsize=(figsize, figsize)) if title: title += ', ' plt.title('{}resolution {}x{}'.format(title, H, W), fontsize=30) plt.ylabel(H, fontsize=30) plt.xlabel(W, fontsize=30) plt.xticks([], []) plt.yticks([], []) plt.imshow(mask.detach(), interpolation='nearest', cmap=plt.get_cmap('gray')) if (point_coords is not None): plt.scatter(x=point_coords[0], y=point_coords[1], color='red', s=point_marker_size, clip_on=True) plt.xlim((- 0.5), (W - 0.5)) plt.ylim((H - 0.5), (- 0.5)) plt.show()
Simple plotting tool to show intermediate mask predictions and points where PointRend is applied. Args: mask (Tensor): mask prediction of shape HxW title (str): title for the plot point_coords ((Tensor, Tensor)): x and y point coordinates figsize (int): size of the figure to plot point_marker_size (int): marker size for points
lib/common/seg3d_utils.py
plot_mask2D
YuliangXiu/ICON
486
python
def plot_mask2D(mask, title=, point_coords=None, figsize=10, point_marker_size=5): '\n Simple plotting tool to show intermediate mask predictions and points \n where PointRend is applied.\n\n Args:\n mask (Tensor): mask prediction of shape HxW\n title (str): title for the plot\n point_coords ((Tensor, Tensor)): x and y point coordinates\n figsize (int): size of the figure to plot\n point_marker_size (int): marker size for points\n ' (H, W) = mask.shape plt.figure(figsize=(figsize, figsize)) if title: title += ', ' plt.title('{}resolution {}x{}'.format(title, H, W), fontsize=30) plt.ylabel(H, fontsize=30) plt.xlabel(W, fontsize=30) plt.xticks([], []) plt.yticks([], []) plt.imshow(mask.detach(), interpolation='nearest', cmap=plt.get_cmap('gray')) if (point_coords is not None): plt.scatter(x=point_coords[0], y=point_coords[1], color='red', s=point_marker_size, clip_on=True) plt.xlim((- 0.5), (W - 0.5)) plt.ylim((H - 0.5), (- 0.5)) plt.show()
def plot_mask2D(mask, title=, point_coords=None, figsize=10, point_marker_size=5): '\n Simple plotting tool to show intermediate mask predictions and points \n where PointRend is applied.\n\n Args:\n mask (Tensor): mask prediction of shape HxW\n title (str): title for the plot\n point_coords ((Tensor, Tensor)): x and y point coordinates\n figsize (int): size of the figure to plot\n point_marker_size (int): marker size for points\n ' (H, W) = mask.shape plt.figure(figsize=(figsize, figsize)) if title: title += ', ' plt.title('{}resolution {}x{}'.format(title, H, W), fontsize=30) plt.ylabel(H, fontsize=30) plt.xlabel(W, fontsize=30) plt.xticks([], []) plt.yticks([], []) plt.imshow(mask.detach(), interpolation='nearest', cmap=plt.get_cmap('gray')) if (point_coords is not None): plt.scatter(x=point_coords[0], y=point_coords[1], color='red', s=point_marker_size, clip_on=True) plt.xlim((- 0.5), (W - 0.5)) plt.ylim((H - 0.5), (- 0.5)) plt.show()<|docstring|>Simple plotting tool to show intermediate mask predictions and points where PointRend is applied. Args: mask (Tensor): mask prediction of shape HxW title (str): title for the plot point_coords ((Tensor, Tensor)): x and y point coordinates figsize (int): size of the figure to plot point_marker_size (int): marker size for points<|endoftext|>
8f2303640a258c1c59ed1264a3b1bc3e9b6d4a0cedc1ad5804e4002860593be9
def plot_mask3D(mask=None, title='', point_coords=None, figsize=1500, point_marker_size=8, interactive=True): '\n Simple plotting tool to show intermediate mask predictions and points \n where PointRend is applied.\n\n Args:\n mask (Tensor): mask prediction of shape DxHxW\n title (str): title for the plot\n point_coords ((Tensor, Tensor, Tensor)): x and y and z point coordinates\n figsize (int): size of the figure to plot\n point_marker_size (int): marker size for points\n ' import trimesh import vtkplotter from skimage import measure vp = vtkplotter.Plotter(title=title, size=(figsize, figsize)) vis_list = [] if (mask is not None): mask = mask.detach().to('cpu').numpy() mask = mask.transpose(2, 1, 0) (verts, faces, normals, values) = measure.marching_cubes_lewiner(mask, 0.5, gradient_direction='ascent') mesh = trimesh.Trimesh(verts, faces) mesh.visual.face_colors = [200, 200, 250, 100] vis_list.append(mesh) if (point_coords is not None): point_coords = torch.stack(point_coords, 1).to('cpu').numpy() pc = vtkplotter.Points(point_coords, r=point_marker_size, c='red') vis_list.append(pc) vp.show(*vis_list, bg='white', axes=1, interactive=interactive, azimuth=30, elevation=30)
Simple plotting tool to show intermediate mask predictions and points where PointRend is applied. Args: mask (Tensor): mask prediction of shape DxHxW title (str): title for the plot point_coords ((Tensor, Tensor, Tensor)): x and y and z point coordinates figsize (int): size of the figure to plot point_marker_size (int): marker size for points
lib/common/seg3d_utils.py
plot_mask3D
YuliangXiu/ICON
486
python
def plot_mask3D(mask=None, title=, point_coords=None, figsize=1500, point_marker_size=8, interactive=True): '\n Simple plotting tool to show intermediate mask predictions and points \n where PointRend is applied.\n\n Args:\n mask (Tensor): mask prediction of shape DxHxW\n title (str): title for the plot\n point_coords ((Tensor, Tensor, Tensor)): x and y and z point coordinates\n figsize (int): size of the figure to plot\n point_marker_size (int): marker size for points\n ' import trimesh import vtkplotter from skimage import measure vp = vtkplotter.Plotter(title=title, size=(figsize, figsize)) vis_list = [] if (mask is not None): mask = mask.detach().to('cpu').numpy() mask = mask.transpose(2, 1, 0) (verts, faces, normals, values) = measure.marching_cubes_lewiner(mask, 0.5, gradient_direction='ascent') mesh = trimesh.Trimesh(verts, faces) mesh.visual.face_colors = [200, 200, 250, 100] vis_list.append(mesh) if (point_coords is not None): point_coords = torch.stack(point_coords, 1).to('cpu').numpy() pc = vtkplotter.Points(point_coords, r=point_marker_size, c='red') vis_list.append(pc) vp.show(*vis_list, bg='white', axes=1, interactive=interactive, azimuth=30, elevation=30)
def plot_mask3D(mask=None, title=, point_coords=None, figsize=1500, point_marker_size=8, interactive=True): '\n Simple plotting tool to show intermediate mask predictions and points \n where PointRend is applied.\n\n Args:\n mask (Tensor): mask prediction of shape DxHxW\n title (str): title for the plot\n point_coords ((Tensor, Tensor, Tensor)): x and y and z point coordinates\n figsize (int): size of the figure to plot\n point_marker_size (int): marker size for points\n ' import trimesh import vtkplotter from skimage import measure vp = vtkplotter.Plotter(title=title, size=(figsize, figsize)) vis_list = [] if (mask is not None): mask = mask.detach().to('cpu').numpy() mask = mask.transpose(2, 1, 0) (verts, faces, normals, values) = measure.marching_cubes_lewiner(mask, 0.5, gradient_direction='ascent') mesh = trimesh.Trimesh(verts, faces) mesh.visual.face_colors = [200, 200, 250, 100] vis_list.append(mesh) if (point_coords is not None): point_coords = torch.stack(point_coords, 1).to('cpu').numpy() pc = vtkplotter.Points(point_coords, r=point_marker_size, c='red') vis_list.append(pc) vp.show(*vis_list, bg='white', axes=1, interactive=interactive, azimuth=30, elevation=30)<|docstring|>Simple plotting tool to show intermediate mask predictions and points where PointRend is applied. Args: mask (Tensor): mask prediction of shape DxHxW title (str): title for the plot point_coords ((Tensor, Tensor, Tensor)): x and y and z point coordinates figsize (int): size of the figure to plot point_marker_size (int): marker size for points<|endoftext|>
3a7bdd61877701ad403678027c3b8ebe8db9e0bf2a5a1a43024138b091b16470
def get_uncertain_point_coords_on_grid3D(uncertainty_map, num_points, **kwargs): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty\n values for a set of points on a regular H x W x D grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W x D) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W x D grid.\n ' (R, _, D, H, W) = uncertainty_map.shape num_points = min(((D * H) * W), num_points) (point_scores, point_indices) = torch.topk(uncertainty_map.view(R, ((D * H) * W)), k=num_points, dim=1) point_coords = torch.zeros(R, num_points, 3, dtype=torch.float, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.float) point_coords[(:, :, 1)] = ((point_indices % (H * W)) // W).to(torch.float) point_coords[(:, :, 2)] = (point_indices // (H * W)).to(torch.float) print(f'resolution {D} x {H} x {W}', point_scores.min(), point_scores.max()) return (point_indices, point_coords)
Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty values for a set of points on a regular H x W x D grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W x D) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W x D grid.
lib/common/seg3d_utils.py
get_uncertain_point_coords_on_grid3D
YuliangXiu/ICON
486
python
def get_uncertain_point_coords_on_grid3D(uncertainty_map, num_points, **kwargs): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty\n values for a set of points on a regular H x W x D grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W x D) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W x D grid.\n ' (R, _, D, H, W) = uncertainty_map.shape num_points = min(((D * H) * W), num_points) (point_scores, point_indices) = torch.topk(uncertainty_map.view(R, ((D * H) * W)), k=num_points, dim=1) point_coords = torch.zeros(R, num_points, 3, dtype=torch.float, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.float) point_coords[(:, :, 1)] = ((point_indices % (H * W)) // W).to(torch.float) point_coords[(:, :, 2)] = (point_indices // (H * W)).to(torch.float) print(f'resolution {D} x {H} x {W}', point_scores.min(), point_scores.max()) return (point_indices, point_coords)
def get_uncertain_point_coords_on_grid3D(uncertainty_map, num_points, **kwargs): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty\n values for a set of points on a regular H x W x D grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W x D) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W x D grid.\n ' (R, _, D, H, W) = uncertainty_map.shape num_points = min(((D * H) * W), num_points) (point_scores, point_indices) = torch.topk(uncertainty_map.view(R, ((D * H) * W)), k=num_points, dim=1) point_coords = torch.zeros(R, num_points, 3, dtype=torch.float, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.float) point_coords[(:, :, 1)] = ((point_indices % (H * W)) // W).to(torch.float) point_coords[(:, :, 2)] = (point_indices // (H * W)).to(torch.float) print(f'resolution {D} x {H} x {W}', point_scores.min(), point_scores.max()) return (point_indices, point_coords)<|docstring|>Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty values for a set of points on a regular H x W x D grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W x D) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W x D grid.<|endoftext|>
19af53157d890c385aa7d8448892ab2456174d87a35c831aee66bc9aeca6511c
def get_uncertain_point_coords_on_grid3D_faster(uncertainty_map, num_points, clip_min): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty\n values for a set of points on a regular H x W x D grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W x D) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W x D grid.\n ' (R, _, D, H, W) = uncertainty_map.shape assert (R == 1), 'batchsize > 1 is not implemented!' uncertainty_map = uncertainty_map.view(((D * H) * W)) indices = (uncertainty_map >= clip_min).nonzero().squeeze(1) num_points = min(num_points, indices.size(0)) (point_scores, point_indices) = torch.topk(uncertainty_map[indices], k=num_points, dim=0) point_indices = indices[point_indices].unsqueeze(0) point_coords = torch.zeros(R, num_points, 3, dtype=torch.float, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.float) point_coords[(:, :, 1)] = ((point_indices % (H * W)) // W).to(torch.float) point_coords[(:, :, 2)] = (point_indices // (H * W)).to(torch.float) return (point_indices, point_coords)
Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty values for a set of points on a regular H x W x D grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W x D) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W x D grid.
lib/common/seg3d_utils.py
get_uncertain_point_coords_on_grid3D_faster
YuliangXiu/ICON
486
python
def get_uncertain_point_coords_on_grid3D_faster(uncertainty_map, num_points, clip_min): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty\n values for a set of points on a regular H x W x D grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W x D) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W x D grid.\n ' (R, _, D, H, W) = uncertainty_map.shape assert (R == 1), 'batchsize > 1 is not implemented!' uncertainty_map = uncertainty_map.view(((D * H) * W)) indices = (uncertainty_map >= clip_min).nonzero().squeeze(1) num_points = min(num_points, indices.size(0)) (point_scores, point_indices) = torch.topk(uncertainty_map[indices], k=num_points, dim=0) point_indices = indices[point_indices].unsqueeze(0) point_coords = torch.zeros(R, num_points, 3, dtype=torch.float, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.float) point_coords[(:, :, 1)] = ((point_indices % (H * W)) // W).to(torch.float) point_coords[(:, :, 2)] = (point_indices // (H * W)).to(torch.float) return (point_indices, point_coords)
def get_uncertain_point_coords_on_grid3D_faster(uncertainty_map, num_points, clip_min): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty\n values for a set of points on a regular H x W x D grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W x D) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W x D grid.\n ' (R, _, D, H, W) = uncertainty_map.shape assert (R == 1), 'batchsize > 1 is not implemented!' uncertainty_map = uncertainty_map.view(((D * H) * W)) indices = (uncertainty_map >= clip_min).nonzero().squeeze(1) num_points = min(num_points, indices.size(0)) (point_scores, point_indices) = torch.topk(uncertainty_map[indices], k=num_points, dim=0) point_indices = indices[point_indices].unsqueeze(0) point_coords = torch.zeros(R, num_points, 3, dtype=torch.float, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.float) point_coords[(:, :, 1)] = ((point_indices % (H * W)) // W).to(torch.float) point_coords[(:, :, 2)] = (point_indices // (H * W)).to(torch.float) return (point_indices, point_coords)<|docstring|>Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W, D) that contains uncertainty values for a set of points on a regular H x W x D grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W x D) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 3) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W x D grid.<|endoftext|>
4172fb207fdf959f3413f1c6aa1e5e99fd63dddf72ee5aaa463f9d0fd5c4d535
def get_uncertain_point_coords_on_grid2D(uncertainty_map, num_points, **kwargs): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty\n values for a set of points on a regular H x W grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W grid.\n ' (R, _, H, W) = uncertainty_map.shape num_points = min((H * W), num_points) (point_scores, point_indices) = torch.topk(uncertainty_map.view(R, (H * W)), k=num_points, dim=1) point_coords = torch.zeros(R, num_points, 2, dtype=torch.long, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.long) point_coords[(:, :, 1)] = (point_indices // W).to(torch.long) return (point_indices, point_coords)
Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty values for a set of points on a regular H x W grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W grid.
lib/common/seg3d_utils.py
get_uncertain_point_coords_on_grid2D
YuliangXiu/ICON
486
python
def get_uncertain_point_coords_on_grid2D(uncertainty_map, num_points, **kwargs): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty\n values for a set of points on a regular H x W grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W grid.\n ' (R, _, H, W) = uncertainty_map.shape num_points = min((H * W), num_points) (point_scores, point_indices) = torch.topk(uncertainty_map.view(R, (H * W)), k=num_points, dim=1) point_coords = torch.zeros(R, num_points, 2, dtype=torch.long, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.long) point_coords[(:, :, 1)] = (point_indices // W).to(torch.long) return (point_indices, point_coords)
def get_uncertain_point_coords_on_grid2D(uncertainty_map, num_points, **kwargs): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty\n values for a set of points on a regular H x W grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W grid.\n ' (R, _, H, W) = uncertainty_map.shape num_points = min((H * W), num_points) (point_scores, point_indices) = torch.topk(uncertainty_map.view(R, (H * W)), k=num_points, dim=1) point_coords = torch.zeros(R, num_points, 2, dtype=torch.long, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.long) point_coords[(:, :, 1)] = (point_indices // W).to(torch.long) return (point_indices, point_coords)<|docstring|>Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty values for a set of points on a regular H x W grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W grid.<|endoftext|>
f74019e69936b712bec66b71da574b94d6eef383ddf22fc99efd64b876438db2
def get_uncertain_point_coords_on_grid2D_faster(uncertainty_map, num_points, clip_min): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty\n values for a set of points on a regular H x W grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W grid.\n ' (R, _, H, W) = uncertainty_map.shape assert (R == 1), 'batchsize > 1 is not implemented!' uncertainty_map = uncertainty_map.view((H * W)) indices = (uncertainty_map >= clip_min).nonzero().squeeze(1) num_points = min(num_points, indices.size(0)) (point_scores, point_indices) = torch.topk(uncertainty_map[indices], k=num_points, dim=0) point_indices = indices[point_indices].unsqueeze(0) point_coords = torch.zeros(R, num_points, 2, dtype=torch.long, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.long) point_coords[(:, :, 1)] = (point_indices // W).to(torch.long) return (point_indices, point_coords)
Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty values for a set of points on a regular H x W grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W grid.
lib/common/seg3d_utils.py
get_uncertain_point_coords_on_grid2D_faster
YuliangXiu/ICON
486
python
def get_uncertain_point_coords_on_grid2D_faster(uncertainty_map, num_points, clip_min): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty\n values for a set of points on a regular H x W grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W grid.\n ' (R, _, H, W) = uncertainty_map.shape assert (R == 1), 'batchsize > 1 is not implemented!' uncertainty_map = uncertainty_map.view((H * W)) indices = (uncertainty_map >= clip_min).nonzero().squeeze(1) num_points = min(num_points, indices.size(0)) (point_scores, point_indices) = torch.topk(uncertainty_map[indices], k=num_points, dim=0) point_indices = indices[point_indices].unsqueeze(0) point_coords = torch.zeros(R, num_points, 2, dtype=torch.long, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.long) point_coords[(:, :, 1)] = (point_indices // W).to(torch.long) return (point_indices, point_coords)
def get_uncertain_point_coords_on_grid2D_faster(uncertainty_map, num_points, clip_min): '\n Find `num_points` most uncertain points from `uncertainty_map` grid.\n Args:\n uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty\n values for a set of points on a regular H x W grid.\n num_points (int): The number of points P to select.\n Returns:\n point_indices (Tensor): A tensor of shape (N, P) that contains indices from\n [0, H x W) of the most uncertain points.\n point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized\n coordinates of the most uncertain points from the H x W grid.\n ' (R, _, H, W) = uncertainty_map.shape assert (R == 1), 'batchsize > 1 is not implemented!' uncertainty_map = uncertainty_map.view((H * W)) indices = (uncertainty_map >= clip_min).nonzero().squeeze(1) num_points = min(num_points, indices.size(0)) (point_scores, point_indices) = torch.topk(uncertainty_map[indices], k=num_points, dim=0) point_indices = indices[point_indices].unsqueeze(0) point_coords = torch.zeros(R, num_points, 2, dtype=torch.long, device=uncertainty_map.device) point_coords[(:, :, 0)] = (point_indices % W).to(torch.long) point_coords[(:, :, 1)] = (point_indices // W).to(torch.long) return (point_indices, point_coords)<|docstring|>Find `num_points` most uncertain points from `uncertainty_map` grid. Args: uncertainty_map (Tensor): A tensor of shape (N, 1, H, W) that contains uncertainty values for a set of points on a regular H x W grid. num_points (int): The number of points P to select. Returns: point_indices (Tensor): A tensor of shape (N, P) that contains indices from [0, H x W) of the most uncertain points. point_coords (Tensor): A tensor of shape (N, P, 2) that contains [0, 1] x [0, 1] normalized coordinates of the most uncertain points from the H x W grid.<|endoftext|>
68964b0922ae2b939898a0a50c092071edde6fc005adcc8c79f28a7070b16705
def calculate_uncertainty(logits, classes=None, balance_value=0.5): "\n We estimate uncerainty as L1 distance between 0.0 and the logit prediction in 'logits' for the\n foreground class in `classes`.\n Args:\n logits (Tensor): A tensor of shape (R, C, ...) or (R, 1, ...) for class-specific or\n class-agnostic, where R is the total number of predicted masks in all images and C is\n the number of foreground classes. The values are logits.\n classes (list): A list of length R that contains either predicted of ground truth class\n for eash predicted mask.\n Returns:\n scores (Tensor): A tensor of shape (R, 1, ...) that contains uncertainty scores with\n the most uncertain locations having the highest uncertainty score.\n " if (logits.shape[1] == 1): gt_class_logits = logits else: gt_class_logits = logits[(torch.arange(logits.shape[0], device=logits.device), classes)].unsqueeze(1) return (- torch.abs((gt_class_logits - balance_value)))
We estimate uncerainty as L1 distance between 0.0 and the logit prediction in 'logits' for the foreground class in `classes`. Args: logits (Tensor): A tensor of shape (R, C, ...) or (R, 1, ...) for class-specific or class-agnostic, where R is the total number of predicted masks in all images and C is the number of foreground classes. The values are logits. classes (list): A list of length R that contains either predicted of ground truth class for eash predicted mask. Returns: scores (Tensor): A tensor of shape (R, 1, ...) that contains uncertainty scores with the most uncertain locations having the highest uncertainty score.
lib/common/seg3d_utils.py
calculate_uncertainty
YuliangXiu/ICON
486
python
def calculate_uncertainty(logits, classes=None, balance_value=0.5): "\n We estimate uncerainty as L1 distance between 0.0 and the logit prediction in 'logits' for the\n foreground class in `classes`.\n Args:\n logits (Tensor): A tensor of shape (R, C, ...) or (R, 1, ...) for class-specific or\n class-agnostic, where R is the total number of predicted masks in all images and C is\n the number of foreground classes. The values are logits.\n classes (list): A list of length R that contains either predicted of ground truth class\n for eash predicted mask.\n Returns:\n scores (Tensor): A tensor of shape (R, 1, ...) that contains uncertainty scores with\n the most uncertain locations having the highest uncertainty score.\n " if (logits.shape[1] == 1): gt_class_logits = logits else: gt_class_logits = logits[(torch.arange(logits.shape[0], device=logits.device), classes)].unsqueeze(1) return (- torch.abs((gt_class_logits - balance_value)))
def calculate_uncertainty(logits, classes=None, balance_value=0.5): "\n We estimate uncerainty as L1 distance between 0.0 and the logit prediction in 'logits' for the\n foreground class in `classes`.\n Args:\n logits (Tensor): A tensor of shape (R, C, ...) or (R, 1, ...) for class-specific or\n class-agnostic, where R is the total number of predicted masks in all images and C is\n the number of foreground classes. The values are logits.\n classes (list): A list of length R that contains either predicted of ground truth class\n for eash predicted mask.\n Returns:\n scores (Tensor): A tensor of shape (R, 1, ...) that contains uncertainty scores with\n the most uncertain locations having the highest uncertainty score.\n " if (logits.shape[1] == 1): gt_class_logits = logits else: gt_class_logits = logits[(torch.arange(logits.shape[0], device=logits.device), classes)].unsqueeze(1) return (- torch.abs((gt_class_logits - balance_value)))<|docstring|>We estimate uncerainty as L1 distance between 0.0 and the logit prediction in 'logits' for the foreground class in `classes`. Args: logits (Tensor): A tensor of shape (R, C, ...) or (R, 1, ...) for class-specific or class-agnostic, where R is the total number of predicted masks in all images and C is the number of foreground classes. The values are logits. classes (list): A list of length R that contains either predicted of ground truth class for eash predicted mask. Returns: scores (Tensor): A tensor of shape (R, 1, ...) that contains uncertainty scores with the most uncertain locations having the highest uncertainty score.<|endoftext|>
efacb71edd1b97e6686e4d4badd880645075934289d68c5dd1ef8e31b631534e
def parse(file, sink): 'Parse an RCS file.\n\n Parameters: FILE is the binary input stream object to parse. (I.e.\n an instance of the subclass of the io.IOBase class which has read()\n function returns bytes object, usually created using Python\'s built-in\n "open()" function). It should be opened in binary mode.\n SINK is an instance of (some subclass of) Sink. It\'s methods will be\n called as the file is parsed; see the definition of Sink for the\n details.\n ' return Parser().parse(file, sink)
Parse an RCS file. Parameters: FILE is the binary input stream object to parse. (I.e. an instance of the subclass of the io.IOBase class which has read() function returns bytes object, usually created using Python's built-in "open()" function). It should be opened in binary mode. SINK is an instance of (some subclass of) Sink. It's methods will be called as the file is parsed; see the definition of Sink for the details.
lib/vclib/ccvs/rcsparse/__init__.py
parse
rpluem/viewvc
294
python
def parse(file, sink): 'Parse an RCS file.\n\n Parameters: FILE is the binary input stream object to parse. (I.e.\n an instance of the subclass of the io.IOBase class which has read()\n function returns bytes object, usually created using Python\'s built-in\n "open()" function). It should be opened in binary mode.\n SINK is an instance of (some subclass of) Sink. It\'s methods will be\n called as the file is parsed; see the definition of Sink for the\n details.\n ' return Parser().parse(file, sink)
def parse(file, sink): 'Parse an RCS file.\n\n Parameters: FILE is the binary input stream object to parse. (I.e.\n an instance of the subclass of the io.IOBase class which has read()\n function returns bytes object, usually created using Python\'s built-in\n "open()" function). It should be opened in binary mode.\n SINK is an instance of (some subclass of) Sink. It\'s methods will be\n called as the file is parsed; see the definition of Sink for the\n details.\n ' return Parser().parse(file, sink)<|docstring|>Parse an RCS file. Parameters: FILE is the binary input stream object to parse. (I.e. an instance of the subclass of the io.IOBase class which has read() function returns bytes object, usually created using Python's built-in "open()" function). It should be opened in binary mode. SINK is an instance of (some subclass of) Sink. It's methods will be called as the file is parsed; see the definition of Sink for the details.<|endoftext|>
51d96e4bfcc283cd4c7ff1fe97fd32b0bdc922c65f9a5466304fe22530627c76
def strip_unneeded(text: str) -> str: 'Get rid of unneeded characters in text and return it.\n ' text = text.strip().replace('+', '').replace(',', '').replace(' ', '') if (not text): text = '0' return text
Get rid of unneeded characters in text and return it.
coronav.py
strip_unneeded
z33kz33k/coronav
0
python
def strip_unneeded(text: str) -> str: '\n ' text = text.strip().replace('+', ).replace(',', ).replace(' ', ) if (not text): text = '0' return text
def strip_unneeded(text: str) -> str: '\n ' text = text.strip().replace('+', ).replace(',', ).replace(' ', ) if (not text): text = '0' return text<|docstring|>Get rid of unneeded characters in text and return it.<|endoftext|>
bf804339e2f6c8e0acd3609a03d86da5b24b1ad5755a0da723fc317e05b4ead8
def make_datarow(data: List[str]) -> DataRow: 'Make a DataRow object from list of scraped data.\n ' return DataRow(data[0], int(strip_unneeded(data[1])), int(strip_unneeded(data[2])), int(strip_unneeded(data[3])), int(strip_unneeded(data[4])), int(strip_unneeded(data[5])), int(strip_unneeded(data[6])), int(strip_unneeded(data[7])), float(strip_unneeded(data[8])), float(strip_unneeded(data[9])))
Make a DataRow object from list of scraped data.
coronav.py
make_datarow
z33kz33k/coronav
0
python
def make_datarow(data: List[str]) -> DataRow: '\n ' return DataRow(data[0], int(strip_unneeded(data[1])), int(strip_unneeded(data[2])), int(strip_unneeded(data[3])), int(strip_unneeded(data[4])), int(strip_unneeded(data[5])), int(strip_unneeded(data[6])), int(strip_unneeded(data[7])), float(strip_unneeded(data[8])), float(strip_unneeded(data[9])))
def make_datarow(data: List[str]) -> DataRow: '\n ' return DataRow(data[0], int(strip_unneeded(data[1])), int(strip_unneeded(data[2])), int(strip_unneeded(data[3])), int(strip_unneeded(data[4])), int(strip_unneeded(data[5])), int(strip_unneeded(data[6])), int(strip_unneeded(data[7])), float(strip_unneeded(data[8])), float(strip_unneeded(data[9])))<|docstring|>Make a DataRow object from list of scraped data.<|endoftext|>
ec1593256b4e179ccb8f7e8e6068641bdd56b93a6235fa3d3b56336f1d2858ac
@property def hot_index(self) -> float: 'Healthcare On Top Index - that is total cases / total deaths\n ' return ((self.total_cases / self.total_deaths) if (self.total_deaths != 0) else 0)
Healthcare On Top Index - that is total cases / total deaths
coronav.py
hot_index
z33kz33k/coronav
0
python
@property def hot_index(self) -> float: '\n ' return ((self.total_cases / self.total_deaths) if (self.total_deaths != 0) else 0)
@property def hot_index(self) -> float: '\n ' return ((self.total_cases / self.total_deaths) if (self.total_deaths != 0) else 0)<|docstring|>Healthcare On Top Index - that is total cases / total deaths<|endoftext|>
5369b976f21e819ce9cb9a98435a3d0bf1bcbe8dccd2375ea0f429af7fe22ff0
@property def hot_index_str(self) -> str: 'HOT index string-formatted.\n ' return f'{self.hot_index:.1f}'
HOT index string-formatted.
coronav.py
hot_index_str
z33kz33k/coronav
0
python
@property def hot_index_str(self) -> str: '\n ' return f'{self.hot_index:.1f}'
@property def hot_index_str(self) -> str: '\n ' return f'{self.hot_index:.1f}'<|docstring|>HOT index string-formatted.<|endoftext|>
87105ab89a464183552037a7de251d2f561fa6ac92af5d7d5f565fbe63bddaaa
def future_date_validator(date: datetime.date) -> None: "Raise ValidationError if the date isn't a future date." if (date < datetime.datetime.now(datetime.timezone.utc)): raise ValidationError('Date must be a future date')
Raise ValidationError if the date isn't a future date.
pydis_site/apps/api/models/bot/offensive_message.py
future_date_validator
mathstrains21/site
700
python
def future_date_validator(date: datetime.date) -> None: if (date < datetime.datetime.now(datetime.timezone.utc)): raise ValidationError('Date must be a future date')
def future_date_validator(date: datetime.date) -> None: if (date < datetime.datetime.now(datetime.timezone.utc)): raise ValidationError('Date must be a future date')<|docstring|>Raise ValidationError if the date isn't a future date.<|endoftext|>
e469372138ed55ba63f56f1015a1e6829b9b8d07627f956cc708217346760dc8
def __str__(self): 'Return some info on this message, for display purposes only.' return f'Message {self.id}, will be deleted at {self.delete_date}'
Return some info on this message, for display purposes only.
pydis_site/apps/api/models/bot/offensive_message.py
__str__
mathstrains21/site
700
python
def __str__(self): return f'Message {self.id}, will be deleted at {self.delete_date}'
def __str__(self): return f'Message {self.id}, will be deleted at {self.delete_date}'<|docstring|>Return some info on this message, for display purposes only.<|endoftext|>
b79ff94e875cc570ddea216a7703291b32f2a4aa481d3e5a79915c440d10042d
def read_fits(fname, drop_separators=False): 'Load SNANA formatted data and cast it to a PANDAS dataframe\n\n Args:\n fname (str): path + name to PHOT.FITS file\n drop_separators (Boolean): if -777 are to be dropped\n\n Returns:\n (pandas.DataFrame) dataframe from PHOT.FITS file (with ID)\n (pandas.DataFrame) dataframe from HEAD.FITS file\n ' dat = Table.read(fname, format='fits') df_phot = dat.to_pandas() if (df_phot.MJD.values[(- 1)] == (- 777.0)): df_phot = df_phot.drop(df_phot.index[(- 1)]) if (df_phot.MJD.values[0] == (- 777.0)): df_phot = df_phot.drop(df_phot.index[0]) metadata_hdu = fits.open(fname.replace('PHOT', 'HEAD')) survey_name = metadata_hdu[0].header['SURVEY'] header = Table.read(fname.replace('PHOT', 'HEAD'), format='fits') df_header = header.to_pandas() df_header['SNID'] = df_header['SNID'].astype(np.int32) arr_ID = np.zeros(len(df_phot), dtype=np.int32) arr_idx = np.where((df_phot['MJD'].values == (- 777.0)))[0] arr_idx = np.hstack((np.array([0]), arr_idx, np.array([len(df_phot)]))) for counter in range(1, len(arr_idx)): (start, end) = (arr_idx[(counter - 1)], arr_idx[counter]) arr_ID[start:end] = df_header.SNID.iloc[(counter - 1)] df_phot['SNID'] = arr_ID if drop_separators: df_phot = df_phot[(df_phot.MJD != (- 777.0))] band_colname = ('FLT' if ('FLT' in df_phot.columns) else 'BAND') df_header = df_header[['SNID', 'SNTYPE', 'PEAKMJD', 'REDSHIFT_FINAL', 'MWEBV']] df_phot = df_phot[['SNID', 'MJD', band_colname, 'FLUXCAL', 'FLUXCALERR']] df_header = df_header.rename(columns={'SNID': 'object_id', 'SNTYPE': 'true_target', 'PEAKMJD': 'true_peakmjd', 'REDSHIFT_FINAL': 'true_z', 'MWEBV': 'mwebv'}) if ('true_target' in df_header.columns): print('has true target: {}'.format(df_header.true_target)) df_header.replace({'true_target': {120: 42, 20: 42, 121: 42, 21: 42, 122: 42, 22: 42, 130: 62, 30: 62, 131: 62, 31: 62, 101: 90, 1: 90, 102: 52, 2: 52, 104: 64, 4: 64, 103: 95, 3: 95, 191: 67, 91: 67}}, inplace=True) else: df_header['true_target'] = 
(len(df_header) * [0]) df_phot = df_phot.rename(columns={'SNID': 'object_id', 'MJD': 'mjd', band_colname: 'passband', 'FLUXCAL': 'flux', 'FLUXCALERR': 'flux_err'}) print(f"FOR PHIL: in SNANA FITS to csv: {np.unique(df_header['true_target'].values)}") return (df_header, df_phot, survey_name, np.unique(df_header['true_target'].values))
Load SNANA formatted data and cast it to a PANDAS dataframe Args: fname (str): path + name to PHOT.FITS file drop_separators (Boolean): if -777 are to be dropped Returns: (pandas.DataFrame) dataframe from PHOT.FITS file (with ID) (pandas.DataFrame) dataframe from HEAD.FITS file
pippin/external/SNANA_FITS_to_pd.py
read_fits
Samreay/Pippin
5
python
def read_fits(fname, drop_separators=False): 'Load SNANA formatted data and cast it to a PANDAS dataframe\n\n Args:\n fname (str): path + name to PHOT.FITS file\n drop_separators (Boolean): if -777 are to be dropped\n\n Returns:\n (pandas.DataFrame) dataframe from PHOT.FITS file (with ID)\n (pandas.DataFrame) dataframe from HEAD.FITS file\n ' dat = Table.read(fname, format='fits') df_phot = dat.to_pandas() if (df_phot.MJD.values[(- 1)] == (- 777.0)): df_phot = df_phot.drop(df_phot.index[(- 1)]) if (df_phot.MJD.values[0] == (- 777.0)): df_phot = df_phot.drop(df_phot.index[0]) metadata_hdu = fits.open(fname.replace('PHOT', 'HEAD')) survey_name = metadata_hdu[0].header['SURVEY'] header = Table.read(fname.replace('PHOT', 'HEAD'), format='fits') df_header = header.to_pandas() df_header['SNID'] = df_header['SNID'].astype(np.int32) arr_ID = np.zeros(len(df_phot), dtype=np.int32) arr_idx = np.where((df_phot['MJD'].values == (- 777.0)))[0] arr_idx = np.hstack((np.array([0]), arr_idx, np.array([len(df_phot)]))) for counter in range(1, len(arr_idx)): (start, end) = (arr_idx[(counter - 1)], arr_idx[counter]) arr_ID[start:end] = df_header.SNID.iloc[(counter - 1)] df_phot['SNID'] = arr_ID if drop_separators: df_phot = df_phot[(df_phot.MJD != (- 777.0))] band_colname = ('FLT' if ('FLT' in df_phot.columns) else 'BAND') df_header = df_header[['SNID', 'SNTYPE', 'PEAKMJD', 'REDSHIFT_FINAL', 'MWEBV']] df_phot = df_phot[['SNID', 'MJD', band_colname, 'FLUXCAL', 'FLUXCALERR']] df_header = df_header.rename(columns={'SNID': 'object_id', 'SNTYPE': 'true_target', 'PEAKMJD': 'true_peakmjd', 'REDSHIFT_FINAL': 'true_z', 'MWEBV': 'mwebv'}) if ('true_target' in df_header.columns): print('has true target: {}'.format(df_header.true_target)) df_header.replace({'true_target': {120: 42, 20: 42, 121: 42, 21: 42, 122: 42, 22: 42, 130: 62, 30: 62, 131: 62, 31: 62, 101: 90, 1: 90, 102: 52, 2: 52, 104: 64, 4: 64, 103: 95, 3: 95, 191: 67, 91: 67}}, inplace=True) else: df_header['true_target'] = 
(len(df_header) * [0]) df_phot = df_phot.rename(columns={'SNID': 'object_id', 'MJD': 'mjd', band_colname: 'passband', 'FLUXCAL': 'flux', 'FLUXCALERR': 'flux_err'}) print(f"FOR PHIL: in SNANA FITS to csv: {np.unique(df_header['true_target'].values)}") return (df_header, df_phot, survey_name, np.unique(df_header['true_target'].values))
def read_fits(fname, drop_separators=False): 'Load SNANA formatted data and cast it to a PANDAS dataframe\n\n Args:\n fname (str): path + name to PHOT.FITS file\n drop_separators (Boolean): if -777 are to be dropped\n\n Returns:\n (pandas.DataFrame) dataframe from PHOT.FITS file (with ID)\n (pandas.DataFrame) dataframe from HEAD.FITS file\n ' dat = Table.read(fname, format='fits') df_phot = dat.to_pandas() if (df_phot.MJD.values[(- 1)] == (- 777.0)): df_phot = df_phot.drop(df_phot.index[(- 1)]) if (df_phot.MJD.values[0] == (- 777.0)): df_phot = df_phot.drop(df_phot.index[0]) metadata_hdu = fits.open(fname.replace('PHOT', 'HEAD')) survey_name = metadata_hdu[0].header['SURVEY'] header = Table.read(fname.replace('PHOT', 'HEAD'), format='fits') df_header = header.to_pandas() df_header['SNID'] = df_header['SNID'].astype(np.int32) arr_ID = np.zeros(len(df_phot), dtype=np.int32) arr_idx = np.where((df_phot['MJD'].values == (- 777.0)))[0] arr_idx = np.hstack((np.array([0]), arr_idx, np.array([len(df_phot)]))) for counter in range(1, len(arr_idx)): (start, end) = (arr_idx[(counter - 1)], arr_idx[counter]) arr_ID[start:end] = df_header.SNID.iloc[(counter - 1)] df_phot['SNID'] = arr_ID if drop_separators: df_phot = df_phot[(df_phot.MJD != (- 777.0))] band_colname = ('FLT' if ('FLT' in df_phot.columns) else 'BAND') df_header = df_header[['SNID', 'SNTYPE', 'PEAKMJD', 'REDSHIFT_FINAL', 'MWEBV']] df_phot = df_phot[['SNID', 'MJD', band_colname, 'FLUXCAL', 'FLUXCALERR']] df_header = df_header.rename(columns={'SNID': 'object_id', 'SNTYPE': 'true_target', 'PEAKMJD': 'true_peakmjd', 'REDSHIFT_FINAL': 'true_z', 'MWEBV': 'mwebv'}) if ('true_target' in df_header.columns): print('has true target: {}'.format(df_header.true_target)) df_header.replace({'true_target': {120: 42, 20: 42, 121: 42, 21: 42, 122: 42, 22: 42, 130: 62, 30: 62, 131: 62, 31: 62, 101: 90, 1: 90, 102: 52, 2: 52, 104: 64, 4: 64, 103: 95, 3: 95, 191: 67, 91: 67}}, inplace=True) else: df_header['true_target'] = 
(len(df_header) * [0]) df_phot = df_phot.rename(columns={'SNID': 'object_id', 'MJD': 'mjd', band_colname: 'passband', 'FLUXCAL': 'flux', 'FLUXCALERR': 'flux_err'}) print(f"FOR PHIL: in SNANA FITS to csv: {np.unique(df_header['true_target'].values)}") return (df_header, df_phot, survey_name, np.unique(df_header['true_target'].values))<|docstring|>Load SNANA formatted data and cast it to a PANDAS dataframe Args: fname (str): path + name to PHOT.FITS file drop_separators (Boolean): if -777 are to be dropped Returns: (pandas.DataFrame) dataframe from PHOT.FITS file (with ID) (pandas.DataFrame) dataframe from HEAD.FITS file<|endoftext|>
5eb5e44f395242eb5fa4e3dac965244fb992f144d5266d8cf67421a8d9d0d384
def save_fits(df, fname): 'Save data frame in fits table\n\n Arguments:\n df {pandas.DataFrame} -- data to save\n fname {str} -- outname, must end in .FITS\n ' keep_cols = df.keys() df = df.reset_index() df = df[keep_cols] outtable = Table.from_pandas(df) Path(fname).parent.mkdir(parents=True, exist_ok=True) outtable.write(fname, format='fits', overwrite=True)
Save data frame in fits table Arguments: df {pandas.DataFrame} -- data to save fname {str} -- outname, must end in .FITS
pippin/external/SNANA_FITS_to_pd.py
save_fits
Samreay/Pippin
5
python
def save_fits(df, fname): 'Save data frame in fits table\n\n Arguments:\n df {pandas.DataFrame} -- data to save\n fname {str} -- outname, must end in .FITS\n ' keep_cols = df.keys() df = df.reset_index() df = df[keep_cols] outtable = Table.from_pandas(df) Path(fname).parent.mkdir(parents=True, exist_ok=True) outtable.write(fname, format='fits', overwrite=True)
def save_fits(df, fname): 'Save data frame in fits table\n\n Arguments:\n df {pandas.DataFrame} -- data to save\n fname {str} -- outname, must end in .FITS\n ' keep_cols = df.keys() df = df.reset_index() df = df[keep_cols] outtable = Table.from_pandas(df) Path(fname).parent.mkdir(parents=True, exist_ok=True) outtable.write(fname, format='fits', overwrite=True)<|docstring|>Save data frame in fits table Arguments: df {pandas.DataFrame} -- data to save fname {str} -- outname, must end in .FITS<|endoftext|>
e16a78f618300ec60704e7aedb18f7407ba0b36ed973dc6ae9b6f79432b341f0
@click.command() @click.option('-a', '--authority', help='Authority to filter by') @click.option('-e', '--entity', help='Entity to filter by') @click.option('-s', '--source', help='Source to filter by') @handle_cli_exceptions @command_with_output('sort_by(schemaInfos[].schemaIdentity[].{id:id,Authority:authority,Source:source,Entity:entityType,MajorVersion:schemaVersionMajor,MinorVersion:schemaVersionMinor,PatchVersion:schemaVersionPatch},&id)') def _click_command(state: State, authority: str=None, entity: str=None, source: str=None): 'List schemas' return schema_list(state, authority, entity, source)
List schemas
src/osducli/commands/schema/list.py
_click_command
equinor/osdu-cli
3
python
@click.command() @click.option('-a', '--authority', help='Authority to filter by') @click.option('-e', '--entity', help='Entity to filter by') @click.option('-s', '--source', help='Source to filter by') @handle_cli_exceptions @command_with_output('sort_by(schemaInfos[].schemaIdentity[].{id:id,Authority:authority,Source:source,Entity:entityType,MajorVersion:schemaVersionMajor,MinorVersion:schemaVersionMinor,PatchVersion:schemaVersionPatch},&id)') def _click_command(state: State, authority: str=None, entity: str=None, source: str=None): return schema_list(state, authority, entity, source)
@click.command() @click.option('-a', '--authority', help='Authority to filter by') @click.option('-e', '--entity', help='Entity to filter by') @click.option('-s', '--source', help='Source to filter by') @handle_cli_exceptions @command_with_output('sort_by(schemaInfos[].schemaIdentity[].{id:id,Authority:authority,Source:source,Entity:entityType,MajorVersion:schemaVersionMajor,MinorVersion:schemaVersionMinor,PatchVersion:schemaVersionPatch},&id)') def _click_command(state: State, authority: str=None, entity: str=None, source: str=None): return schema_list(state, authority, entity, source)<|docstring|>List schemas<|endoftext|>
85361a8456116875edcbdf5d083ce1ef2bb13960fa2a53dc2c3f3e62142d6e35
def schema_list(state: State, authority: str, entity: str, source: str): 'List schemas\n\n Args:\n state (State): Global state\n authority (str): Global state\n entity (str): Global state\n source (str): Global state\n ' connection = CliOsduClient(state.config) url = 'schema?limit=10000' if authority: url += ('&authority=' + quote_plus(authority)) if entity: url += ('&entity=' + quote_plus(entity)) if source: url += ('&source=' + quote_plus(source)) json = connection.cli_get_returning_json(CONFIG_SCHEMA_URL, url) return json
List schemas Args: state (State): Global state authority (str): Global state entity (str): Global state source (str): Global state
src/osducli/commands/schema/list.py
schema_list
equinor/osdu-cli
3
python
def schema_list(state: State, authority: str, entity: str, source: str): 'List schemas\n\n Args:\n state (State): Global state\n authority (str): Global state\n entity (str): Global state\n source (str): Global state\n ' connection = CliOsduClient(state.config) url = 'schema?limit=10000' if authority: url += ('&authority=' + quote_plus(authority)) if entity: url += ('&entity=' + quote_plus(entity)) if source: url += ('&source=' + quote_plus(source)) json = connection.cli_get_returning_json(CONFIG_SCHEMA_URL, url) return json
def schema_list(state: State, authority: str, entity: str, source: str): 'List schemas\n\n Args:\n state (State): Global state\n authority (str): Global state\n entity (str): Global state\n source (str): Global state\n ' connection = CliOsduClient(state.config) url = 'schema?limit=10000' if authority: url += ('&authority=' + quote_plus(authority)) if entity: url += ('&entity=' + quote_plus(entity)) if source: url += ('&source=' + quote_plus(source)) json = connection.cli_get_returning_json(CONFIG_SCHEMA_URL, url) return json<|docstring|>List schemas Args: state (State): Global state authority (str): Global state entity (str): Global state source (str): Global state<|endoftext|>
97d5e425bf652d5c4a821fb929d3c16158634e668affcda899eb1963ff05f2ed
def run(self, tmp=None, task_vars=None, *args, **kw): '.' if (task_vars is None): task_vars = dict() self._supports_check_mode = True self._supports_async = True result = super(ActionModule, self).run(tmp, task_vars) module = self._task.args.get('use', 'auto').lower() if (module == 'auto'): try: if self._task.delegate_to: module = self._templar.template(("{{hostvars['%s']['ansible_service_mgr']}}" % self._task.delegate_to)) else: module = self._templar.template('{{ansible_service_mgr}}') except Exception: pass if ((module == 'auto') or (module != 'systemd')): facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars) self._display.debug(('Facts %s' % facts)) if (('ansible_facts' in facts) and ('ansible_service_mgr' in facts['ansible_facts'])): module = facts['ansible_facts']['ansible_service_mgr'] if (module != 'systemd'): distrib = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_distribution*'), task_vars=task_vars).get('ansible_facts', {}) dv = distrib.get('ansible_distribution_version', '').lower() ldv = LooseVersion(dv) osd = distrib.get('ansible_distribution', '').lower() if (osd == 'ubuntu'): if (ldv >= LooseVersion('15.04')): module = 'systemd' elif (osd in ['linuxmint', 'mint']): if (ldv >= LooseVersion('18.00')): module = 'systemd' elif (osd in ['arch', 'archlinux']): module = 'systemd' elif (osd in ['fedora']): if (ldv >= LooseVersion('20')): module = 'systemd' elif (osd in ['redhat', 'red-hat']): if (ldv >= LooseVersion('7')): module = 'systemd' elif (osd == 'centos'): if (ldv >= LooseVersion('7')): module = 'systemd' elif (osd == 'debian'): if (ldv >= LooseVersion('8')): module = 'systemd' if ((not module) or (module == 'auto') or (module not in self._shared_loader_obj.module_loader)): module = 'service' if (module != 'auto'): new_module_args = self._task.args.copy() if (module == 'systemd'): margs = (self._task, 
self._connection, self._play_context, self._loader, self._templar, self._shared_loader_obj) self._ah = self._shared_loader_obj.action_loader.get('cops_actionhelper', *margs) self._ah.set_forwarded_task_vars(task_vars) ret1 = self._ah.exec_command('systemctl --no-pager is-system-running') ret2 = self._ah.exec_command('systemctl --no-pager --version 2>&1') if (('offline' in ret1['stdout']) or DBUS_RE.search(ret1['stderr']) or ((ret2['rc'] == 0) and ('systemd' not in ret2['stdout'].lower()))): module = 'cops_systemd' if ('use' in new_module_args): del new_module_args['use'] if ('state' in new_module_args): if (new_module_args['state'] == 'loaded'): new_module_args['state'] = 'reloaded' if (new_module_args['state'] == 'running'): new_module_args['state'] = 'started' if (module in self.UNUSED_PARAMS): for unused in self.UNUSED_PARAMS[module]: if (unused in new_module_args): del new_module_args[unused] self._display.warning(('Ignoring "%s" as it is not used in "%s"' % (unused, module))) self._display.vvvv(('Running %s' % module)) result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars)) else: result['failed'] = True result['msg'] = 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.' return result
.
service/action_plugins/cops_service.py
run
corpusops/roles
0
python
def run(self, tmp=None, task_vars=None, *args, **kw): if (task_vars is None): task_vars = dict() self_supports_check_mode = True self_supports_async = True result = super(ActionModule, self)run(tmp, task_vars) module = self_taskargsget('use', 'auto')lower() if (module == 'auto'): try: if self_taskdelegate_to: module = self_templartemplate(("{{hostvars['%s']['ansible_service_mgr']}}" % self_taskdelegate_to)) else: module = self_templartemplate('{{ansible_service_mgr}}') except Exception: pass if ((module == 'auto') or (module != 'systemd')): facts = self_execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars) self_displaydebug(('Facts %s' % facts)) if (('ansible_facts' in facts) and ('ansible_service_mgr' in facts['ansible_facts'])): module = facts['ansible_facts']['ansible_service_mgr'] if (module != 'systemd'): distrib = self_execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_distribution*'), task_vars=task_vars)get('ansible_facts', {}) dv = distribget('ansible_distribution_version', )lower() ldv = LooseVersion(dv) osd = distribget('ansible_distribution', )lower() if (osd == 'ubuntu'): if (ldv >= LooseVersion('1504')): module = 'systemd' elif (osd in ['linuxmint', 'mint']): if (ldv >= LooseVersion('1800')): module = 'systemd' elif (osd in ['arch', 'archlinux']): module = 'systemd' elif (osd in ['fedora']): if (ldv >= LooseVersion('20')): module = 'systemd' elif (osd in ['redhat', 'red-hat']): if (ldv >= LooseVersion('7')): module = 'systemd' elif (osd == 'centos'): if (ldv >= LooseVersion('7')): module = 'systemd' elif (osd == 'debian'): if (ldv >= LooseVersion('8')): module = 'systemd' if ((not module) or (module == 'auto') or (module not in self_shared_loader_objmodule_loader)): module = 'service' if (module != 'auto'): new_module_args = self_taskargscopy() if (module == 'systemd'): margs = (self_task, self_connection, self_play_context, self_loader, 
self_templar, self_shared_loader_obj) self_ah = self_shared_loader_objaction_loaderget('cops_actionhelper', *margs) self_ahset_forwarded_task_vars(task_vars) ret1 = self_ahexec_command('systemctl --no-pager is-system-running') ret2 = self_ahexec_command('systemctl --no-pager --version 2>&1') if (('offline' in ret1['stdout']) or DBUS_REsearch(ret1['stderr']) or ((ret2['rc'] == 0) and ('systemd' not in ret2['stdout']lower()))): module = 'cops_systemd' if ('use' in new_module_args): del new_module_args['use'] if ('state' in new_module_args): if (new_module_args['state'] == 'loaded'): new_module_args['state'] = 'reloaded' if (new_module_args['state'] == 'running'): new_module_args['state'] = 'started' if (module in selfUNUSED_PARAMS): for unused in selfUNUSED_PARAMS[module]: if (unused in new_module_args): del new_module_args[unused] self_displaywarning(('Ignoring "%s" as it is not used in "%s"' % (unused, module))) self_displayvvvv(('Running %s' % module)) resultupdate(self_execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars)) else: result['failed'] = True result['msg'] = 'Could not detect which service manager to use Try gathering facts or setting the "use" option' return result
def run(self, tmp=None, task_vars=None, *args, **kw): if (task_vars is None): task_vars = dict() self_supports_check_mode = True self_supports_async = True result = super(ActionModule, self)run(tmp, task_vars) module = self_taskargsget('use', 'auto')lower() if (module == 'auto'): try: if self_taskdelegate_to: module = self_templartemplate(("{{hostvars['%s']['ansible_service_mgr']}}" % self_taskdelegate_to)) else: module = self_templartemplate('{{ansible_service_mgr}}') except Exception: pass if ((module == 'auto') or (module != 'systemd')): facts = self_execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars) self_displaydebug(('Facts %s' % facts)) if (('ansible_facts' in facts) and ('ansible_service_mgr' in facts['ansible_facts'])): module = facts['ansible_facts']['ansible_service_mgr'] if (module != 'systemd'): distrib = self_execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_distribution*'), task_vars=task_vars)get('ansible_facts', {}) dv = distribget('ansible_distribution_version', )lower() ldv = LooseVersion(dv) osd = distribget('ansible_distribution', )lower() if (osd == 'ubuntu'): if (ldv >= LooseVersion('1504')): module = 'systemd' elif (osd in ['linuxmint', 'mint']): if (ldv >= LooseVersion('1800')): module = 'systemd' elif (osd in ['arch', 'archlinux']): module = 'systemd' elif (osd in ['fedora']): if (ldv >= LooseVersion('20')): module = 'systemd' elif (osd in ['redhat', 'red-hat']): if (ldv >= LooseVersion('7')): module = 'systemd' elif (osd == 'centos'): if (ldv >= LooseVersion('7')): module = 'systemd' elif (osd == 'debian'): if (ldv >= LooseVersion('8')): module = 'systemd' if ((not module) or (module == 'auto') or (module not in self_shared_loader_objmodule_loader)): module = 'service' if (module != 'auto'): new_module_args = self_taskargscopy() if (module == 'systemd'): margs = (self_task, self_connection, self_play_context, self_loader, 
self_templar, self_shared_loader_obj) self_ah = self_shared_loader_objaction_loaderget('cops_actionhelper', *margs) self_ahset_forwarded_task_vars(task_vars) ret1 = self_ahexec_command('systemctl --no-pager is-system-running') ret2 = self_ahexec_command('systemctl --no-pager --version 2>&1') if (('offline' in ret1['stdout']) or DBUS_REsearch(ret1['stderr']) or ((ret2['rc'] == 0) and ('systemd' not in ret2['stdout']lower()))): module = 'cops_systemd' if ('use' in new_module_args): del new_module_args['use'] if ('state' in new_module_args): if (new_module_args['state'] == 'loaded'): new_module_args['state'] = 'reloaded' if (new_module_args['state'] == 'running'): new_module_args['state'] = 'started' if (module in selfUNUSED_PARAMS): for unused in selfUNUSED_PARAMS[module]: if (unused in new_module_args): del new_module_args[unused] self_displaywarning(('Ignoring "%s" as it is not used in "%s"' % (unused, module))) self_displayvvvv(('Running %s' % module)) resultupdate(self_execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars)) else: result['failed'] = True result['msg'] = 'Could not detect which service manager to use Try gathering facts or setting the "use" option' return result<|docstring|>.<|endoftext|>
8bac40f34c3dbf14e9f94667639499b52a1f2040ea14c676f8258c46d0cfc246
def find_all_images(rootdir): ' Find all the images under rootdir ' return (p for p in sorted(rootdir.iterdir()) if (p.is_file() and (p.suffix in ('.tif', '.png'))))
Find all the images under rootdir
cell_locator.py
find_all_images
david-a-joy/hipsc-cell-locator
0
python
def find_all_images(rootdir): ' ' return (p for p in sorted(rootdir.iterdir()) if (p.is_file() and (p.suffix in ('.tif', '.png'))))
def find_all_images(rootdir): ' ' return (p for p in sorted(rootdir.iterdir()) if (p.is_file() and (p.suffix in ('.tif', '.png'))))<|docstring|>Find all the images under rootdir<|endoftext|>
f2d9e47b65a4c5a4b55e816d62415358d66bb3383171170361938836a8fe706b
def add(self, x=0.0, y=0.0): ' Add a cross at these coordinates ' bbox = self.window.fig.get_window_extent().bounds (x0, y0, width, height) = bbox horz_line = Line2D([x0, (x0 + width)], [y, y], linewidth=2, linestyle='--', color=(0.6, 0.6, 0.6)) vert_line = Line2D([x, x], [y0, (y0 + height)], linewidth=2, linestyle='--', color=(0.6, 0.6, 0.6)) horz_line.set_animated(True) vert_line.set_animated(True) self.cur_region = self.window.canvas.copy_from_bbox(self.window.fig.bbox) self.window.fig.lines.append(horz_line) self.window.fig.lines.append(vert_line) self.cur_cross = (horz_line, vert_line)
Add a cross at these coordinates
cell_locator.py
add
david-a-joy/hipsc-cell-locator
0
python
def add(self, x=0.0, y=0.0): ' ' bbox = self.window.fig.get_window_extent().bounds (x0, y0, width, height) = bbox horz_line = Line2D([x0, (x0 + width)], [y, y], linewidth=2, linestyle='--', color=(0.6, 0.6, 0.6)) vert_line = Line2D([x, x], [y0, (y0 + height)], linewidth=2, linestyle='--', color=(0.6, 0.6, 0.6)) horz_line.set_animated(True) vert_line.set_animated(True) self.cur_region = self.window.canvas.copy_from_bbox(self.window.fig.bbox) self.window.fig.lines.append(horz_line) self.window.fig.lines.append(vert_line) self.cur_cross = (horz_line, vert_line)
def add(self, x=0.0, y=0.0): ' ' bbox = self.window.fig.get_window_extent().bounds (x0, y0, width, height) = bbox horz_line = Line2D([x0, (x0 + width)], [y, y], linewidth=2, linestyle='--', color=(0.6, 0.6, 0.6)) vert_line = Line2D([x, x], [y0, (y0 + height)], linewidth=2, linestyle='--', color=(0.6, 0.6, 0.6)) horz_line.set_animated(True) vert_line.set_animated(True) self.cur_region = self.window.canvas.copy_from_bbox(self.window.fig.bbox) self.window.fig.lines.append(horz_line) self.window.fig.lines.append(vert_line) self.cur_cross = (horz_line, vert_line)<|docstring|>Add a cross at these coordinates<|endoftext|>
a914777a256ec63b8aea5794feead13f4bc70d76dbc5abe4b892d271321c9ff2
def get_color(self, sel_class=None): ' Get the color for the current box ' if (sel_class is None): sel_class = self.cur_sel_class return SEL_CLASS_COLORS[sel_class]
Get the color for the current box
cell_locator.py
get_color
david-a-joy/hipsc-cell-locator
0
python
def get_color(self, sel_class=None): ' ' if (sel_class is None): sel_class = self.cur_sel_class return SEL_CLASS_COLORS[sel_class]
def get_color(self, sel_class=None): ' ' if (sel_class is None): sel_class = self.cur_sel_class return SEL_CLASS_COLORS[sel_class]<|docstring|>Get the color for the current box<|endoftext|>
e0b571b2078b9b9a2f24b34ed1da3dfe65ecb3356b8dc96355d9eb7bc1462723
def load_window(self): ' Create the figure, axis, and canvas ' window = plt.get_current_fig_manager().window (screen_x, screen_y) = (None, None) (screen_x, screen_y) = window.wm_maxsize() print('Screen: {}x{}'.format(screen_x, screen_y)) self.dpi = int(mpl.rcParams['figure.dpi']) print('DPI: {}'.format(self.dpi)) figsize = ((screen_x / self.dpi), (screen_y / self.dpi)) self.fig = plt.gcf() self.fig.set_size_inches(figsize[0], figsize[1]) self.fig.canvas.set_window_title('Cell Locator') try: window.state('zoomed') except tkinter.TclError: window.state('normal') plt.draw() self.ax = self.fig.gca() self.canvas = self.fig.canvas if NIGHT_MODE: self.fig.patch.set_facecolor('black') self.canvas.mpl_disconnect(self.canvas.manager.key_press_handler_id) self.ax_img = None
Create the figure, axis, and canvas
cell_locator.py
load_window
david-a-joy/hipsc-cell-locator
0
python
def load_window(self): ' ' window = plt.get_current_fig_manager().window (screen_x, screen_y) = (None, None) (screen_x, screen_y) = window.wm_maxsize() print('Screen: {}x{}'.format(screen_x, screen_y)) self.dpi = int(mpl.rcParams['figure.dpi']) print('DPI: {}'.format(self.dpi)) figsize = ((screen_x / self.dpi), (screen_y / self.dpi)) self.fig = plt.gcf() self.fig.set_size_inches(figsize[0], figsize[1]) self.fig.canvas.set_window_title('Cell Locator') try: window.state('zoomed') except tkinter.TclError: window.state('normal') plt.draw() self.ax = self.fig.gca() self.canvas = self.fig.canvas if NIGHT_MODE: self.fig.patch.set_facecolor('black') self.canvas.mpl_disconnect(self.canvas.manager.key_press_handler_id) self.ax_img = None
def load_window(self): ' ' window = plt.get_current_fig_manager().window (screen_x, screen_y) = (None, None) (screen_x, screen_y) = window.wm_maxsize() print('Screen: {}x{}'.format(screen_x, screen_y)) self.dpi = int(mpl.rcParams['figure.dpi']) print('DPI: {}'.format(self.dpi)) figsize = ((screen_x / self.dpi), (screen_y / self.dpi)) self.fig = plt.gcf() self.fig.set_size_inches(figsize[0], figsize[1]) self.fig.canvas.set_window_title('Cell Locator') try: window.state('zoomed') except tkinter.TclError: window.state('normal') plt.draw() self.ax = self.fig.gca() self.canvas = self.fig.canvas if NIGHT_MODE: self.fig.patch.set_facecolor('black') self.canvas.mpl_disconnect(self.canvas.manager.key_press_handler_id) self.ax_img = None<|docstring|>Create the figure, axis, and canvas<|endoftext|>
3ccc81e3b1616633cb28aa45be46a715b46206ac0f334707969509eb36aaa645
def load_image(self, step=1): ' Load the next image ' self.cur_record_idx = ((self.cur_record_idx + step) % len(self.records)) self.cur_record = self.records[self.cur_record_idx] self.cur_record_start = time.monotonic() img = Image.open(str(self.cur_record)) img = np.asarray(img) if (img.ndim == 2): img = np.stack([img, img, img], axis=2) elif (img.shape[2] == 1): img = np.concatenate([img, img, img], axis=2) assert (img.ndim == 3) assert (img.shape[2] == 3) self.cur_image = img if (self.ax_img is None): self.ax_img = self.ax.imshow(self.cur_image, aspect='equal') else: (rows, cols) = img.shape[:2] self.ax_img.set_data(self.cur_image) self.ax_img.set_extent((0, cols, rows, 0)) self.ax.set_xticks([]) self.ax.set_yticks([]) plt.tight_layout()
Load the next image
cell_locator.py
load_image
david-a-joy/hipsc-cell-locator
0
python
def load_image(self, step=1): ' ' self.cur_record_idx = ((self.cur_record_idx + step) % len(self.records)) self.cur_record = self.records[self.cur_record_idx] self.cur_record_start = time.monotonic() img = Image.open(str(self.cur_record)) img = np.asarray(img) if (img.ndim == 2): img = np.stack([img, img, img], axis=2) elif (img.shape[2] == 1): img = np.concatenate([img, img, img], axis=2) assert (img.ndim == 3) assert (img.shape[2] == 3) self.cur_image = img if (self.ax_img is None): self.ax_img = self.ax.imshow(self.cur_image, aspect='equal') else: (rows, cols) = img.shape[:2] self.ax_img.set_data(self.cur_image) self.ax_img.set_extent((0, cols, rows, 0)) self.ax.set_xticks([]) self.ax.set_yticks([]) plt.tight_layout()
def load_image(self, step=1): ' ' self.cur_record_idx = ((self.cur_record_idx + step) % len(self.records)) self.cur_record = self.records[self.cur_record_idx] self.cur_record_start = time.monotonic() img = Image.open(str(self.cur_record)) img = np.asarray(img) if (img.ndim == 2): img = np.stack([img, img, img], axis=2) elif (img.shape[2] == 1): img = np.concatenate([img, img, img], axis=2) assert (img.ndim == 3) assert (img.shape[2] == 3) self.cur_image = img if (self.ax_img is None): self.ax_img = self.ax.imshow(self.cur_image, aspect='equal') else: (rows, cols) = img.shape[:2] self.ax_img.set_data(self.cur_image) self.ax_img.set_extent((0, cols, rows, 0)) self.ax.set_xticks([]) self.ax.set_yticks([]) plt.tight_layout()<|docstring|>Load the next image<|endoftext|>
9b3379b6920d5f39de8c89f2b28768868c67e45c5d89fcfbf51bfa30f934a085
def load_bounds(self): ' Calculate absolute bounds ' ax_tight_bbox = self.ax.get_tightbbox(self.canvas) im_bbox = ((ax_tight_bbox.x0, ax_tight_bbox.y0), (ax_tight_bbox.x1, ax_tight_bbox.y1)) aspect = self.ax.get_data_ratio() self.shape_manager.load_axis_bounds(im_bbox, aspect)
Calculate absolute bounds
cell_locator.py
load_bounds
david-a-joy/hipsc-cell-locator
0
python
def load_bounds(self): ' ' ax_tight_bbox = self.ax.get_tightbbox(self.canvas) im_bbox = ((ax_tight_bbox.x0, ax_tight_bbox.y0), (ax_tight_bbox.x1, ax_tight_bbox.y1)) aspect = self.ax.get_data_ratio() self.shape_manager.load_axis_bounds(im_bbox, aspect)
def load_bounds(self): ' ' ax_tight_bbox = self.ax.get_tightbbox(self.canvas) im_bbox = ((ax_tight_bbox.x0, ax_tight_bbox.y0), (ax_tight_bbox.x1, ax_tight_bbox.y1)) aspect = self.ax.get_data_ratio() self.shape_manager.load_axis_bounds(im_bbox, aspect)<|docstring|>Calculate absolute bounds<|endoftext|>
893ffdba25277c38fc0c713547b9a31b32286818e6305e6800eb6b27750c1142
def clear_shapes(self, draw=True): ' Clear all the rects ' self.shape_manager.on_clear_all() if draw: self.canvas.draw()
Clear all the rects
cell_locator.py
clear_shapes
david-a-joy/hipsc-cell-locator
0
python
def clear_shapes(self, draw=True): ' ' self.shape_manager.on_clear_all() if draw: self.canvas.draw()
def clear_shapes(self, draw=True): ' ' self.shape_manager.on_clear_all() if draw: self.canvas.draw()<|docstring|>Clear all the rects<|endoftext|>
f870958b7d22c85b79050035f722868337d23001d5951f169874fca53768736b
def load_points(self): ' Load the points from the database ' points = self.db.find_points(self.cur_filepath) for (p_class, px, py) in points: self.shape_manager.on_point_complete(p_class, px, py) self.canvas.draw()
Load the points from the database
cell_locator.py
load_points
david-a-joy/hipsc-cell-locator
0
python
def load_points(self): ' ' points = self.db.find_points(self.cur_filepath) for (p_class, px, py) in points: self.shape_manager.on_point_complete(p_class, px, py) self.canvas.draw()
def load_points(self): ' ' points = self.db.find_points(self.cur_filepath) for (p_class, px, py) in points: self.shape_manager.on_point_complete(p_class, px, py) self.canvas.draw()<|docstring|>Load the points from the database<|endoftext|>
396e81fba3d8fa30519061e17b0c266f32f80a35f1bd39130b65e97b36a385df
def save_points(self): ' Save the selected points to the database ' points = self.shape_manager.points classes = [s.sel_class for s in points] points = [(s.x, s.y) for s in points] self.db.set_points(self.cur_filepath, classes=classes, points=points) self.db.add_view(self.cur_filepath, self.cur_record_start, time.monotonic())
Save the selected points to the database
cell_locator.py
save_points
david-a-joy/hipsc-cell-locator
0
python
def save_points(self): ' ' points = self.shape_manager.points classes = [s.sel_class for s in points] points = [(s.x, s.y) for s in points] self.db.set_points(self.cur_filepath, classes=classes, points=points) self.db.add_view(self.cur_filepath, self.cur_record_start, time.monotonic())
def save_points(self): ' ' points = self.shape_manager.points classes = [s.sel_class for s in points] points = [(s.x, s.y) for s in points] self.db.set_points(self.cur_filepath, classes=classes, points=points) self.db.add_view(self.cur_filepath, self.cur_record_start, time.monotonic())<|docstring|>Save the selected points to the database<|endoftext|>
76dcf422fc07f445aacb3fa893f676881c22e244130ffea10bf323375f1db607
def draw_point(self, point_obj): ' Draw a single point ' if (point_obj in self.points): return (p_class, px, py) = point_obj (fx, fy) = self.shape_manager.warp_to_figure(px, py) p_color = self.get_color(p_class) bbox = self.fig.get_window_extent().bounds (x0, y0, _, _) = bbox line = Line2D([(fx + x0)], [(fy + y0)], markersize=self.markersize, linestyle='-', marker='o', color=p_color) self.fig.lines.append(line) self.points[point_obj] = line
Draw a single point
cell_locator.py
draw_point
david-a-joy/hipsc-cell-locator
0
python
def draw_point(self, point_obj): ' ' if (point_obj in self.points): return (p_class, px, py) = point_obj (fx, fy) = self.shape_manager.warp_to_figure(px, py) p_color = self.get_color(p_class) bbox = self.fig.get_window_extent().bounds (x0, y0, _, _) = bbox line = Line2D([(fx + x0)], [(fy + y0)], markersize=self.markersize, linestyle='-', marker='o', color=p_color) self.fig.lines.append(line) self.points[point_obj] = line
def draw_point(self, point_obj): ' ' if (point_obj in self.points): return (p_class, px, py) = point_obj (fx, fy) = self.shape_manager.warp_to_figure(px, py) p_color = self.get_color(p_class) bbox = self.fig.get_window_extent().bounds (x0, y0, _, _) = bbox line = Line2D([(fx + x0)], [(fy + y0)], markersize=self.markersize, linestyle='-', marker='o', color=p_color) self.fig.lines.append(line) self.points[point_obj] = line<|docstring|>Draw a single point<|endoftext|>
4d0a33c6ec29003ce709eba09bbbe02ba4b139a6dc61e291ff1892a24b82b5cd
def remove_point(self, point_obj): ' Remove a single point ' if (point_obj not in self.points): return line = self.points[point_obj] self.fig.lines.remove(line) del self.points[point_obj]
Remove a single point
cell_locator.py
remove_point
david-a-joy/hipsc-cell-locator
0
python
def remove_point(self, point_obj): ' ' if (point_obj not in self.points): return line = self.points[point_obj] self.fig.lines.remove(line) del self.points[point_obj]
def remove_point(self, point_obj): ' ' if (point_obj not in self.points): return line = self.points[point_obj] self.fig.lines.remove(line) del self.points[point_obj]<|docstring|>Remove a single point<|endoftext|>
f96496d0ff110b32c5130dd8b8208c298adf147f4c4b2cc6ff6df7f840a890c8
def load_last_index(self): ' Work out the index of the last image loaded ' last_record = self.db.get_last_viewed() if (last_record is not None): last_record = last_record[0] last_index = [i for (i, r) in enumerate(self.records) if (r.name == last_record)] if (len(last_index) != 1): cur_record_idx = 0 else: cur_record_idx = last_index[0] self.cur_record_idx = cur_record_idx
Work out the index of the last image loaded
cell_locator.py
load_last_index
david-a-joy/hipsc-cell-locator
0
python
def load_last_index(self): ' ' last_record = self.db.get_last_viewed() if (last_record is not None): last_record = last_record[0] last_index = [i for (i, r) in enumerate(self.records) if (r.name == last_record)] if (len(last_index) != 1): cur_record_idx = 0 else: cur_record_idx = last_index[0] self.cur_record_idx = cur_record_idx
def load_last_index(self): ' ' last_record = self.db.get_last_viewed() if (last_record is not None): last_record = last_record[0] last_index = [i for (i, r) in enumerate(self.records) if (r.name == last_record)] if (len(last_index) != 1): cur_record_idx = 0 else: cur_record_idx = last_index[0] self.cur_record_idx = cur_record_idx<|docstring|>Work out the index of the last image loaded<|endoftext|>
394b1685e6343c19f22bf8cf4891464488fad257bdf370e65c344ad06667adfc
def load_next_record(self, step=1): ' Load the next image tile ' self.points = {} self.cur_record = None self.shape_manager.on_reset_actions() self.load_image(step=step) if (self.cur_record is None): print('No more records to process...') plt.close() return self.load_bounds() self.load_points() self.canvas.draw()
Load the next image tile
cell_locator.py
load_next_record
david-a-joy/hipsc-cell-locator
0
python
def load_next_record(self, step=1): ' ' self.points = {} self.cur_record = None self.shape_manager.on_reset_actions() self.load_image(step=step) if (self.cur_record is None): print('No more records to process...') plt.close() return self.load_bounds() self.load_points() self.canvas.draw()
def load_next_record(self, step=1): ' ' self.points = {} self.cur_record = None self.shape_manager.on_reset_actions() self.load_image(step=step) if (self.cur_record is None): print('No more records to process...') plt.close() return self.load_bounds() self.load_points() self.canvas.draw()<|docstring|>Load the next image tile<|endoftext|>
71cbd29b55654124af697eb8e02a05203c08bc64baa4130105ec94faa730a303
def maybe_draw_encouragement(self): ' Try to draw a screen to encourage the user ' if (self.display_mode != 'normal'): return annotated_records = self.db.find_annotated_records() new_records = (annotated_records - self.annotated_records) if (new_records == set()): return milestones = [float(p.stem) for p in self.imagedir.iterdir() if (p.suffix == '.jpg')] pct_new = ((len(annotated_records) / len(self.records)) * 100) pct_old = ((len(self.annotated_records) / len(self.records)) * 100) print('{:0.1f}% done!'.format(pct_new)) new_milestone = None for milestone in milestones: if ((pct_new >= milestone) and (pct_old < milestone)): new_milestone = milestone break self.annotated_records = annotated_records if (new_milestone is None): return image_file = (self.imagedir / '{:d}.jpg'.format(int(round(new_milestone)))) img = np.asarray(Image.open(str(image_file))) (rows, cols, _) = img.shape self.display_mode = ('encouragement' if (new_milestone < 100) else 'finished') encourage_objects = [] bbox = self.fig.get_window_extent().bounds (x0, y0, x1, y1) = bbox xct = ((x1 + x0) / 2) yct = ((y1 + y0) / 2) bg_patch = Rectangle((x0, y0), (x1 - x0), (y1 - y0), fill=True, alpha=0.9, color=(0, 0, 0), zorder=99) encourage_objects.append(bg_patch) self.fig.patches.append(bg_patch) title = self.fig.text(0.5, 0.9, '{:1.0f}% Complete!'.format(new_milestone), color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=32) encourage_objects.append(title) if (new_milestone >= 100): enc_text = self.fig.text(0.5, 0.1, 'Press any key to exit', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) else: enc_text = self.fig.text(0.5, 0.1, 'Press any key to continue', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) encourage_objects.append(enc_text) yext = (abs((y1 - y0)) * 0.65) xext = ((cols / rows) * yext) simg = Image.fromarray(img) simg = 
simg.resize((int(np.floor(xext)), int(np.floor(yext)))) simg = np.asarray(simg) (srows, scols, _) = simg.shape enc_img = self.fig.figimage(simg, xo=(xct - (scols // 2)), yo=(yct - (srows // 2)), zorder=100, alpha=1.0) encourage_objects.append(enc_img) self.encourage_objects = encourage_objects plt.draw()
Try to draw a screen to encourage the user
cell_locator.py
maybe_draw_encouragement
david-a-joy/hipsc-cell-locator
0
python
def maybe_draw_encouragement(self): ' ' if (self.display_mode != 'normal'): return annotated_records = self.db.find_annotated_records() new_records = (annotated_records - self.annotated_records) if (new_records == set()): return milestones = [float(p.stem) for p in self.imagedir.iterdir() if (p.suffix == '.jpg')] pct_new = ((len(annotated_records) / len(self.records)) * 100) pct_old = ((len(self.annotated_records) / len(self.records)) * 100) print('{:0.1f}% done!'.format(pct_new)) new_milestone = None for milestone in milestones: if ((pct_new >= milestone) and (pct_old < milestone)): new_milestone = milestone break self.annotated_records = annotated_records if (new_milestone is None): return image_file = (self.imagedir / '{:d}.jpg'.format(int(round(new_milestone)))) img = np.asarray(Image.open(str(image_file))) (rows, cols, _) = img.shape self.display_mode = ('encouragement' if (new_milestone < 100) else 'finished') encourage_objects = [] bbox = self.fig.get_window_extent().bounds (x0, y0, x1, y1) = bbox xct = ((x1 + x0) / 2) yct = ((y1 + y0) / 2) bg_patch = Rectangle((x0, y0), (x1 - x0), (y1 - y0), fill=True, alpha=0.9, color=(0, 0, 0), zorder=99) encourage_objects.append(bg_patch) self.fig.patches.append(bg_patch) title = self.fig.text(0.5, 0.9, '{:1.0f}% Complete!'.format(new_milestone), color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=32) encourage_objects.append(title) if (new_milestone >= 100): enc_text = self.fig.text(0.5, 0.1, 'Press any key to exit', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) else: enc_text = self.fig.text(0.5, 0.1, 'Press any key to continue', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) encourage_objects.append(enc_text) yext = (abs((y1 - y0)) * 0.65) xext = ((cols / rows) * yext) simg = Image.fromarray(img) simg = simg.resize((int(np.floor(xext)), 
int(np.floor(yext)))) simg = np.asarray(simg) (srows, scols, _) = simg.shape enc_img = self.fig.figimage(simg, xo=(xct - (scols // 2)), yo=(yct - (srows // 2)), zorder=100, alpha=1.0) encourage_objects.append(enc_img) self.encourage_objects = encourage_objects plt.draw()
def maybe_draw_encouragement(self): ' ' if (self.display_mode != 'normal'): return annotated_records = self.db.find_annotated_records() new_records = (annotated_records - self.annotated_records) if (new_records == set()): return milestones = [float(p.stem) for p in self.imagedir.iterdir() if (p.suffix == '.jpg')] pct_new = ((len(annotated_records) / len(self.records)) * 100) pct_old = ((len(self.annotated_records) / len(self.records)) * 100) print('{:0.1f}% done!'.format(pct_new)) new_milestone = None for milestone in milestones: if ((pct_new >= milestone) and (pct_old < milestone)): new_milestone = milestone break self.annotated_records = annotated_records if (new_milestone is None): return image_file = (self.imagedir / '{:d}.jpg'.format(int(round(new_milestone)))) img = np.asarray(Image.open(str(image_file))) (rows, cols, _) = img.shape self.display_mode = ('encouragement' if (new_milestone < 100) else 'finished') encourage_objects = [] bbox = self.fig.get_window_extent().bounds (x0, y0, x1, y1) = bbox xct = ((x1 + x0) / 2) yct = ((y1 + y0) / 2) bg_patch = Rectangle((x0, y0), (x1 - x0), (y1 - y0), fill=True, alpha=0.9, color=(0, 0, 0), zorder=99) encourage_objects.append(bg_patch) self.fig.patches.append(bg_patch) title = self.fig.text(0.5, 0.9, '{:1.0f}% Complete!'.format(new_milestone), color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=32) encourage_objects.append(title) if (new_milestone >= 100): enc_text = self.fig.text(0.5, 0.1, 'Press any key to exit', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) else: enc_text = self.fig.text(0.5, 0.1, 'Press any key to continue', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) encourage_objects.append(enc_text) yext = (abs((y1 - y0)) * 0.65) xext = ((cols / rows) * yext) simg = Image.fromarray(img) simg = simg.resize((int(np.floor(xext)), 
int(np.floor(yext)))) simg = np.asarray(simg) (srows, scols, _) = simg.shape enc_img = self.fig.figimage(simg, xo=(xct - (scols // 2)), yo=(yct - (srows // 2)), zorder=100, alpha=1.0) encourage_objects.append(enc_img) self.encourage_objects = encourage_objects plt.draw()<|docstring|>Try to draw a screen to encourage the user<|endoftext|>
9cb3ede4e0b89491bf965be81d1fa258a4010fe1c5cbbbbb3a1bfadd6ca8d235
def clear_encouragement(self): ' Clear the encouragement display ' self.display_mode = 'normal' for obj in self.encourage_objects: if (obj in self.fig.patches): self.fig.patches.remove(obj) if (obj in self.fig.texts): self.fig.texts.remove(obj) if (obj in self.fig.images): self.fig.images.remove(obj) self.encourage_objects = [] plt.draw()
Clear the encouragement display
cell_locator.py
clear_encouragement
david-a-joy/hipsc-cell-locator
0
python
def clear_encouragement(self): ' ' self.display_mode = 'normal' for obj in self.encourage_objects: if (obj in self.fig.patches): self.fig.patches.remove(obj) if (obj in self.fig.texts): self.fig.texts.remove(obj) if (obj in self.fig.images): self.fig.images.remove(obj) self.encourage_objects = [] plt.draw()
def clear_encouragement(self): ' ' self.display_mode = 'normal' for obj in self.encourage_objects: if (obj in self.fig.patches): self.fig.patches.remove(obj) if (obj in self.fig.texts): self.fig.texts.remove(obj) if (obj in self.fig.images): self.fig.images.remove(obj) self.encourage_objects = [] plt.draw()<|docstring|>Clear the encouragement display<|endoftext|>
b1175cc172cd715f14be0d4c2409c19d19c809c469e37970e088b7e0cf962f06
def draw_help(self): ' Draw the help overlay ' if (self.display_mode != 'normal'): return self.display_mode = 'help' help_objects = [] bbox = self.fig.get_window_extent().bounds (x0, y0, x1, y1) = bbox bg_patch = Rectangle((x0, y0), (x1 - x0), (y1 - y0), fill=True, alpha=0.9, color=(0, 0, 0), zorder=99) help_objects.append(bg_patch) self.fig.patches.append(bg_patch) title = self.fig.text(0.5, 0.9, 'Cell Locator', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=32) help_objects.append(title) help_text = self.fig.text(0.5, 0.75, 'Select the center of each cell', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) if (UNAME == 'darwin'): help_text = self.fig.text(0.2, 0.6, 'Single Press: Select a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) else: help_text = self.fig.text(0.2, 0.6, 'Left Click: Select a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) if (UNAME == 'darwin'): help_text = self.fig.text(0.2, 0.5, 'Double Press: Unselect a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) else: help_text = self.fig.text(0.2, 0.5, 'Right Click: Unselect a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.2, 0.4, 'Spacebar: Save and Next Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.2, 0.3, 'Escape: Save and Exit', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) 
help_objects.append(help_text) help_text = self.fig.text(0.55, 0.6, 'Left Arrow: Previous Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.55, 0.5, 'Right Arrow: Next Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.55, 0.3, 'F1 or "h": Show Help', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.5, 0.1, 'Press any key to continue', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) self.help_objects = help_objects plt.draw()
Draw the help overlay
cell_locator.py
draw_help
david-a-joy/hipsc-cell-locator
0
python
def draw_help(self): ' ' if (self.display_mode != 'normal'): return self.display_mode = 'help' help_objects = [] bbox = self.fig.get_window_extent().bounds (x0, y0, x1, y1) = bbox bg_patch = Rectangle((x0, y0), (x1 - x0), (y1 - y0), fill=True, alpha=0.9, color=(0, 0, 0), zorder=99) help_objects.append(bg_patch) self.fig.patches.append(bg_patch) title = self.fig.text(0.5, 0.9, 'Cell Locator', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=32) help_objects.append(title) help_text = self.fig.text(0.5, 0.75, 'Select the center of each cell', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) if (UNAME == 'darwin'): help_text = self.fig.text(0.2, 0.6, 'Single Press: Select a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) else: help_text = self.fig.text(0.2, 0.6, 'Left Click: Select a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) if (UNAME == 'darwin'): help_text = self.fig.text(0.2, 0.5, 'Double Press: Unselect a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) else: help_text = self.fig.text(0.2, 0.5, 'Right Click: Unselect a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.2, 0.4, 'Spacebar: Save and Next Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.2, 0.3, 'Escape: Save and Exit', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) 
help_text = self.fig.text(0.55, 0.6, 'Left Arrow: Previous Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.55, 0.5, 'Right Arrow: Next Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.55, 0.3, 'F1 or "h": Show Help', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.5, 0.1, 'Press any key to continue', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) self.help_objects = help_objects plt.draw()
def draw_help(self): ' ' if (self.display_mode != 'normal'): return self.display_mode = 'help' help_objects = [] bbox = self.fig.get_window_extent().bounds (x0, y0, x1, y1) = bbox bg_patch = Rectangle((x0, y0), (x1 - x0), (y1 - y0), fill=True, alpha=0.9, color=(0, 0, 0), zorder=99) help_objects.append(bg_patch) self.fig.patches.append(bg_patch) title = self.fig.text(0.5, 0.9, 'Cell Locator', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=32) help_objects.append(title) help_text = self.fig.text(0.5, 0.75, 'Select the center of each cell', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) if (UNAME == 'darwin'): help_text = self.fig.text(0.2, 0.6, 'Single Press: Select a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) else: help_text = self.fig.text(0.2, 0.6, 'Left Click: Select a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) if (UNAME == 'darwin'): help_text = self.fig.text(0.2, 0.5, 'Double Press: Unselect a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) else: help_text = self.fig.text(0.2, 0.5, 'Right Click: Unselect a cell', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.2, 0.4, 'Spacebar: Save and Next Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.2, 0.3, 'Escape: Save and Exit', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) 
help_text = self.fig.text(0.55, 0.6, 'Left Arrow: Previous Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.55, 0.5, 'Right Arrow: Next Image', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.55, 0.3, 'F1 or "h": Show Help', color='white', visible=True, horizontalalignment='left', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) help_text = self.fig.text(0.5, 0.1, 'Press any key to continue', color='white', visible=True, horizontalalignment='center', family='sans-serif', zorder=100, fontsize=24) help_objects.append(help_text) self.help_objects = help_objects plt.draw()<|docstring|>Draw the help overlay<|endoftext|>
76af721423401207ef955aa178cec803b30dcd53401cb0e78e9bb2532533bd61
def clear_help(self):
    """Remove every artist created by the help overlay and redraw."""
    self.display_mode = 'normal'
    for artist in self.help_objects:
        # Each overlay artist lives either in the figure's patch list
        # or in its text list; remove it from whichever holds it.
        if artist in self.fig.patches:
            self.fig.patches.remove(artist)
        if artist in self.fig.texts:
            self.fig.texts.remove(artist)
    self.help_objects = []
    plt.draw()
Clear the help display
cell_locator.py
clear_help
david-a-joy/hipsc-cell-locator
0
python
def clear_help(self): ' ' self.display_mode = 'normal' for obj in self.help_objects: if (obj in self.fig.patches): self.fig.patches.remove(obj) if (obj in self.fig.texts): self.fig.texts.remove(obj) self.help_objects = [] plt.draw()
def clear_help(self): ' ' self.display_mode = 'normal' for obj in self.help_objects: if (obj in self.fig.patches): self.fig.patches.remove(obj) if (obj in self.fig.texts): self.fig.texts.remove(obj) self.help_objects = [] plt.draw()<|docstring|>Clear the help display<|endoftext|>
cf9ef5cdb1e36afbf8be58ae4defff1104d91d3384940b8922e94efe778711e8
def on_mouse_press(self, event):
    """Handle a mouse press: dismiss overlay screens or edit point selections."""
    # Overlay screens consume the click before any selection can happen.
    if self.display_mode == 'help':
        self.clear_help()
        return
    if self.display_mode == 'encouragement':
        self.clear_encouragement()
        return
    if self.display_mode == 'finished':
        # Final screen: persist the selections and close the window.
        self.save_points()
        plt.close()
        return
    if self.cur_sel_mode == 'point':
        if event.button == 1:
            # Left click adds a point of the currently active class.
            px, py = self.shape_manager.warp_to_axis(event.x, event.y)
            self.shape_manager.on_point_complete(sel_class=self.cur_sel_class, x=px, y=py)
        elif event.button == 3:
            # Right click removes every point within a small radius.
            px, py = self.shape_manager.warp_to_axis(event.x, event.y)
            for nearby in self.shape_manager.find_near_points(px, py, radius=0.01):
                self.shape_manager.remove_shape(nearby)
        self.canvas.draw()
When the mouse button is pressed
cell_locator.py
on_mouse_press
david-a-joy/hipsc-cell-locator
0
python
def on_mouse_press(self, event): ' ' if (self.display_mode == 'help'): self.clear_help() return if (self.display_mode == 'encouragement'): self.clear_encouragement() return if (self.display_mode == 'finished'): self.save_points() plt.close() return if (self.cur_sel_mode == 'point'): if (event.button == 1): (px, py) = self.shape_manager.warp_to_axis(event.x, event.y) self.shape_manager.on_point_complete(sel_class=self.cur_sel_class, x=px, y=py) elif (event.button == 3): (px, py) = self.shape_manager.warp_to_axis(event.x, event.y) for point in self.shape_manager.find_near_points(px, py, radius=0.01): self.shape_manager.remove_shape(point) self.canvas.draw()
def on_mouse_press(self, event): ' ' if (self.display_mode == 'help'): self.clear_help() return if (self.display_mode == 'encouragement'): self.clear_encouragement() return if (self.display_mode == 'finished'): self.save_points() plt.close() return if (self.cur_sel_mode == 'point'): if (event.button == 1): (px, py) = self.shape_manager.warp_to_axis(event.x, event.y) self.shape_manager.on_point_complete(sel_class=self.cur_sel_class, x=px, y=py) elif (event.button == 3): (px, py) = self.shape_manager.warp_to_axis(event.x, event.y) for point in self.shape_manager.find_near_points(px, py, radius=0.01): self.shape_manager.remove_shape(point) self.canvas.draw()<|docstring|>When the mouse button is pressed<|endoftext|>
6c33fbb965ae91610f9663a2287c85e3bf450745c66f83acbb56419ff5d4c032
def on_resize(self, event):
    """Propagate a window-resize event to the shapes and redraw the canvas."""
    # Let the shape manager recompute artist positions for the new size.
    self.shape_manager.on_window_resize()
    self.canvas.draw()
Resize the window
cell_locator.py
on_resize
david-a-joy/hipsc-cell-locator
0
python
def on_resize(self, event): ' ' self.shape_manager.on_window_resize() self.canvas.draw()
def on_resize(self, event): ' ' self.shape_manager.on_window_resize() self.canvas.draw()<|docstring|>Resize the window<|endoftext|>
da4d0013162a1fa0d02410498016b9b2f71209091190c963c798654519903a49
def extract_handler(self, application):
    """
    This accepts the application passed in to the context and returns the handler

    Most importantly, it doesn't technically have to be an application: if passed a simple function
    it will assume you mean to use the callable handler, and if passed a dictionary with 'handler_class'
    and 'handler_config' keys, it will build a handler from that.
    """
    # Application-like object: anything exposing a handler_class attribute.
    if hasattr(application, 'handler_class'):
        handler = self.di.build(application.handler_class, cache=False)
        handler.configure(self.finalize_handler_config(application.handler_config))
        return handler
    # Dictionary-like object carrying explicit handler class/config entries.
    if hasattr(application, '__getitem__') and 'handler_class' in application:
        if 'handler_config' not in application:
            raise ValueError(
                "context was passed a dictionary-like object with 'handler_class', but not "
                + "'handler_config'. Both are required to execute the handler"
            )
        handler = self.di.build(application['handler_class'], cache=False)
        handler.configure(self.finalize_handler_config(application['handler_config']))
        return handler
    # Plain callable: wrap it in the generic callable handler.
    if callable(application):
        handler = self.di.build(callable_module.Callable, cache=False)
        handler.configure(self.finalize_handler_config({'callable': application}))
        return handler
    raise ValueError(
        'The context received an object it did not know how to handle! You should pass in either an instance '
        + "of clearskies.Application, a dictionary with 'handler_class' and 'handler_config' keys, or a "
        + 'function/lambda to be executed'
    )
This accepts the application passed in to the context and returns the handler Most importantly, it doesn't technically have to be an application: if passed a simple function it will assume you mean to use the callable handler, and if passed a dictionary with 'handler_class' and 'handler_config' keys, it will build a handler from that.
src/clearskies/contexts/context.py
extract_handler
cmancone/clearskies
4
python
def extract_handler(self, application): "\n This accepts the application passed in to the context and returns the handler\n\n Most importantly, it doesn't technically have to be an application: if passed a simple function\n it will assume you mean to use the callable handler, and if passed a dictionary with 'handler_class'\n and 'handler_config' keys, it will build a handler from that.\n " if hasattr(application, 'handler_class'): handler = self.di.build(application.handler_class, cache=False) handler.configure(self.finalize_handler_config(application.handler_config)) return handler if (hasattr(application, '__getitem__') and ('handler_class' in application)): if (not ('handler_config' in application)): raise ValueError(("context was passed a dictionary-like object with 'handler_class', but not " + "'handler_config'. Both are required to execute the handler")) handler = self.di.build(application['handler_class'], cache=False) handler.configure(self.finalize_handler_config(application['handler_config'])) return handler if callable(application): handler = self.di.build(callable_module.Callable, cache=False) handler.configure(self.finalize_handler_config({'callable': application})) return handler raise ValueError((('The context received an object it did not know how to handle! You should pass in either an instance ' + "of clearskies.Application, a dictionary with 'handler_class' and 'handler_config' keys, or a ") + 'function/lambda to be executed'))
def extract_handler(self, application): "\n This accepts the application passed in to the context and returns the handler\n\n Most importantly, it doesn't technically have to be an application: if passed a simple function\n it will assume you mean to use the callable handler, and if passed a dictionary with 'handler_class'\n and 'handler_config' keys, it will build a handler from that.\n " if hasattr(application, 'handler_class'): handler = self.di.build(application.handler_class, cache=False) handler.configure(self.finalize_handler_config(application.handler_config)) return handler if (hasattr(application, '__getitem__') and ('handler_class' in application)): if (not ('handler_config' in application)): raise ValueError(("context was passed a dictionary-like object with 'handler_class', but not " + "'handler_config'. Both are required to execute the handler")) handler = self.di.build(application['handler_class'], cache=False) handler.configure(self.finalize_handler_config(application['handler_config'])) return handler if callable(application): handler = self.di.build(callable_module.Callable, cache=False) handler.configure(self.finalize_handler_config({'callable': application})) return handler raise ValueError((('The context received an object it did not know how to handle! You should pass in either an instance ' + "of clearskies.Application, a dictionary with 'handler_class' and 'handler_config' keys, or a ") + 'function/lambda to be executed'))<|docstring|>This accepts the application passed in to the context and returns the handler Most importantly, it doesn't technically have to be an application: if passed a simple function it will assume you mean to use the callable handler, and if passed a dictionary with 'handler_class' and 'handler_config' keys, it will build a handler from that.<|endoftext|>
94624a5dceaf40fbc01e149497a61f08af952aee10dda1deded42f66cab57619
def show_window(image, title='untitled', max_height=700):
    """Open a window displaying *image*, scaled so it is at most *max_height* tall.

    :param image: OpenCV image to display
    :param title: window title
    :param max_height: maximum height of the displayed window, in pixels
    :return: the key code returned by ``cv2.waitKey`` when a key is pressed
    """
    height, width = image.shape[:2]
    # Shrink the window proportionally when the image is taller than allowed.
    if height > max_height:
        scale = max_height / height
        height = round(height * scale)
        width = round(width * scale)
    cv2.namedWindow(title, cv2.WINDOW_NORMAL)
    cv2.resizeWindow(title, width, height)
    cv2.imshow(title, image)
    pressed = cv2.waitKey(0)
    cv2.destroyAllWindows()
    return pressed
์ด๋ฏธ์ง€ ์œˆ๋„์šฐ๋ฅผ ์—ด์–ด์„œ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. :param image: ๋ณด์—ฌ์ค„ ์ด๋ฏธ์ง€ (OpenCV image ๊ฐ์ฒด) :param title: ์œˆ๋„์šฐ ์ œ๋ชฉ :param max_height: ์ด๋ฏธ์ง€ ์œˆ๋„์šฐ ์‚ฌ์ด์ฆˆ์˜ ์ตœ๋Œ€ ๋†’์ด :return:
text_detect/src/test_pre_process.py
show_window
vikira/concentration_project
0
python
def show_window(image, title='untitled', max_height=700): ' ์ด๋ฏธ์ง€ ์œˆ๋„์šฐ๋ฅผ ์—ด์–ด์„œ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค.\n\n :param image: ๋ณด์—ฌ์ค„ ์ด๋ฏธ์ง€ (OpenCV image ๊ฐ์ฒด)\n :param title: ์œˆ๋„์šฐ ์ œ๋ชฉ\n :param max_height: ์ด๋ฏธ์ง€ ์œˆ๋„์šฐ ์‚ฌ์ด์ฆˆ์˜ ์ตœ๋Œ€ ๋†’์ด\n :return:\n ' (height, width) = image.shape[:2] if (height > max_height): rate = (max_height / height) height = round((height * rate)) width = round((width * rate)) cv2.namedWindow(title, cv2.WINDOW_NORMAL) cv2.resizeWindow(title, width, height) cv2.imshow(title, image) key = cv2.waitKey(0) cv2.destroyAllWindows() return key
def show_window(image, title='untitled', max_height=700): ' ์ด๋ฏธ์ง€ ์œˆ๋„์šฐ๋ฅผ ์—ด์–ด์„œ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค.\n\n :param image: ๋ณด์—ฌ์ค„ ์ด๋ฏธ์ง€ (OpenCV image ๊ฐ์ฒด)\n :param title: ์œˆ๋„์šฐ ์ œ๋ชฉ\n :param max_height: ์ด๋ฏธ์ง€ ์œˆ๋„์šฐ ์‚ฌ์ด์ฆˆ์˜ ์ตœ๋Œ€ ๋†’์ด\n :return:\n ' (height, width) = image.shape[:2] if (height > max_height): rate = (max_height / height) height = round((height * rate)) width = round((width * rate)) cv2.namedWindow(title, cv2.WINDOW_NORMAL) cv2.resizeWindow(title, width, height) cv2.imshow(title, image) key = cv2.waitKey(0) cv2.destroyAllWindows() return key<|docstring|>์ด๋ฏธ์ง€ ์œˆ๋„์šฐ๋ฅผ ์—ด์–ด์„œ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. :param image: ๋ณด์—ฌ์ค„ ์ด๋ฏธ์ง€ (OpenCV image ๊ฐ์ฒด) :param title: ์œˆ๋„์šฐ ์ œ๋ชฉ :param max_height: ์ด๋ฏธ์ง€ ์œˆ๋„์šฐ ์‚ฌ์ด์ฆˆ์˜ ์ตœ๋Œ€ ๋†’์ด :return:<|endoftext|>
b3aa29dec61a47f74dc16aa6fae791077881cd611bbe7a349ad97ea5090772ca
def merge_horizontal(image_gray, image_bgr):
    """Merge two images of equal height side by side (horizontally).

    Intended for comparing an intermediate image-processing result with the
    original: the 2-dimensional grayscale image is converted to BGR first so
    both operands have the same number of channels.

    :param image_gray: 2-dimensional grayscale image
    :param image_bgr: 3-dimensional BGR color image
    :return: the two images stacked horizontally
    """
    gray_as_bgr = cv2.cvtColor(image_gray, cv2.COLOR_GRAY2BGR)
    return np.hstack((gray_as_bgr, image_bgr))
Height ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค. ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ, 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค. :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€ :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€ :return: ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€
text_detect/src/test_pre_process.py
merge_horizontal
vikira/concentration_project
0
python
def merge_horizontal(image_gray, image_bgr): ' Height ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค.\n ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ,\n 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค.\n\n :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€\n :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€\n :return: ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€\n ' image_cr = cv2.cvtColor(image_gray, cv2.COLOR_GRAY2BGR) numpy_horizontal = np.hstack((image_cr, image_bgr)) return numpy_horizontal
def merge_horizontal(image_gray, image_bgr): ' Height ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค.\n ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ,\n 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค.\n\n :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€\n :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€\n :return: ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€\n ' image_cr = cv2.cvtColor(image_gray, cv2.COLOR_GRAY2BGR) numpy_horizontal = np.hstack((image_cr, image_bgr)) return numpy_horizontal<|docstring|>Height ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค. ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ, 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค. :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€ :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€ :return: ์˜†์œผ๋กœ(Horizontally) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€<|endoftext|>
aaa7f7e93303f931c311a39cec7678c981601783ed913edc44e5664570adba00
def merge_vertical(image_gray, image_bgr):
    """Merge two images of equal width one above the other (vertically).

    Intended for comparing an intermediate image-processing result with the
    original: the 2-dimensional grayscale image is converted to BGR first so
    both operands have the same number of channels.

    :param image_gray: 2-dimensional grayscale image
    :param image_bgr: 3-dimensional BGR color image
    :return: the two images stacked vertically
    """
    gray_as_bgr = cv2.cvtColor(image_gray, cv2.COLOR_GRAY2BGR)
    return np.vstack((gray_as_bgr, image_bgr))
Width ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค. ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ, 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค. :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€ :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€ :return: ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€
text_detect/src/test_pre_process.py
merge_vertical
vikira/concentration_project
0
python
def merge_vertical(image_gray, image_bgr): ' Width ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค.\n ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ,\n 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค.\n\n :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€\n :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€\n :return: ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€\n ' image_cr = cv2.cvtColor(image_gray, cv2.COLOR_GRAY2BGR) numpy_vertical = np.vstack((image_cr, image_bgr)) return numpy_vertical
def merge_vertical(image_gray, image_bgr): ' Width ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค.\n ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ,\n 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค.\n\n :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€\n :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€\n :return: ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€\n ' image_cr = cv2.cvtColor(image_gray, cv2.COLOR_GRAY2BGR) numpy_vertical = np.vstack((image_cr, image_bgr)) return numpy_vertical<|docstring|>Width ์‚ฌ์ด์ฆˆ๊ฐ€ ๊ฐ™์€ ๋‘ ์ด๋ฏธ์ง€๋ฅผ ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ ํ•ฉ๋‹ˆ๋‹ค. ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ(Image processing) ๋‹จ๊ณ„๋ฅผ ์›๋ณธ๊ณผ ๋น„๊ตํ•˜๊ธฐ์œ„ํ•œ ๋ชฉ์ ์œผ๋กœ, 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€์™€ 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€๋ฅผ ์ธ์ž๋กœ ๋ฐ›์•„ ๋ณ‘ํ•ฉํ•ฉ๋‹ˆ๋‹ค. :param image_gray: 2์ฐจ์›(2 dimension) ํ‘๋ฐฑ ์ด๋ฏธ์ง€ :param image_bgr: 3์ฐจ์›(3 dimension) BGR ์ปฌ๋ฆฌ ์ด๋ฏธ์ง€ :return: ์œ„์•„๋ž˜๋กœ(Vertically) ๋ณ‘ํ•ฉ๋œ ์ด๋ฏธ์ง€<|endoftext|>
bf6267c2f061af01e9f76fca715144cd8a720166f838a68b9d66b1889b740e34
def detect_line(image_binary):
    """Detect straight lines in the image and return a copy with them drawn in green.

    Uses the probabilistic Hough transform with parameters taken from the
    ``remove_line`` section of the preprocessing configuration.

    NOTE(review): the previous docstring described the return value as a
    "line-removed image", but the function actually highlights the detected
    lines rather than removing them — the docstring below reflects the code.

    :param image_binary: binary (black/white) OpenCV image (2 dimensions)
    :return: RGB copy of the input with each detected line drawn in green
    """
    working = image_binary.copy()
    working_rgb = cv2.cvtColor(working, cv2.COLOR_GRAY2RGB)
    config = pp.configs['remove_line']
    lines = cv2.HoughLinesP(
        working, 1, np.pi / 180, config['threshold'], np.array([]),
        config['min_line_length'], config['max_line_gap'])
    if lines is not None:
        for line in lines:
            x1, y1, x2, y2 = line[0]
            cv2.line(working_rgb, (x1, y1), (x2, y2), (0, 155, 0), 2)
    return working_rgb
์ด๋ฏธ์ง€์—์„œ ์ง์„ ์„ ์ฐพ์•„์„œ ์ดˆ๋ก์ƒ‰์œผ๋กœ ํ‘œ์‹œํ•œ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. :param image_binary: ํ‘๋ฐฑ(Binary) OpenCV image (2 dimension) :return: ๋ผ์ธ์ด ์‚ญ์ œ๋œ ์ด๋ฏธ์ง€ (OpenCV image)
text_detect/src/test_pre_process.py
detect_line
vikira/concentration_project
0
python
def detect_line(image_binary): ' ์ด๋ฏธ์ง€์—์„œ ์ง์„ ์„ ์ฐพ์•„์„œ ์ดˆ๋ก์ƒ‰์œผ๋กœ ํ‘œ์‹œํ•œ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n\n :param image_binary: ํ‘๋ฐฑ(Binary) OpenCV image (2 dimension)\n :return: ๋ผ์ธ์ด ์‚ญ์ œ๋œ ์ด๋ฏธ์ง€ (OpenCV image)\n ' copy = image_binary.copy() copy_rbg = cv2.cvtColor(copy, cv2.COLOR_GRAY2RGB) threshold = pp.configs['remove_line']['threshold'] min_line_length = pp.configs['remove_line']['min_line_length'] max_line_gap = pp.configs['remove_line']['max_line_gap'] lines = cv2.HoughLinesP(copy, 1, (np.pi / 180), threshold, np.array([]), min_line_length, max_line_gap) if (lines is not None): for line in lines: (x1, y1, x2, y2) = line[0] cv2.line(copy_rbg, (x1, y1), (x2, y2), (0, 155, 0), 2) return copy_rbg
def detect_line(image_binary): ' ์ด๋ฏธ์ง€์—์„œ ์ง์„ ์„ ์ฐพ์•„์„œ ์ดˆ๋ก์ƒ‰์œผ๋กœ ํ‘œ์‹œํ•œ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n\n :param image_binary: ํ‘๋ฐฑ(Binary) OpenCV image (2 dimension)\n :return: ๋ผ์ธ์ด ์‚ญ์ œ๋œ ์ด๋ฏธ์ง€ (OpenCV image)\n ' copy = image_binary.copy() copy_rbg = cv2.cvtColor(copy, cv2.COLOR_GRAY2RGB) threshold = pp.configs['remove_line']['threshold'] min_line_length = pp.configs['remove_line']['min_line_length'] max_line_gap = pp.configs['remove_line']['max_line_gap'] lines = cv2.HoughLinesP(copy, 1, (np.pi / 180), threshold, np.array([]), min_line_length, max_line_gap) if (lines is not None): for line in lines: (x1, y1, x2, y2) = line[0] cv2.line(copy_rbg, (x1, y1), (x2, y2), (0, 155, 0), 2) return copy_rbg<|docstring|>์ด๋ฏธ์ง€์—์„œ ์ง์„ ์„ ์ฐพ์•„์„œ ์ดˆ๋ก์ƒ‰์œผ๋กœ ํ‘œ์‹œํ•œ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. :param image_binary: ํ‘๋ฐฑ(Binary) OpenCV image (2 dimension) :return: ๋ผ์ธ์ด ์‚ญ์ œ๋œ ์ด๋ฏธ์ง€ (OpenCV image)<|endoftext|>
79d713b7e5f9982d33e02bffb1a7da265d4a9406c398b36a4c2a47ecf1733d8d
def get_step_compare_image(path_of_image):
    """Merge the intermediate results of every preprocessing step into one image.

    For each stage of the pipeline (grayscale, gradient, threshold, long-line
    removal, closing) the stage output is stacked on top of the original image
    annotated with the contours found at that stage; the per-stage columns are
    then joined side by side for visual comparison.

    :param path_of_image: path of the image file to process
    :return: single image containing all per-stage comparison columns
    """
    image_origin = pp.open_original(path_of_image)
    image_origin = cv2.pyrUp(image_origin)  # upscale 2x, as the pipeline expects

    def compare_column(stage_image):
        # Stack the stage output above the original annotated with the
        # contours detected at this stage.
        contours = pp.get_contours(stage_image)
        annotated = pp.draw_contour_rect(image_origin, contours)
        return merge_vertical(stage_image, annotated)

    image_gray = pp.get_gray(image_origin)
    image_gradient = pp.get_gradient(image_gray)
    image_threshold = pp.get_threshold(image_gradient)
    image_line_removed = pp.remove_long_line(image_threshold)
    image_close = pp.get_closing(image_line_removed)

    comparing_images = [compare_column(stage) for stage in
                        (image_gray, image_gradient, image_threshold,
                         image_line_removed, image_close)]
    return np.hstack(comparing_images)
์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ ์ „ ๋‹จ๊ณ„์˜ ์ค‘๊ฐ„ ๊ฒฐ๊ณผ๋ฌผ์„ ํ•˜๋‚˜๋กœ ๋ณ‘ํ•ฉํ•˜์—ฌ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. :param path_of_image: :return:
text_detect/src/test_pre_process.py
get_step_compare_image
vikira/concentration_project
0
python
def get_step_compare_image(path_of_image): ' ์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ ์ „ ๋‹จ๊ณ„์˜ ์ค‘๊ฐ„ ๊ฒฐ๊ณผ๋ฌผ์„ ํ•˜๋‚˜๋กœ ๋ณ‘ํ•ฉํ•˜์—ฌ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n\n :param path_of_image:\n :return:\n ' image_origin = pp.open_original(path_of_image) image_origin = cv2.pyrUp(image_origin) comparing_images = [] image_gray = pp.get_gray(image_origin) contours = pp.get_contours(image_gray) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_gray, image_with_contours) comparing_images.append(compare_set) image_gradient = pp.get_gradient(image_gray) contours = pp.get_contours(image_gradient) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_gradient, image_with_contours) comparing_images.append(compare_set) image_threshold = pp.get_threshold(image_gradient) contours = pp.get_contours(image_threshold) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_threshold, image_with_contours) comparing_images.append(compare_set) image_line_removed = pp.remove_long_line(image_threshold) contours = pp.get_contours(image_line_removed) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_line_removed, image_with_contours) comparing_images.append(compare_set) image_close = pp.get_closing(image_line_removed) contours = pp.get_contours(image_close) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_close, image_with_contours) comparing_images.append(compare_set) image_merged_all = np.hstack(comparing_images) return image_merged_all
def get_step_compare_image(path_of_image): ' ์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ ์ „ ๋‹จ๊ณ„์˜ ์ค‘๊ฐ„ ๊ฒฐ๊ณผ๋ฌผ์„ ํ•˜๋‚˜๋กœ ๋ณ‘ํ•ฉํ•˜์—ฌ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n\n :param path_of_image:\n :return:\n ' image_origin = pp.open_original(path_of_image) image_origin = cv2.pyrUp(image_origin) comparing_images = [] image_gray = pp.get_gray(image_origin) contours = pp.get_contours(image_gray) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_gray, image_with_contours) comparing_images.append(compare_set) image_gradient = pp.get_gradient(image_gray) contours = pp.get_contours(image_gradient) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_gradient, image_with_contours) comparing_images.append(compare_set) image_threshold = pp.get_threshold(image_gradient) contours = pp.get_contours(image_threshold) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_threshold, image_with_contours) comparing_images.append(compare_set) image_line_removed = pp.remove_long_line(image_threshold) contours = pp.get_contours(image_line_removed) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_line_removed, image_with_contours) comparing_images.append(compare_set) image_close = pp.get_closing(image_line_removed) contours = pp.get_contours(image_close) image_with_contours = pp.draw_contour_rect(image_origin, contours) compare_set = merge_vertical(image_close, image_with_contours) comparing_images.append(compare_set) image_merged_all = np.hstack(comparing_images) return image_merged_all<|docstring|>์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ ์ „ ๋‹จ๊ณ„์˜ ์ค‘๊ฐ„ ๊ฒฐ๊ณผ๋ฌผ์„ ํ•˜๋‚˜๋กœ ๋ณ‘ํ•ฉํ•˜์—ฌ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. :param path_of_image: :return:<|endoftext|>
a351a9aa7ec2b19a2b36438b108a850b1e983e78c6fd8d4bfe563ab976705c94
def get_image_with_contours(path_of_image): ' ์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ์„ ๊ฑฐ์นœ ํ›„,\n ์ตœ์ข…์ ์œผ๋กœ ์–ป์€ Contours ๋ฅผ ์›๋ณธ ์ด๋ฏธ์ง€ ์œ„์— ๊ทธ๋ ค์„œ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n\n :param path_of_image:\n :return:\n ' image_origin = pp.open_original(path_of_image) image_origin = cv2.pyrUp(image_origin) image_gray = pp.get_gray(image_origin) image_gradient = pp.get_gradient(image_gray) image_threshold = pp.get_threshold(image_gradient) image_line_removed = pp.remove_long_line(image_threshold) image_close = pp.get_closing(image_line_removed) contours = pp.get_contours(image_close) image_with_contours = pp.draw_contour_rect(image_origin, contours) return image_with_contours
์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ์„ ๊ฑฐ์นœ ํ›„, ์ตœ์ข…์ ์œผ๋กœ ์–ป์€ Contours ๋ฅผ ์›๋ณธ ์ด๋ฏธ์ง€ ์œ„์— ๊ทธ๋ ค์„œ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. :param path_of_image: :return:
text_detect/src/test_pre_process.py
get_image_with_contours
vikira/concentration_project
0
python
def get_image_with_contours(path_of_image): ' ์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ์„ ๊ฑฐ์นœ ํ›„,\n ์ตœ์ข…์ ์œผ๋กœ ์–ป์€ Contours ๋ฅผ ์›๋ณธ ์ด๋ฏธ์ง€ ์œ„์— ๊ทธ๋ ค์„œ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n\n :param path_of_image:\n :return:\n ' image_origin = pp.open_original(path_of_image) image_origin = cv2.pyrUp(image_origin) image_gray = pp.get_gray(image_origin) image_gradient = pp.get_gradient(image_gray) image_threshold = pp.get_threshold(image_gradient) image_line_removed = pp.remove_long_line(image_threshold) image_close = pp.get_closing(image_line_removed) contours = pp.get_contours(image_close) image_with_contours = pp.draw_contour_rect(image_origin, contours) return image_with_contours
def get_image_with_contours(path_of_image): ' ์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ์„ ๊ฑฐ์นœ ํ›„,\n ์ตœ์ข…์ ์œผ๋กœ ์–ป์€ Contours ๋ฅผ ์›๋ณธ ์ด๋ฏธ์ง€ ์œ„์— ๊ทธ๋ ค์„œ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n\n :param path_of_image:\n :return:\n ' image_origin = pp.open_original(path_of_image) image_origin = cv2.pyrUp(image_origin) image_gray = pp.get_gray(image_origin) image_gradient = pp.get_gradient(image_gray) image_threshold = pp.get_threshold(image_gradient) image_line_removed = pp.remove_long_line(image_threshold) image_close = pp.get_closing(image_line_removed) contours = pp.get_contours(image_close) image_with_contours = pp.draw_contour_rect(image_origin, contours) return image_with_contours<|docstring|>์ด๋ฏธ์ง€ ํ”„๋กœ์„ธ์‹ฑ์„ ๊ฑฐ์นœ ํ›„, ์ตœ์ข…์ ์œผ๋กœ ์–ป์€ Contours ๋ฅผ ์›๋ณธ ์ด๋ฏธ์ง€ ์œ„์— ๊ทธ๋ ค์„œ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. :param path_of_image: :return:<|endoftext|>
71d25937cb195d5fac403171b2ca440f224898812527cc92ec5c17601807b63d
def get_file_list(path): ' path ๊ฐ€ ๊ฐ€๋ฆฌํ‚ค๋Š” directory ์˜ ๋ชจ๋“  ํŒŒ์ผ๋ช…์„ ์ฝ์–ด์„œ string ์œผ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n ํŒŒ์ผ๋ช…์€ Absolute path ๊ฐ€ ํฌํ•จ๋œ ์ด๋ฆ„์ž…๋‹ˆ๋‹ค.\n\n :param path: ์ฝ์–ด ๋“ค์ผ directory ์˜ ์ ˆ๋Œ€๊ฒฝ๋กœ\n :return: directory ์˜ ๋ชจ๋“  file path ์„ String ํ˜•์œผ๋กœ Array ์— ๋‹ด์•„ ๋ฐ˜ํ™˜\n ' image_path_list = [] for (root, dirs, files) in os.walk(path): root_path = os.path.join(os.path.abspath(path), root) for file in files: file_path = os.path.join(root_path, file) image_path_list.append(file_path) return image_path_list
path ๊ฐ€ ๊ฐ€๋ฆฌํ‚ค๋Š” directory ์˜ ๋ชจ๋“  ํŒŒ์ผ๋ช…์„ ์ฝ์–ด์„œ string ์œผ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. ํŒŒ์ผ๋ช…์€ Absolute path ๊ฐ€ ํฌํ•จ๋œ ์ด๋ฆ„์ž…๋‹ˆ๋‹ค. :param path: ์ฝ์–ด ๋“ค์ผ directory ์˜ ์ ˆ๋Œ€๊ฒฝ๋กœ :return: directory ์˜ ๋ชจ๋“  file path ์„ String ํ˜•์œผ๋กœ Array ์— ๋‹ด์•„ ๋ฐ˜ํ™˜
text_detect/src/test_pre_process.py
get_file_list
vikira/concentration_project
0
python
def get_file_list(path): ' path ๊ฐ€ ๊ฐ€๋ฆฌํ‚ค๋Š” directory ์˜ ๋ชจ๋“  ํŒŒ์ผ๋ช…์„ ์ฝ์–ด์„œ string ์œผ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n ํŒŒ์ผ๋ช…์€ Absolute path ๊ฐ€ ํฌํ•จ๋œ ์ด๋ฆ„์ž…๋‹ˆ๋‹ค.\n\n :param path: ์ฝ์–ด ๋“ค์ผ directory ์˜ ์ ˆ๋Œ€๊ฒฝ๋กœ\n :return: directory ์˜ ๋ชจ๋“  file path ์„ String ํ˜•์œผ๋กœ Array ์— ๋‹ด์•„ ๋ฐ˜ํ™˜\n ' image_path_list = [] for (root, dirs, files) in os.walk(path): root_path = os.path.join(os.path.abspath(path), root) for file in files: file_path = os.path.join(root_path, file) image_path_list.append(file_path) return image_path_list
def get_file_list(path): ' path ๊ฐ€ ๊ฐ€๋ฆฌํ‚ค๋Š” directory ์˜ ๋ชจ๋“  ํŒŒ์ผ๋ช…์„ ์ฝ์–ด์„œ string ์œผ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.\n ํŒŒ์ผ๋ช…์€ Absolute path ๊ฐ€ ํฌํ•จ๋œ ์ด๋ฆ„์ž…๋‹ˆ๋‹ค.\n\n :param path: ์ฝ์–ด ๋“ค์ผ directory ์˜ ์ ˆ๋Œ€๊ฒฝ๋กœ\n :return: directory ์˜ ๋ชจ๋“  file path ์„ String ํ˜•์œผ๋กœ Array ์— ๋‹ด์•„ ๋ฐ˜ํ™˜\n ' image_path_list = [] for (root, dirs, files) in os.walk(path): root_path = os.path.join(os.path.abspath(path), root) for file in files: file_path = os.path.join(root_path, file) image_path_list.append(file_path) return image_path_list<|docstring|>path ๊ฐ€ ๊ฐ€๋ฆฌํ‚ค๋Š” directory ์˜ ๋ชจ๋“  ํŒŒ์ผ๋ช…์„ ์ฝ์–ด์„œ string ์œผ๋กœ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค. ํŒŒ์ผ๋ช…์€ Absolute path ๊ฐ€ ํฌํ•จ๋œ ์ด๋ฆ„์ž…๋‹ˆ๋‹ค. :param path: ์ฝ์–ด ๋“ค์ผ directory ์˜ ์ ˆ๋Œ€๊ฒฝ๋กœ :return: directory ์˜ ๋ชจ๋“  file path ์„ String ํ˜•์œผ๋กœ Array ์— ๋‹ด์•„ ๋ฐ˜ํ™˜<|endoftext|>
9d32d752c48ca9ad80d0e4ab629a63c7164ea9c987f5cf9f7fd7ecadada6f0f2
def __get_label(self, name: str, color_if_not_found: str, description_if_not_found: str) -> Label: "\n Gets a label given its name if it's exists or creates it of otherwise. None is returned if an unexpected exception is thrown.\n " try: return self.pyGithubRepo.get_label(name) except UnknownObjectException: try: self.__log.debug('Meterian pr label was not found, will be created') return self.pyGithubRepo.create_label(name, color_if_not_found, description_if_not_found) except GithubException: pass except GithubException: pass self.__log.debug('Unexpected error while creating label', exc_info=1) return None
Gets a label given its name if it's exists or creates it of otherwise. None is returned if an unexpected exception is thrown.
src/vcs/github/GithubRepo.py
__get_label
MeterianHQ/meterian-autofix-interpreter
0
python
def __get_label(self, name: str, color_if_not_found: str, description_if_not_found: str) -> Label: "\n \n " try: return self.pyGithubRepo.get_label(name) except UnknownObjectException: try: self.__log.debug('Meterian pr label was not found, will be created') return self.pyGithubRepo.create_label(name, color_if_not_found, description_if_not_found) except GithubException: pass except GithubException: pass self.__log.debug('Unexpected error while creating label', exc_info=1) return None
def __get_label(self, name: str, color_if_not_found: str, description_if_not_found: str) -> Label: "\n \n " try: return self.pyGithubRepo.get_label(name) except UnknownObjectException: try: self.__log.debug('Meterian pr label was not found, will be created') return self.pyGithubRepo.create_label(name, color_if_not_found, description_if_not_found) except GithubException: pass except GithubException: pass self.__log.debug('Unexpected error while creating label', exc_info=1) return None<|docstring|>Gets a label given its name if it's exists or creates it of otherwise. None is returned if an unexpected exception is thrown.<|endoftext|>
dbbeb347742aad3e14dcea8a2cae7a727b3cec132626cf5913b3d8cbd34f794d
def find_clustering_drivers(dp, clusters, driver_detector_filename, driver_power_filename, decision_line=0.01): '\n Select strong features. Output a strong feature report embracing\n features characterising each clusters and overall good features.\n Output the driver power counts.\n\n Parameters\n -----------\n dp: dataframe\n A dataframe with numerical columns.\n clusters: series\n The clustering results from get_cluster method.\n p_value_decision_line: float, default: 1e-2\n Under which two clusters are considered distinct.\n driver_detector_filename: string\n Filename storing features differentiating two groups.\n driver_power_filename: string\n Filename storing driver power counts.\n\n Outputs\n -------\n A good feature report.\n Driver power counts.\n Returns\n -------\n Returns the strong features.\n ' df = dp.raw_data.loc[(:, dp.cleaned_data.columns)] def get_cluster_id_pair(n_clusters): for l1 in range(n_clusters): for l2 in range((l1 + 1), n_clusters): (yield (l1, l2)) n_clusters = len(set(clusters)) driver_powers = pd.DataFrame(0, index=df.columns, columns=range(n_clusters)) driver_detectors = pd.DataFrame('', index=range(n_clusters), columns=range(n_clusters)) for (col_name, col) in df.iteritems(): for (cluster_id1, cluster_id2) in get_cluster_id_pair(n_clusters): c1 = col.loc[(clusters == cluster_id1).values] c2 = col.loc[(clusters == cluster_id2).values] p_value = f_oneway(c1.values, c2.values)[1] if (p_value < decision_line): driver_powers.loc[(col_name, [cluster_id1, cluster_id2])] += 1 driver_detectors.loc[(cluster_id1, cluster_id2)] = (((driver_detectors.loc[(cluster_id1, cluster_id2)] + ',') + col_name) if (driver_detectors.loc[(cluster_id1, cluster_id2)] != '') else col_name) driver_powers.to_csv(driver_power_filename) driver_detectors.to_csv(driver_detector_filename)
Select strong features. Output a strong feature report embracing features characterising each clusters and overall good features. Output the driver power counts. Parameters ----------- dp: dataframe A dataframe with numerical columns. clusters: series The clustering results from get_cluster method. p_value_decision_line: float, default: 1e-2 Under which two clusters are considered distinct. driver_detector_filename: string Filename storing features differentiating two groups. driver_power_filename: string Filename storing driver power counts. Outputs ------- A good feature report. Driver power counts. Returns ------- Returns the strong features.
clusteror/implementation_layer.py
find_clustering_drivers
enfeizhan/clusteror
0
python
def find_clustering_drivers(dp, clusters, driver_detector_filename, driver_power_filename, decision_line=0.01): '\n Select strong features. Output a strong feature report embracing\n features characterising each clusters and overall good features.\n Output the driver power counts.\n\n Parameters\n -----------\n dp: dataframe\n A dataframe with numerical columns.\n clusters: series\n The clustering results from get_cluster method.\n p_value_decision_line: float, default: 1e-2\n Under which two clusters are considered distinct.\n driver_detector_filename: string\n Filename storing features differentiating two groups.\n driver_power_filename: string\n Filename storing driver power counts.\n\n Outputs\n -------\n A good feature report.\n Driver power counts.\n Returns\n -------\n Returns the strong features.\n ' df = dp.raw_data.loc[(:, dp.cleaned_data.columns)] def get_cluster_id_pair(n_clusters): for l1 in range(n_clusters): for l2 in range((l1 + 1), n_clusters): (yield (l1, l2)) n_clusters = len(set(clusters)) driver_powers = pd.DataFrame(0, index=df.columns, columns=range(n_clusters)) driver_detectors = pd.DataFrame(, index=range(n_clusters), columns=range(n_clusters)) for (col_name, col) in df.iteritems(): for (cluster_id1, cluster_id2) in get_cluster_id_pair(n_clusters): c1 = col.loc[(clusters == cluster_id1).values] c2 = col.loc[(clusters == cluster_id2).values] p_value = f_oneway(c1.values, c2.values)[1] if (p_value < decision_line): driver_powers.loc[(col_name, [cluster_id1, cluster_id2])] += 1 driver_detectors.loc[(cluster_id1, cluster_id2)] = (((driver_detectors.loc[(cluster_id1, cluster_id2)] + ',') + col_name) if (driver_detectors.loc[(cluster_id1, cluster_id2)] != ) else col_name) driver_powers.to_csv(driver_power_filename) driver_detectors.to_csv(driver_detector_filename)
def find_clustering_drivers(dp, clusters, driver_detector_filename, driver_power_filename, decision_line=0.01): '\n Select strong features. Output a strong feature report embracing\n features characterising each clusters and overall good features.\n Output the driver power counts.\n\n Parameters\n -----------\n dp: dataframe\n A dataframe with numerical columns.\n clusters: series\n The clustering results from get_cluster method.\n p_value_decision_line: float, default: 1e-2\n Under which two clusters are considered distinct.\n driver_detector_filename: string\n Filename storing features differentiating two groups.\n driver_power_filename: string\n Filename storing driver power counts.\n\n Outputs\n -------\n A good feature report.\n Driver power counts.\n Returns\n -------\n Returns the strong features.\n ' df = dp.raw_data.loc[(:, dp.cleaned_data.columns)] def get_cluster_id_pair(n_clusters): for l1 in range(n_clusters): for l2 in range((l1 + 1), n_clusters): (yield (l1, l2)) n_clusters = len(set(clusters)) driver_powers = pd.DataFrame(0, index=df.columns, columns=range(n_clusters)) driver_detectors = pd.DataFrame(, index=range(n_clusters), columns=range(n_clusters)) for (col_name, col) in df.iteritems(): for (cluster_id1, cluster_id2) in get_cluster_id_pair(n_clusters): c1 = col.loc[(clusters == cluster_id1).values] c2 = col.loc[(clusters == cluster_id2).values] p_value = f_oneway(c1.values, c2.values)[1] if (p_value < decision_line): driver_powers.loc[(col_name, [cluster_id1, cluster_id2])] += 1 driver_detectors.loc[(cluster_id1, cluster_id2)] = (((driver_detectors.loc[(cluster_id1, cluster_id2)] + ',') + col_name) if (driver_detectors.loc[(cluster_id1, cluster_id2)] != ) else col_name) driver_powers.to_csv(driver_power_filename) driver_detectors.to_csv(driver_detector_filename)<|docstring|>Select strong features. Output a strong feature report embracing features characterising each clusters and overall good features. Output the driver power counts. 
Parameters ----------- dp: dataframe A dataframe with numerical columns. clusters: series The clustering results from get_cluster method. p_value_decision_line: float, default: 1e-2 Under which two clusters are considered distinct. driver_detector_filename: string Filename storing features differentiating two groups. driver_power_filename: string Filename storing driver power counts. Outputs ------- A good feature report. Driver power counts. Returns ------- Returns the strong features.<|endoftext|>
a02bae7aed35b73d4cc73cdeb517d061e66f4b45517c18d74149e8a48188d9d6
def analyse_categorical_variables(dp, cat_features, labels, chi_decision_line, flagged_to_output='flagged_categorical_variables.csv'): "\n Analyse the categorical variables. Chi-square test detects variables\n distinct from expected.\n\n Parameters\n ----------\n df: dataframe\n Dataframe of only categorical variables.\n n_rows: int\n Number of rows, i.e., number of clients.\n chi_decision_line: float, default: 5e-2\n The decision line below which a variable is considered as important.\n flagged_to_output: object, default: 'flagged_categorical_variables.csv'\n The filename for output csv file storing difference over\n the expectation ratio.\n\n Returns and outputs\n -------------------\n Output a csv file storing the (observed-expected)/expected ratio\n for variables that are flagged as important.\n " df = dp.raw_data.loc[(:, cat_features)] n_rows = df.shape[0] n_clusters = len(set(labels.values)) cluster_sizes = labels.value_counts() cluster_labels = cluster_sizes.index.tolist() info = input(('Reports on categorical variables will be lost by proceeding!' + ' Would you like to proceed ([y]/n)?')) confirm_proceed(info, 'No progress. 
Original file kept.') if os.path.exists(flagged_to_output): os.remove(flagged_to_output) for (col_ind, (col_name, col)) in enumerate(df.iteritems()): value_frac = col.value_counts() values = value_frac.index n_values = len(values) value_frac = (value_frac.values / n_rows) value_frac = value_frac.reshape((n_values, 1)) expected = np.dot(value_frac, cluster_sizes.values.reshape((1, n_clusters))) observed = pd.DataFrame(0, index=values, columns=cluster_labels) for cluster_id in cluster_labels: observed.loc[(:, cluster_id)].update(col.loc[(labels == cluster_id).values].value_counts()) index = pd.MultiIndex.from_product([[col_name], values]) ratio_over_exp = pd.DataFrame(np.nan, index=index, columns=cluster_labels) chi_p_vals = chisquare(observed.values, expected)[1] for (cluster_ind, p_val) in enumerate(chi_p_vals): if (p_val < chi_decision_line): ratio = (((observed.iloc[(:, cluster_ind)] - expected[(:, cluster_ind)]) / (expected[(:, cluster_ind)] + 1e-05)) * 100) ratio_over_exp.iloc[(:, cluster_ind)] = ratio.values is_scarce = ((observed.iloc[(:, cluster_ind)] < 50) & (observed.iloc[(:, cluster_ind)] < (cluster_sizes.iloc[cluster_ind] * 0.2))) ratio_over_exp.iloc[(is_scarce.values, cluster_ind)] = np.nan header = (col_ind == 0) ratio_over_exp.to_csv(flagged_to_output, mode='a', header=header, float_format='%.2f%%')
Analyse the categorical variables. Chi-square test detects variables distinct from expected. Parameters ---------- df: dataframe Dataframe of only categorical variables. n_rows: int Number of rows, i.e., number of clients. chi_decision_line: float, default: 5e-2 The decision line below which a variable is considered as important. flagged_to_output: object, default: 'flagged_categorical_variables.csv' The filename for output csv file storing difference over the expectation ratio. Returns and outputs ------------------- Output a csv file storing the (observed-expected)/expected ratio for variables that are flagged as important.
clusteror/implementation_layer.py
analyse_categorical_variables
enfeizhan/clusteror
0
python
def analyse_categorical_variables(dp, cat_features, labels, chi_decision_line, flagged_to_output='flagged_categorical_variables.csv'): "\n Analyse the categorical variables. Chi-square test detects variables\n distinct from expected.\n\n Parameters\n ----------\n df: dataframe\n Dataframe of only categorical variables.\n n_rows: int\n Number of rows, i.e., number of clients.\n chi_decision_line: float, default: 5e-2\n The decision line below which a variable is considered as important.\n flagged_to_output: object, default: 'flagged_categorical_variables.csv'\n The filename for output csv file storing difference over\n the expectation ratio.\n\n Returns and outputs\n -------------------\n Output a csv file storing the (observed-expected)/expected ratio\n for variables that are flagged as important.\n " df = dp.raw_data.loc[(:, cat_features)] n_rows = df.shape[0] n_clusters = len(set(labels.values)) cluster_sizes = labels.value_counts() cluster_labels = cluster_sizes.index.tolist() info = input(('Reports on categorical variables will be lost by proceeding!' + ' Would you like to proceed ([y]/n)?')) confirm_proceed(info, 'No progress. 
Original file kept.') if os.path.exists(flagged_to_output): os.remove(flagged_to_output) for (col_ind, (col_name, col)) in enumerate(df.iteritems()): value_frac = col.value_counts() values = value_frac.index n_values = len(values) value_frac = (value_frac.values / n_rows) value_frac = value_frac.reshape((n_values, 1)) expected = np.dot(value_frac, cluster_sizes.values.reshape((1, n_clusters))) observed = pd.DataFrame(0, index=values, columns=cluster_labels) for cluster_id in cluster_labels: observed.loc[(:, cluster_id)].update(col.loc[(labels == cluster_id).values].value_counts()) index = pd.MultiIndex.from_product([[col_name], values]) ratio_over_exp = pd.DataFrame(np.nan, index=index, columns=cluster_labels) chi_p_vals = chisquare(observed.values, expected)[1] for (cluster_ind, p_val) in enumerate(chi_p_vals): if (p_val < chi_decision_line): ratio = (((observed.iloc[(:, cluster_ind)] - expected[(:, cluster_ind)]) / (expected[(:, cluster_ind)] + 1e-05)) * 100) ratio_over_exp.iloc[(:, cluster_ind)] = ratio.values is_scarce = ((observed.iloc[(:, cluster_ind)] < 50) & (observed.iloc[(:, cluster_ind)] < (cluster_sizes.iloc[cluster_ind] * 0.2))) ratio_over_exp.iloc[(is_scarce.values, cluster_ind)] = np.nan header = (col_ind == 0) ratio_over_exp.to_csv(flagged_to_output, mode='a', header=header, float_format='%.2f%%')
def analyse_categorical_variables(dp, cat_features, labels, chi_decision_line, flagged_to_output='flagged_categorical_variables.csv'): "\n Analyse the categorical variables. Chi-square test detects variables\n distinct from expected.\n\n Parameters\n ----------\n df: dataframe\n Dataframe of only categorical variables.\n n_rows: int\n Number of rows, i.e., number of clients.\n chi_decision_line: float, default: 5e-2\n The decision line below which a variable is considered as important.\n flagged_to_output: object, default: 'flagged_categorical_variables.csv'\n The filename for output csv file storing difference over\n the expectation ratio.\n\n Returns and outputs\n -------------------\n Output a csv file storing the (observed-expected)/expected ratio\n for variables that are flagged as important.\n " df = dp.raw_data.loc[(:, cat_features)] n_rows = df.shape[0] n_clusters = len(set(labels.values)) cluster_sizes = labels.value_counts() cluster_labels = cluster_sizes.index.tolist() info = input(('Reports on categorical variables will be lost by proceeding!' + ' Would you like to proceed ([y]/n)?')) confirm_proceed(info, 'No progress. 
Original file kept.') if os.path.exists(flagged_to_output): os.remove(flagged_to_output) for (col_ind, (col_name, col)) in enumerate(df.iteritems()): value_frac = col.value_counts() values = value_frac.index n_values = len(values) value_frac = (value_frac.values / n_rows) value_frac = value_frac.reshape((n_values, 1)) expected = np.dot(value_frac, cluster_sizes.values.reshape((1, n_clusters))) observed = pd.DataFrame(0, index=values, columns=cluster_labels) for cluster_id in cluster_labels: observed.loc[(:, cluster_id)].update(col.loc[(labels == cluster_id).values].value_counts()) index = pd.MultiIndex.from_product([[col_name], values]) ratio_over_exp = pd.DataFrame(np.nan, index=index, columns=cluster_labels) chi_p_vals = chisquare(observed.values, expected)[1] for (cluster_ind, p_val) in enumerate(chi_p_vals): if (p_val < chi_decision_line): ratio = (((observed.iloc[(:, cluster_ind)] - expected[(:, cluster_ind)]) / (expected[(:, cluster_ind)] + 1e-05)) * 100) ratio_over_exp.iloc[(:, cluster_ind)] = ratio.values is_scarce = ((observed.iloc[(:, cluster_ind)] < 50) & (observed.iloc[(:, cluster_ind)] < (cluster_sizes.iloc[cluster_ind] * 0.2))) ratio_over_exp.iloc[(is_scarce.values, cluster_ind)] = np.nan header = (col_ind == 0) ratio_over_exp.to_csv(flagged_to_output, mode='a', header=header, float_format='%.2f%%')<|docstring|>Analyse the categorical variables. Chi-square test detects variables distinct from expected. Parameters ---------- df: dataframe Dataframe of only categorical variables. n_rows: int Number of rows, i.e., number of clients. chi_decision_line: float, default: 5e-2 The decision line below which a variable is considered as important. flagged_to_output: object, default: 'flagged_categorical_variables.csv' The filename for output csv file storing difference over the expectation ratio. Returns and outputs ------------------- Output a csv file storing the (observed-expected)/expected ratio for variables that are flagged as important.<|endoftext|>