| function (string, lengths 11–56k) | repo_name (string, lengths 5–60) | features (list) |
|---|---|---|
def test_jointree_marginals3(self):
    ft = FactorTree.create_jointree(self.bn)
    resFactor = ft.marginals(["sprinkler"])
    np.testing.assert_array_almost_equal(resFactor.get_potential(), np.array([0.42, 0.58])) | SocialCognitiveSystems/PRIMO | [4, 3, 4, 10, 1486413504] |
def test_jointree_marginals_trivial_evidence(self):
    ft = FactorTree.create_jointree(self.bn)
    ft.set_evidence({"slippery_road": "true"})
    resFactor = ft.marginals(["slippery_road"])
    np.testing.assert_array_almost_equal(resFactor.get_potential(), np.array([1.0, 0.0])) | SocialCognitiveSystems/PRIMO | [4, 3, 4, 10, 1486413504] |
def test_jointree_evidence_trivial(self):
    ft = FactorTree.create_jointree(self.bn)
    ft.set_evidence({"wet_grass": "false"})
    resFactor = ft.marginals(["rain"])
    np.testing.assert_array_almost_equal(resFactor.get_potential(), np.array([0.158858, 0.841142])) | SocialCognitiveSystems/PRIMO | [4, 3, 4, 10, 1486413504] |
def test_jointree_marginal_evidence_trivial_multiple_evidence(self):
    ft = FactorTree.create_jointree(self.bn)
    ft.set_evidence({"sprinkler": "true", "rain": "false"})
    resFactor = ft.marginals(["wet_grass"])
    np.testing.assert_array_almost_equal(resFactor.get_potential(), np.array([0.1, 0.9... | SocialCognitiveSystems/PRIMO | [4, 3, 4, 10, 1486413504] |
def test_jointree_marginal_evidence(self):
    ft = FactorTree.create_jointree(self.bn)
    ft.set_evidence({"winter": "true"})
    resFactor = ft.marginals(["wet_grass"])
    np.testing.assert_array_almost_equal(resFactor.get_potential(), np.array([0.668, 0.332])) | SocialCognitiveSystems/PRIMO | [4, 3, 4, 10, 1486413504] |
def test_jointree_marginal_evidence_multiple_evidence(self):
    ft = FactorTree.create_jointree(self.bn)
    ft.set_evidence({"winter": "true", "rain": "false"})
    resFactor = ft.marginals(["wet_grass"])
    np.testing.assert_array_almost_equal(resFactor.get_potential(), np.array([0.02, 0.98])) | SocialCognitiveSystems/PRIMO | [4, 3, 4, 10, 1486413504] |
def test_jointree_marginal_soft_evidence(self):
    bn = BayesianNetwork()
    cloth = DiscreteNode("cloth", ["green", "blue", "red"])
    sold = DiscreteNode("sold") | SocialCognitiveSystems/PRIMO | [4, 3, 4, 10, 1486413504] |
def __init__(self, get_response=None):
    self.get_response = get_response
    super(MiddlewareMixin, self).__init__() | rdegges/django-sslify | [338, 46, 338, 10, 1335654320] |
def _single_peak(values, relative_cutoff, minval, invalidate_distance):
"""Takes a single peak if it is high enough compared to all other peaks.
Args:
values: 1D tensor of values to take the peaks on.
relative_cutoff: The fraction of the highest peak which all other peaks
should be below.
minval:... | tensorflow/moonlight | [
311,
68,
311,
25,
1523981102
] |
def do_filter_peaks():
"""Process the peaks if they are non-empty.
Returns:
The filtered peaks. Peaks below the cutoff when compared to the highest
peak are removed. If the peaks are invalid, then an empty list is
returned.
"""
histogram_size = tf.shape(staffline... | tensorflow/moonlight | [
311,
68,
311,
25,
1523981102
] |
def _estimate_staffline_thickness(columns, values, lengths, staffline_distance):
"""Estimates the staffline thickness of a music score.
Args:
columns: 1D array. The column indices of each consecutive vertical run.
values: 1D array. The value (0 or 1) of each vertical run.
lengths: 1D array. The length ... | tensorflow/moonlight | [
311,
68,
311,
25,
1523981102
] |
def save(self, *args, **kwargs):
"""Perform descriptor validation and save object."""
if self.descriptor_schema:
try:
validate_schema(self.descriptor, self.descriptor_schema.schema)
self.descriptor_dirty = False
except DirtyError:
                s... | genialis/resolwe | [34, 27, 34, 7, 1428595640] |
def duplicate(self, contributor):
"""Duplicate (make a copy) ``Collection`` objects."""
return bulk_duplicate(collections=self, contributor=contributor) | genialis/resolwe | [
34,
27,
34,
7,
1428595640
] |
def is_duplicate(self):
"""Return True if collection is a duplicate."""
return bool(self.duplicated) | genialis/resolwe | [
34,
27,
34,
7,
1428595640
] |
def __virtual__():
    return True | saltstack/salt | [13089, 5388, 13089, 3074, 1298233016] |
def process_document_splitter_sample(
    project_id: str, location: str, processor_id: str, file_path: str | googleapis/python-documentai | [71, 29, 71, 3, 1575936569] |
def page_refs_to_string(page_refs: dict) -> str:
""" Converts a page ref to a string describing the page or page range."""
if len(page_refs) == 1:
num = str(int(page_refs[0].page) + 1)
return f"page {num} is"
else:
start = str(int(page_refs[0].page) + 1)
end = str(int(page_re... | googleapis/python-documentai | [
71,
29,
71,
3,
1575936569
] |
def sample_complete_trial():
    # Create a client
    client = aiplatform_v1.VizierServiceClient()
    # Initialize request argument(s)
    request = aiplatform_v1.CompleteTrialRequest(
        name="name_value",
    )
    # Make the request
    response = client.complete_trial(request=request)
    # Handle the res... | googleapis/python-aiplatform | [306, 205, 306, 52, 1600875819] |
def requires_submit(func):
"""
Decorator to ensure that a submit has been performed before
calling the method.
Args:
func (callable): test function to be decorated.
Returns:
callable: the decorated function.
"""
@functools.wraps(func)
def _wrapper(self, *args, **kwargs)... | QISKit/qiskit-sdk-py | [
3515,
1875,
3515,
1061,
1488560562
] |
def __init__(self, backend, job_id, fn, qobj):
    super().__init__(backend, job_id)
    self._fn = fn
    self._qobj = qobj
    self._future = None | QISKit/qiskit-sdk-py | [3515, 1875, 3515, 1061, 1488560562] |
def result(self, timeout=None):
    # pylint: disable=arguments-differ
    """Get job result. The behavior is the same as the underlying
    concurrent Future objects,
    https://docs.python.org/3/library/concurrent.futures.html#future-objects
    Args:
        timeout (float): number of seco... | QISKit/qiskit-sdk-py | [3515, 1875, 3515, 1061, 1488560562] |
def cancel(self):
    return self._future.cancel() | QISKit/qiskit-sdk-py | [3515, 1875, 3515, 1061, 1488560562] |
def status(self):
"""Gets the status of the job by querying the Python's future
Returns:
qiskit.providers.JobStatus: The current JobStatus
Raises:
JobError: If the future is in unexpected state
concurrent.futures.TimeoutError: if timeout occurred.
""... | QISKit/qiskit-sdk-py | [
3515,
1875,
3515,
1061,
1488560562
] |
def on_init(self,
            prefix='sqlite',
            id=None,
            db=None,
            **kwargs):
    """
    Adds processing to initialization
    :param prefix: the main keyword for configuration of this space
    :type prefix: str
    :param id: the unique identifi... | bernard357/shellbot | [10, 3, 10, 4, 1491172109] |
def get_db(self):
"""
Gets a handle on the database
"""
db = self.context.get(self.prefix+'.db', 'store.db')
return sqlite3.connect(db) | bernard357/shellbot | [
10,
3,
10,
4,
1491172109
] |
def _set(self, key, value, handle=None):
"""
Sets a permanent value
:param key: name of the value
:type key: str
:param value: actual value
:type value: any serializable type is accepted
:param handle: an optional instance of a Sqlite database
:type han... | bernard357/shellbot | [
10,
3,
10,
4,
1491172109
] |
def show_metrics(step_value, metric_values, loss_value=None):
  print('{}: {}nominal accuracy = {:.2f}%, '
        'verified = {:.2f}%, attack = {:.2f}%'.format(
            step_value,
            'loss = {}, '.format(loss_value) if loss_value is not None else '',
            metric_values.nominal_accuracy * 100.,
            ... | deepmind/interval-bound-propagation | [135, 31, 135, 2, 1542969398] |
def main(unused_args):
  logging.info('Training IBP on %s...', FLAGS.dataset.upper())
  step = tf.train.get_or_create_global_step()
  # Learning rate.
  learning_rate = ibp.parse_learning_rate(step, FLAGS.learning_rate)
  # Dataset.
  input_bounds = (0., 1.)
  num_classes = 10
  if FLAGS.dataset == 'mnist':
    data_... | deepmind/interval-bound-propagation | [135, 31, 135, 2, 1542969398] |
def _module_dir(handle):
"""Returns the directory where to cache the module."""
cache_dir = resolver.tfhub_cache_dir(use_temp=True)
return resolver.create_local_module_dir(
cache_dir,
hashlib.sha1(handle.encode("utf8")).hexdigest()) | tensorflow/hub | [
3285,
1698,
3285,
3,
1520841342
] |
def is_supported(self, handle):
  # HTTP(S) handles are assumed to point to tarfiles.
  if not self.is_http_protocol(handle):
    return False
  # AUTO defaults to COMPRESSED
  load_format = resolver.model_load_format()
  return load_format in [
      resolver.ModelLoadFormat.COMPRESSED.value,
      res... | tensorflow/hub | [3285, 1698, 3285, 3, 1520841342] |
def download(handle, tmp_dir):
"""Fetch a module via HTTP(S), handling redirect and download headers."""
request = urllib.request.Request(
self._append_compressed_format_query(handle))
response = self._call_urlopen(request)
return resolver.DownloadManager(handle).download_and_uncompres... | tensorflow/hub | [
3285,
1698,
3285,
3,
1520841342
] |
def _lock_file_timeout_sec(self):
  # This method is provided as a convenience to simplify testing.
  return LOCK_FILE_TIMEOUT_SEC | tensorflow/hub | [3285, 1698, 3285, 3, 1520841342] |
def is_supported(self, handle):
return handle.startswith("gs://") and _is_tarfile(handle) | tensorflow/hub | [
3285,
1698,
3285,
3,
1520841342
] |
def download(handle, tmp_dir):
  return resolver.DownloadManager(handle).download_and_uncompress(
      tf.compat.v1.gfile.GFile(handle, "rb"), tmp_dir) | tensorflow/hub | [3285, 1698, 3285, 3, 1520841342] |
def test_kdelta():
    assert np.isclose(kdelta(1, 1), 1.)
    assert np.isclose(kdelta(0, 1), 0.) | quantumlib/ReCirq | [232, 110, 232, 34, 1584057093] |
def test_energy_from_opdm():
"""Build test assuming sampling functions work"""
rhf_objective, molecule, parameters, obi, tbi = make_h6_1_3()
unitary, energy, _ = rhf_func_generator(rhf_objective)
parameters = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
initial_opdm = np.diag([1] * 3 + ... | quantumlib/ReCirq | [
232,
110,
232,
34,
1584057093
] |
def test_mcweeny():
    np.random.seed(82)
    opdm = np.array([[
        0.766034130, -0.27166330, -0.30936072, -0.08471057, -0.04878244,
        -0.01285432
    ],
    [
        -0.27166330, 0.67657015, -0.37519640, -0.02101843,
        -0.03568214, -0.05034585
        ... | quantumlib/ReCirq | [232, 110, 232, 34, 1584057093] |
def GetNotebook():
"""Downloads the ipynb source of Colab notebook"""
notebook = google_message.blocking_request(
"get_ipynb", request="", timeout_sec=120)["ipynb"]
return notebook | google/prog-edu-assistant | [
25,
20,
25,
19,
1550714338
] |
def _accept(random_sample: float, cost_diff: float, temp: float) -> Tuple[bool, float]:
"""Calculates probability and draws if solution should be accepted.
Based on exp(-Delta*E/T) formula.
Args:
random_sample: Uniformly distributed random number in the range [0, 1).
cost_diff: Cost differ... | quantumlib/Cirq | [
3678,
836,
3678,
314,
1513294909
] |
def get_integration_folder():
"""
returns the integration test folder
"""
return os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) | cloudfoundry-community/splunk-firehose-nozzle | [
28,
30,
28,
14,
1467750745
] |
def get_all_affiliations(request):
"""
return a dictionary of affiliation indicators.
The first class affiliations:
["all_employee"]: employee or clinician (include student employee)
["employee"]: True if is current employee (not student employee, clinician)
["clinician"]: True if in uw affilia... | uw-it-aca/myuw | [
13,
6,
13,
3,
1417029795
] |
def epsg_3857_from_proj4():
"""
Return a gdal spatial reference object with
3857 crs using the ImportFromProj4 method.
"""
spatial_ref = SpatialReference()
spatial_ref.ImportFromProj4('+init=epsg:3857')
return spatial_ref | ecometrica/gdal2mbtiles | [
134,
24,
134,
12,
1353362021
] |
def epsg_3857_from_epsg():
"""
Return a gdal spatial reference object with
3857 crs using the FromEPSG method.
"""
spatial_ref = SpatialReference.FromEPSG(EPSG_WEB_MERCATOR)
return spatial_ref | ecometrica/gdal2mbtiles | [
134,
24,
134,
12,
1353362021
] |
def __init__(self):
    super(JoinsInRoomResource, self).__init__()
    self.last_cleared = datetime.utcnow()
    self.request = request | thenetcircle/dino | [139, 6, 139, 11, 1475559609] |
def do_get_with_params(self, room_id: str = None, room_name: str = None):
    return self._do_get(room_id, room_name) | thenetcircle/dino | [139, 6, 139, 11, 1475559609] |
def do_get(self):
    is_valid, msg, json = self.validate_json(self.request, silent=False)
    if not is_valid:
        logger.error('invalid json: %s' % msg)
        return dict()
    logger.debug('GET request: %s' % str(json))
    if 'room_ids' not in json and 'room_names' not in json:
        ... | thenetcircle/dino | [139, 6, 139, 11, 1475559609] |
def _get_last_cleared(self):
    return self.last_cleared | thenetcircle/dino | [139, 6, 139, 11, 1475559609] |
def _start(self, *args, **kwargs):
    self._proc = mock.Mock()
    self._proc.stdin = None
    self._proc.stdout = None
    self._proc.stderr = None | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def setUp(self):
    self.loop = self.new_test_loop()
    self.set_event_loop(self.loop) | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_proc_exited(self):
    waiter = asyncio.Future(loop=self.loop)
    transport, protocol = self.create_transport(waiter)
    transport._process_exited(6)
    self.loop.run_until_complete(waiter)
    self.assertEqual(transport.get_returncode(), 6)
    self.assertTrue(protocol.connection_... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_stdin_stdout(self):
    args = PROGRAM_CAT
    @asyncio.coroutine
    def run(data):
        proc = yield From(asyncio.create_subprocess_exec(
            *args,
            stdin=subprocess.PIPE,
            stdout=sub... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def run(data):
    proc = yield From(asyncio.create_subprocess_exec(
        *args,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        loop=self.loop))
    ... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_shell(self):
    create = asyncio.create_subprocess_shell('exit 7',
        loop=self.loop)
    proc = self.loop.run_until_complete(create)
    exitcode = self.loop.run_until_complete(proc.wait())
    self.assertEqual(exitcode, 7) | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_start_new_session(self):
    def start_new_session():
        os.setsid()
    # start the new process in a new session
    create = asyncio.create_subprocess_shell('exit 8',
        preexec_fn=start_new_session,
        ... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_terminate(self):
    args = PROGRAM_BLOCKED
    create = asyncio.create_subprocess_exec(*args, loop=self.loop)
    proc = self.loop.run_until_complete(create)
    proc.terminate()
    returncode = self.loop.run_until_complete(proc.wait())
    if sys.platform == 'win32':
        self... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_send_signal(self):
    code = '; '.join((
        'import sys, time',
        'print("sleeping")',
        'sys.stdout.flush()',
        'time.sleep(3600)'))
    args = [sys.executable, '-c', code]
    create = asyncio.create_subprocess_exec(*args,
        ... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_stdin_broken_pipe(self):
    proc, large_data = self.prepare_broken_pipe_test()
    @asyncio.coroutine
    def write_stdin(proc, data):
        proc.stdin.write(data)
        yield From(proc.stdin.drain())
    coro = write_stdin(proc, large_data)
    # drain() must raise BrokenPip... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_pause_reading(self):
    limit = 10
    size = (limit * 2 + 1)
    @asyncio.coroutine
    def test_pause_reading():
        code = '\n'.join((
            'import sys',
            'sys.stdout.write("x" * %s)' % size,
            'sys.stdout.flush()',
        ))
        ... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def len_message(message):
    code = 'import sys; data = sys.stdin.read(); print(len(data))'
    proc = yield From(asyncio.create_subprocess_exec(
        sys.executable, '-c', code,
        stdin=asyncio.subprocess.PIPE,
        ... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_cancel_process_wait(self):
    # Issue #23140: cancel Process.wait()
    @asyncio.coroutine
    def cancel_wait():
        proc = yield From(asyncio.create_subprocess_exec(
            *PROGRAM_BLOCKED,
            loop=self.loop))
        ... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def cancel_make_transport():
    coro = asyncio.create_subprocess_exec(*PROGRAM_BLOCKED,
        loop=self.loop)
    task = self.loop.create_task(coro)
    self.loop.call_soon(task.cancel)
    try:
        yield From(task)
    ex... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_cancel_post_init(self):
    @asyncio.coroutine
    def cancel_make_transport():
        coro = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
            *PROGRAM_BLOCKED)
        task = self.loop.create_task(coro)
        self.loop.call_soon(task.ca... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def kill_running():
    create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
        *PROGRAM_BLOCKED)
    transport, protocol = yield From(create)
    non_local = {'kill_called': False}
    def kill():
        non_local['kill_cal... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def test_close_dont_kill_finished(self):
    @asyncio.coroutine
    def kill_running():
        create = self.loop.subprocess_exec(asyncio.SubprocessProtocol,
            *PROGRAM_BLOCKED)
        transport, protocol = yield From(create)
        proc = transport.ge... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def setUp(self):
    policy = asyncio.get_event_loop_policy()
    self.loop = policy.new_event_loop()
    self.set_event_loop(self.loop)
    watcher = self.Watcher()
    watcher.attach_loop(self.loop)
    policy.set_child_watcher(watcher)
    self.addCleanup(poli... | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def setUp(self):
    self.loop = asyncio.ProactorEventLoop()
    self.set_event_loop(self.loop) | haypo/trollius | [186, 27, 186, 6, 1429023346] |
def testSourceManifest(self):
recipestr = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    # random files we have around in archive...
    r.addArchive('asdf.tar.gz', dir='/', package='asdf')
    r.addSource('sourcefile', dir='/var/test/') | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testISOArchiveJoliet(self):
recipestr = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    r.addArchive('jcd.iso', dir='/')
    r.SetModes('%(bindir)s/touch', 0755) | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testISOArchiveRockRidge(self):
recipestr = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    r.addArchive('rrcd.iso', dir='/')
    r.SetModes('%(bindir)s/touch', 0755) | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourceTest1(self):
"""
Test build.source
"""
recipestr = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    # test unpacking and extracting from an RPM
    r.addArchive('tmpwatch-2.9.0.tar.gz', rpm='tmpwatch-2.9.0-2.src.rpm')
    # test unpacking and extracting from a src bz2 RPM
    r.addSource('mkinitrd.spec', rpm='rpm-with-bzip-5.0.29-1.src.rpm')
    # test unpacking and extracting ... | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def access_xz(*args):
    if args[0].endswith('/unlzma'):
        return False
    return realExists(*args) | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourceTestUnlzma(self):
"""
Test build.source
"""
recipestr = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    # test unpacking and extracting from an lzma RPM if xz not available
    r.addArchive('gnome-main-menu-0.9.10-26.x86_64.rpm', dir='/')
    del r.NonMultilibDirectories | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def access_lzma(*args):
    if args[0].endswith('/xz'):
        return False
    return realExists(*args) | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourceTestMissinglzma(self):
"""
Test build.source
"""
recipestr = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    # test unpacking and extracting from an lzma RPM if xz not available
    r.addArchive('gnome-main-menu-0.9.10-26.x86_64.rpm', dir='/') | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def access_nolzma(*args):
    if args[0].split(os.sep)[-1] in ('xz', 'unlzma'):
        return False
    return realExists(*args) | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testUnpackOldRpm30(self):
    # CNY-3210
    # Use a very old version of rpm, that does not have PAYLOADCOMPRESSOR
    # set
    # Downloaded from
    # http://ftpsearch.kreonet.re.kr/pub/tools/utils/rpm/rpm/dist/rpm-3.0.x/
    destdir = os.path.join(self.workDir, 'dest')
    util.mkdirC... | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourcePerms(self):
recipestr = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    # test not preserving world-writeable permissions in builddir
    r.addArchive('worldwriteable.tar.bz2', dir='test-1')
    r.Install('worldwriteable', '/ww/notworldwriteable')
    # test preserving world-writeable permissions in root proxy
    r.addArchive('worldwriteable.tar.bz2',... | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourceTestSRPMCache(self):
"""
Test SRPM lookaside handling (CNY-771)
"""
recipe1 = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    r.macros.release = '1'
    r.macros.srpm = '%(name)s-%(version)s-%(release)s.src.rpm'
    r.addSource('bar', rpm='%(srpm)s')
    r.addSource('baz', rpm='%(srpm)s') | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def setup(r):
    r.macros.release = '2'
    r.macros.srpm = '%(name)s-%(version)s-%(release)s.src.rpm'
    r.addSource('bar', rpm='%(srpm)s')
    r.addSource('baz', rpm='%(srpm)s') | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourceTestSigCheck(self):
"""
Test signatures
"""
# XXX use smaller bz2 file than distcc
recipestr1 = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    r.addArchive('distcc-2.9.tar.bz2', keyid='A0B3E88B')
    r.addArchive('tmpwatch-2.9.0.tar.gz', rpm='tmpwatch-2.9.0-2.src.rpm', keyid='sdds', dir='new-subdir') | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def mockedDownloadPublicKey(slf):
    if slf.keyid == 'A0B3E88B':
        f = file(os.path.join(resources.get_archive(), '0xA0B3E88B.pgp'))
        return openpgpfile.parseAsciiArmorKey(f)
    raise source.SourceError("Failed to retrieve PGP key %s" % slf.keyid) | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def setup(r):
    r.addArchive('distcc-2.9.tar.bz2', keyid='BADBAD') | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourceTestSigCheckFailedDownload(self):
"""
Test a download failure for the key
"""
recipestr1 = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def setup(r):
    r.addArchive('distcc-2.9.tar.bz2', keyid='A0B3E88B')
    r.Create("/usr/foo", contents="Bar!!!\\n") | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def mockedDoDownloadPublicKey(slf, keyServer, lc=listcounter):
    lc.append(None)
    if len(lc) < 7:
        raise transport.TransportError("Blah!")
    f = file(os.path.join(resources.get_archive(), '0xA0B3E88B.pgp'))
    data = openpgpfile.parseAsciiArmorKey(f)
    ... | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testDontCheckKeyOfCommitedSource(self):
    # We choose not to check the public key for sources already committed,
    # instead relying on the check only at the time of commit.
    recipestr1 = """ | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def setup(r):
    r.addArchive('distcc-2.9.tar.bz2', keyid='A0B3E88B')
    r.Create("/usr/foo", contents="Bar!!!\\n") | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def _checkSignature(self, file):
    listcounter.append(None)
    return | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def testSourceTestApplyMacros(self):
"""
Test applymacros
"""
recipestr1 = """ | sassoftware/conary | [
47,
9,
47,
4,
1396904066
] |
def cleanup(r, builddir, destdir):
    pass | sassoftware/conary | [47, 9, 47, 4, 1396904066] |
def setup(r):
    # avoid cleanup
    r.addArchive('tmpwatch-2.9.0.tar.gz', rpm='tmpwatch-2.9.0-2.src.rpm')
    # test applying a patch
    r.macros.bugid = 'BUGID'
    r.addPatch('tmpwatch.fakebug.patch', macros=True) | sassoftware/conary | [47, 9, 47, 4, 1396904066] |