function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def machineName(self) -> str:
    """Name of the machine stored in the workspace."""
    return self._machine_name
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def updatableMachinesModel(self) -> UpdatableMachinesModel:
    """Model of the machines this workspace could be applied to (cast for type checkers)."""
    return cast(UpdatableMachinesModel, self._updatable_machines_model)
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def qualityType(self) -> str:
    """Quality type (e.g. profile category) stored in the workspace."""
    return self._quality_type
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def numSettingsOverridenByQualityChanges(self) -> int:
    """How many settings the workspace's quality-changes profile overrides."""
    return self._num_settings_overridden_by_quality_changes
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def qualityName(self) -> str:
    """Display name of the quality profile stored in the workspace."""
    return self._quality_name
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def intentName(self) -> str:
    """Display name of the intent profile stored in the workspace."""
    return self._intent_name
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def activeMode(self) -> str:
    """Active settings mode (e.g. recommended/custom) stored in the workspace."""
    return self._active_mode
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def hasVisibleSettingsField(self) -> bool:
    """Whether the workspace file contains a visible-settings field."""
    return self._has_visible_settings_field
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def totalNumberOfSettings(self) -> int:
    """Total number of settings in the generic "fdmprinter" definition (0 if it is missing)."""
    definition_containers = ContainerRegistry.getInstance().findDefinitionContainers(id = "fdmprinter")
    if not definition_containers:
        return 0
    return len(definition_containers[0].getAllKeys())
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def numVisibleSettings(self) -> int:
    """Number of settings marked visible in the workspace."""
    return self._num_visible_settings
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def machineConflict(self) -> bool:
    """Whether the workspace's machine conflicts with an existing one."""
    return self._has_machine_conflict
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def qualityChangesConflict(self) -> bool:
    """Whether the workspace's quality-changes profile conflicts with an existing one."""
    return self._has_quality_changes_conflict
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def materialConflict(self) -> bool:
    """Whether the workspace's material conflicts with an existing one."""
    return self._has_material_conflict
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def setResolveStrategy(self, key: str, strategy: Optional[str]) -> None:
    """Record the conflict-resolution strategy for *key*; unknown keys are ignored."""
    if key in self._result:
        self._result[key] = strategy
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def setMachineToOverride(self, machine_name: str) -> None:
    """Remember which existing machine should be overridden when applying the workspace."""
    self._override_machine = machine_name
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def closeBackend(self) -> None:
    """Stop the slicing backend so the UI is not left stuck showing "Slicing..."."""
    Application.getInstance().getBackend().close()
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def setMachineConflict(self, machine_conflict: bool) -> None:
    """Update the machine-conflict flag, emitting the change signal only on a real change."""
    if self._has_machine_conflict == machine_conflict:
        return
    self._has_machine_conflict = machine_conflict
    self.machineConflictChanged.emit()
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def getResult(self) -> Dict[str, Optional[str]]:
    """Return the chosen resolve strategies, blanking entries that have no real conflict."""
    result = self._result
    # A machine choice only matters when there is more than one machine to update.
    if "machine" in result and self.updatableMachinesModel.count <= 1:
        result["machine"] = None
    if "quality_changes" in result and not self._has_quality_changes_conflict:
        result["quality_changes"] = None
    if "material" in result and not self._has_material_conflict:
        result["material"] = None
    # A re-created machine also needs re-created definition changes. A machine strategy
    # of None means no name conflict with existing machines, so new definition changes
    # are created in that case too.
    if "machine" in result:
        if result["machine"] == "new" or (result["machine"] is None and result["definition_changes"] is None):
            result["definition_changes"] = "new"
    return result
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def show(self) -> None:
    """Reset the dialog state and ask the main thread to display it."""
    # A non-main caller blocks on this lock until the dialog is closed again.
    if threading.current_thread() != threading.main_thread():
        self._lock.acquire()
    default = self._default_strategy
    self._result = {
        "machine": default,
        "quality_changes": default,
        "definition_changes": default,
        "material": default,
    }
    self._visible = True
    self.showDialogSignal.emit()
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def notifyClosed(self) -> None:
    """Notify the dialog that it was closed so the waiting thread's lock is released."""
    self._result = {}  # Clear before hiding: the waiting thread reads this once the lock opens.
    self._visible = False
    try:
        self._lock.release()
    except RuntimeError:
        # The lock was not held (dialog was shown from the main thread); nothing to release.
        # The original bare `except:` swallowed every error, including KeyboardInterrupt.
        pass
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def _onVisibilityChanged(self, visible: bool) -> None:
    """Release the waiting thread's lock once the dialog is hidden."""
    if not visible:
        try:
            self._lock.release()
        except RuntimeError:
            # Lock was never acquired (dialog shown from the main thread); ignore.
            # Narrowed from a bare `except:` that hid every error.
            pass
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def onOkButtonClicked(self) -> None:
    """Accept the dialog: hide the view, then hide (and unlock) this controller."""
    self._view.hide()
    self.hide()
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def onCancelButtonClicked(self) -> None:
    """Cancel the dialog: discard any chosen strategies, then hide everything."""
    self._result = {}
    self._view.hide()
    self.hide()
Ultimaker/Cura
[ 4656, 1806, 4656, 2468, 1402923331 ]
def as_unit(v, axis=1):
    """Return array of unit vectors parallel to vectors in `v`.

    Parameters
    ----------
    v : ndarray of float
    axis : int, optional
        Axis along which to normalize length.

    Returns
    -------
    ndarray of float :
        Unit vector of `v`, i.e. `v` divided by its magnitude along `axis`.
    """
    u = np.array(v, dtype=np.float64, copy=True)
    if u.ndim == 1:
        sqmag = u.dot(u)
        # Leave near-zero vectors unscaled to avoid dividing by ~0.
        if sqmag >= EPS:
            u /= sqmag ** 0.5
    else:
        # Squared magnitudes along the requested axis (rows for axis=1, columns otherwise).
        if axis == 1:
            sqmag = np.einsum("...ij,...ij->...i", u, u)
        else:
            sqmag = np.einsum("...ij,...ij->...j", u, u)
        # Near-zero magnitudes are replaced with 1 so those vectors pass through unchanged.
        sqmag[sqmag < EPS] = 1.0
        u /= np.expand_dims(np.sqrt(sqmag), axis)
    return u
keiserlab/e3fp
[ 102, 29, 102, 13, 1442343078 ]
def make_transform_matrix(center, y=None, z=None):
    """Make 4x4 homogenous transformation matrix.

    Given Nx4 array A where A[:, 4] = 1., the transform matrix M should be
    used with dot(M, A.T).T. Order of operations is 1. translation, 2. align
    `y` x `z` plane to yz-plane 3. align `y` to y-axis.

    Parameters
    ----------
    center : 1x3 array of float
        Coordinate that should be centered after transformation.
    y : None or 1x3 array of float
        Vector that should lie on the y-axis after transformation
    z : None or 1x3 array of float
        Vector that after transformation should lie on yz-plane in direction
        of z-axis.

    Returns
    -------
    4x4 array of float
        4x4 homogenous transformation matrix.
    """
    # Step 1: translate `center` to the origin.
    translate = np.identity(4, dtype=np.float64)
    translate[:3, 3] = -np.asarray(center, dtype=np.float64)
    if y is not None:
        y = np.atleast_2d(y)
        if z is None:
            # Only `y` given: a single rotation aligning it with the y-axis.
            rotate = np.identity(4, dtype=np.float64)
            rotate[:3, :3] = make_rotation_matrix(y, Y_AXIS)
        else:
            z = np.atleast_2d(z)
            # First rotate the plane normal (y x z) onto the x-axis, putting
            # the y-z plane into the global yz-plane...
            rotate_norm = np.identity(4, dtype=np.float64)
            x_unit = as_unit(np.cross(y, z))
            rotate_norm[:3, :3] = make_rotation_matrix(x_unit, X_AXIS)
            # ...then rotate the transformed `y` onto the y-axis.
            new_y = np.dot(rotate_norm[:3, :3], y.flatten())
            rotate_y = np.identity(4, dtype=np.float64)
            rotate_y[:3, :3] = make_rotation_matrix(new_y.flatten(), Y_AXIS)
            rotate = np.dot(rotate_y, rotate_norm)
        transform = np.dot(rotate, translate)
    else:
        # No orientation requested: translation only.
        transform = translate
    return transform
keiserlab/e3fp
[ 102, 29, 102, 13, 1442343078 ]
def transform_array(transform_matrix, a):
    """Pad an array with 1s, transform, and return with original dimensions.

    Parameters
    ----------
    transform_matrix : 4x4 array of float
        4x4 homogenous transformation matrix
    a : Nx3 array of float
        Array of 3-D coordinates.

    Returns
    -------
    Nx3 array of float :
        Transformed array
    """
    # Homogeneous pipeline: pad to Nx4, apply M to column vectors, drop the 1s column.
    return unpad_array(np.dot(transform_matrix, pad_array(a).T).T)
keiserlab/e3fp
[ 102, 29, 102, 13, 1442343078 ]
def unpad_array(a, axis=1):
    """Return `a` with the last row removed along `axis`.

    Parameters
    ----------
    a : ndarray
        Array from which to remove a row.
    axis : int, optional
        Axis from which to remove a row (ignored for 1-D input).

    Returns
    -------
    ndarray
        Unpadded array (a view of `a`).
    """
    if a.ndim == 1:
        return a[:-1]
    # Build a per-axis slice tuple so any rank works. The original hard-coded
    # 2-D slicing (`a[:shape[0], :shape[1]]`) and silently ignored `axis` > 1
    # or arrays of rank != 2; for 2-D input this is identical.
    slices = [slice(None)] * a.ndim
    slices[axis] = slice(0, a.shape[axis] - 1)
    return a[tuple(slices)]
keiserlab/e3fp
[ 102, 29, 102, 13, 1442343078 ]
def calculate_angles(vec_arr, ref, ref_norm=None):
    """Calculate angles between vectors in `vec_arr` and `ref` vector.

    If `ref_norm` is not provided, angle ranges between 0 and pi. If it is
    provided, angle ranges between 0 and 2pi. Note that if `ref_norm` is
    orthogonal to `vec_arr` and `ref`, then the angle is rotation around the
    axis, but if a non-orthogonal axis is provided, this may not be the case.

    Parameters
    ----------
    vec_arr : Nx3 array of float
        Array of N 3D vectors.
    ref : 1x3 array of float
        Reference vector
    ref_norm : 1x3 array of float
        Normal vector.

    Returns
    -------
    1-D array
        Array of N angles
    """
    unit_vec_arr = as_unit(vec_arr)
    unit_ref = as_unit(ref).flatten()
    # Clip guards against dot products marginally outside [-1, 1] from rounding.
    ang = np.arccos(np.clip(np.dot(unit_vec_arr, unit_ref), -1.0, 1.0))
    # handle cases where a vector is the origin
    ang[np.all(unit_vec_arr == np.zeros(3), axis=1)] = 0.0
    if ref_norm is not None:
        # Sign of the rotation around `ref_norm` disambiguates the full 2*pi range.
        sign = np.sign(
            np.dot(ref_norm, np.cross(unit_vec_arr, unit_ref).T)
        ).flatten()
        sign[sign == 0] = 1
        ang = rotate_angles(sign * ang, 2 * np.pi)
    return ang
keiserlab/e3fp
[ 102, 29, 102, 13, 1442343078 ]
def quaternion_to_transform_matrix(quaternion, translation=None):
    """Convert quaternion to homogenous 4x4 transform matrix.

    Parameters
    ----------
    quaternion : 4x1 array of float
        Quaternion (w, x, y, z) describing rotation after translation.
    translation : 3x1 array of float, optional
        Translation to be performed before rotation. Defaults to no
        translation.

    Returns
    -------
    4x4 array of float
        Homogenous transformation matrix.
    """
    if translation is None:
        # Created per call: the original used a mutable ndarray default
        # argument, which is shared between all calls.
        translation = np.zeros(3)
    q = np.array(quaternion, dtype=np.float64, copy=True)
    n = np.linalg.norm(q)
    if n < 1e-12:
        # Degenerate (near-zero) quaternion: return the identity.
        # NOTE(review): `translation` is ignored on this path, matching the
        # original behavior — confirm this is intended.
        return np.identity(4, dtype=np.float64)
    q /= n
    q = 2 * np.outer(q, q)
    # fmt: off
    transform_mat = np.array(
        [[1.-q[2, 2]-q[3, 3], q[1, 2]-q[3, 0], q[1, 3]+q[2, 0], 0.],
         [q[1, 2]+q[3, 0], 1.-q[1, 1]-q[3, 3], q[2, 3]-q[1, 0], 0.],
         [q[1, 3]-q[2, 0], q[2, 3]+q[1, 0], 1.-q[1, 1]-q[2, 2], 0.],
         [0., 0., 0., 1.]], dtype=np.float64
    )
    # fmt: on
    transform_mat[:3, 3] = translation
    return transform_mat
keiserlab/e3fp
[ 102, 29, 102, 13, 1442343078 ]
def render_nested(self, node):
    # Mark the node model so tests can verify this render callback actually ran.
    node['been'] = u'here'
ndparker/tdi
[ 8, 2, 8, 2, 1381778054 ]
def test_start_and_stop_one(self):
    """Start a single service, wait until it reports a running status, then kill it."""
    context = SmContext(SmApplication(self.config_dir_override), None, False, False)
    result = actions.start_one(context, "TEST_ONE", False, True, False, None, port=None)
    self.assertTrue(result)
    # Poll until the service reports at least one running instance.
    self.waitForCondition((lambda: len(context.get_service("TEST_ONE").status())), 1)
    context.kill("TEST_ONE", True)
    # After the kill, the service must report no running instances.
    self.assertEqual(context.get_service("TEST_ONE").status(), [])
hmrc/service-manager
[ 56, 37, 56, 10, 1403617754 ]
def test_dropwizard_from_source(self):
    """Start a Dropwizard service (non-source flags all False) and verify it stops cleanly."""
    sm_application = SmApplication(self.config_dir_override)
    context = SmContext(sm_application, None, False, False)
    service_resolver = ServiceResolver(sm_application)
    servicetostart = "DROPWIZARD_NEXUS_END_TO_END_TEST"
    actions.start_and_wait(
        service_resolver,
        context,
        [servicetostart],
        False,
        False,
        False,
        None,
        port=None,
        seconds_to_wait=90,
        append_args=None,
    )
    self.assertIsNotNone(context.get_service(servicetostart).status())
    context.kill(servicetostart, True)
    self.assertEqual(context.get_service(servicetostart).status(), [])
hmrc/service-manager
[ 56, 37, 56, 10, 1403617754 ]
def test_play_from_source(self):
    """Start a Play service from source (first positional flag True) and verify shutdown."""
    sm_application = SmApplication(self.config_dir_override)
    context = SmContext(sm_application, None, False, False)
    service_resolver = ServiceResolver(sm_application)
    servicetostart = "PLAY_NEXUS_END_TO_END_TEST"
    port = None
    secondsToWait = 90
    append_args = None
    actions.start_and_wait(
        service_resolver,
        context,
        [servicetostart],
        True,
        False,
        False,
        None,
        port,
        secondsToWait,
        append_args,
    )
    self.assertIsNotNone(context.get_service(servicetostart).status())
    context.kill(servicetostart, True)
    self.assertEqual(context.get_service(servicetostart).status(), [])
hmrc/service-manager
[ 56, 37, 56, 10, 1403617754 ]
def test_play_from_source_default(self):
    """Same as test_play_from_source but with the source flag False (default mode)."""
    sm_application = SmApplication(self.config_dir_override)
    context = SmContext(sm_application, None, False, False)
    service_resolver = ServiceResolver(sm_application)
    servicetostart = "PLAY_NEXUS_END_TO_END_TEST"
    port = None
    secondsToWait = 90
    append_args = None
    actions.start_and_wait(
        service_resolver,
        context,
        [servicetostart],
        False,
        False,
        False,
        None,
        port,
        secondsToWait,
        append_args,
    )
    self.assertIsNotNone(context.get_service(servicetostart).status())
    context.kill(servicetostart, True)
    self.assertEqual(context.get_service(servicetostart).status(), [])
hmrc/service-manager
[ 56, 37, 56, 10, 1403617754 ]
def test_successful_play_default_run_from_jar_without_waiting(self):
    """Start a Play service from its default fat jar without waiting; always clean up."""
    sm_application = SmApplication(self.config_dir_override)
    context = SmContext(sm_application, None, False, False)
    service_resolver = ServiceResolver(sm_application)
    context.kill_everything(True)
    self.startFakeNexus()
    # Named flags make the long positional call below readable.
    source = False
    fatJar = True
    release = False
    proxy = None
    port = None
    seconds_to_wait = None
    append_args = None
    try:
        servicetostart = ["PLAY_NEXUS_END_TO_END_DEFAULT_JAR_TEST"]
        actions.start_and_wait(
            service_resolver,
            context,
            servicetostart,
            source,
            fatJar,
            release,
            proxy,
            port,
            seconds_to_wait,
            append_args,
        )
    finally:
        # Ensure nothing is left running even if startup failed.
        context.kill_everything(True)
hmrc/service-manager
[ 56, 37, 56, 10, 1403617754 ]
def test_failing_play_from_jar(self):
    """A broken project must time out instead of starting; check the timeout message."""
    sm_application = SmApplication(self.config_dir_override)
    context = SmContext(sm_application, None, False, False)
    service_resolver = ServiceResolver(sm_application)
    context.kill_everything(True)
    self.startFakeNexus()
    try:
        servicetostart = ["BROKEN_PLAY_PROJECT"]
        actions.start_and_wait(
            service_resolver,
            context,
            servicetostart,
            source=False,
            fatjar=True,
            release=False,
            proxy=None,
            port=None,
            seconds_to_wait=2,
            append_args=None,
        )
        self.fail("Did not expect the project to startup.")
    except ServiceManagerException as sme:
        self.assertEqual("Timed out starting service(s): BROKEN_PLAY_PROJECT", sme.args[0])
    finally:
        context.kill_everything(True)
hmrc/service-manager
[ 56, 37, 56, 10, 1403617754 ]
def test_assets_server(self):
    """Start the assets frontend via start_one and verify it stops cleanly."""
    context = SmContext(SmApplication(self.config_dir_override), None, False, False)
    context.kill_everything(True)
    self.startFakeArtifactory()
    actions.start_one(
        context,
        "PYTHON_SIMPLE_SERVER_ASSETS_FRONTEND",
        False,
        True,
        False,
        None,
        port=None,
    )
    self.assertIsNotNone(context.get_service("PYTHON_SIMPLE_SERVER_ASSETS_FRONTEND").status())
    context.kill("PYTHON_SIMPLE_SERVER_ASSETS_FRONTEND", wait=True)
    self.assertEqual(context.get_service("PYTHON_SIMPLE_SERVER_ASSETS_FRONTEND").status(), [])
hmrc/service-manager
[ 56, 37, 56, 10, 1403617754 ]
def set_filename_version(filename, version_number):
    """Write *version_number* into *filename* as a `version = '...'` assignment."""
    content = "version = '{}'\n".format(version_number)
    with open(filename, 'w+') as version_file:
        version_file.write(content)
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def rm(filename):
    """Log and recursively delete *filename*; missing paths are ignored."""
    message = 'Delete {}'.format(filename)
    info(message)
    rmtree(filename, ignore_errors=True)
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def fail(message, *args):
    """Print a %-formatted error message to stderr and exit with status 1."""
    formatted = message % args
    print('Error:', formatted, file=sys.stderr)
    sys.exit(1)
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def git_is_clean():
    """Return True when `git diff --quiet` reports no unstaged changes."""
    diff_process = Popen(['git', 'diff', '--quiet'])
    return diff_process.wait() == 0
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def make_git_tag(tag):
    """Log and create a git tag named *tag*, waiting for the command to finish."""
    info('Tagging "%s"', tag)
    tag_process = Popen(['git', 'tag', tag])
    tag_process.wait()
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def version_lst_to_str(v):
    """Join version components into a dotted string, e.g. [1, 2, 3] -> '1.2.3'."""
    return '.'.join(map(str, v))
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def get_current_version():
    """Parse eralchemy/version.py and return the current version as a list of ints."""
    with open('eralchemy/version.py') as version_file:
        first_line = version_file.readlines()[0]
    namespace = {}
    # Execute only the first line (the `version = '...'` assignment) to pick up the value.
    exec(first_line, namespace)
    return version_str_to_lst(namespace['version'])
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def get_next_version(major, minor, fix, current_version):
    """Return the next [major, minor, fix] version for the single bump flag set.

    Parameters
    ----------
    major, minor, fix : bool
        Which component to bump; exactly one is expected to be truthy
        (checked in priority order major > minor > fix).
    current_version : sequence of three ints
        The version being bumped.

    Raises
    ------
    UserWarning
        If none of the flags is set (the original raised it without a message).
    """
    if major:
        return [current_version[0] + 1, 0, 0]
    if minor:
        return [current_version[0], current_version[1] + 1, 0]
    if fix:
        return [current_version[0], current_version[1], current_version[2] + 1]
    raise UserWarning('One of major, minor or fix must be requested.')
Alexis-benoist/eralchemy
[ 1000, 114, 1000, 47, 1430341643 ]
def solve_cg(v: GridVariableVector, q0: GridVariable, rtol: float = 1e-6, atol: float = 1e-6, maxiter: Optional[int] = None) -> GridArray: """Conjugate gradient solve for the pressure such that continuity is enforced. Returns a pressure correction `q` such that `div(v - grad(q)) == 0`. The relationship between `q` and our actual pressure estimate is given by `p = q * density / dt`. Args: v: the velocity field. q0: an initial value, or "guess" for the pressure correction. A common choice is the correction from the previous time step. Also specifies the boundary conditions on `q`. rtol: relative tolerance for convergence. atol: absolute tolerance for convergence. maxiter: optional int, the maximum number of iterations to perform. Returns: A pressure correction `q` such that `div(v - grad(q))` is zero. """ # TODO(jamieas): add functionality for non-uniform density. rhs = fd.divergence(v) def laplacian_with_bcs(array: GridArray) -> GridArray: variable = grids.GridVariable(array, q0.bc) return fd.laplacian(variable) q, _ = jax.scipy.sparse.linalg.cg( laplacian_with_bcs, rhs, x0=q0.array, tol=rtol, atol=atol, maxiter=maxiter) return q
google/jax-cfd
[ 434, 58, 434, 34, 1616435172 ]
def projection( v: GridVariableVector, solve: Callable = solve_fast_diag,
google/jax-cfd
[ 434, 58, 434, 34, 1616435172 ]
def __virtual__():
    """
    NAPALM library must be installed for this module to work and run in a (proxy) minion.
    """
    # Delegate the availability check to the shared NAPALM helper.
    return salt.utils.napalm.virtual(__opts__, __virtualname__, __file__)
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def _update_config( template_name, template_source=None, template_hash=None, template_hash_name=None, template_user="root", template_group="root", template_mode="755", template_attrs="--------------e----", saltenv=None, template_engine="jinja", skip_verify=False, defaults=None, test=False, commit=True, debug=False, replace=False, **template_vars
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def replace_pattern( name, pattern, repl, count=0, flags=8, bufsize=1, append_if_not_found=False, prepend_if_not_found=False, not_found_content=None, search_only=False, show_changes=True, backslash_literal=False, source="running", path=None, test=False, replace=True, debug=False, commit=True,
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def saved( name, source="running", user=None, group=None, mode=None, attrs=None, makedirs=False, dir_mode=None, replace=True, backup="", show_changes=True, create=True, tmp_dir="", tmp_ext="", encoding=None, encoding_errors="strict", allow_empty=False, follow_symlinks=True, check_cmd=None, win_owner=None, win_perms=None, win_deny_perms=None, win_inheritance=True, win_perms_reset=False, **kwargs
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def managed( name, template_name=None, template_source=None, template_hash=None, template_hash_name=None, saltenv="base", template_engine="jinja", skip_verify=False, context=None, defaults=None, test=False, commit=True, debug=False, replace=False, commit_in=None, commit_at=None, revert_in=None, revert_at=None, **template_vars
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def commit_cancelled(name):
    """
    .. versionadded:: 2019.2.0

    Cancel a commit scheduled to be executed via the ``commit_in`` and
    ``commit_at`` arguments from the
    :py:func:`net.load_template <salt.modules.napalm_network.load_template>` or
    :py:func:`net.load_config <salt.modules.napalm_network.load_config>`
    execution functions. The commit ID is displayed when the commit is
    scheduled via the functions named above.

    State SLS Example:

    .. code-block:: yaml

        '20180726083540640360':
          netconfig.commit_cancelled
    """
    state_ret = {"name": name, "result": None, "changes": {}, "comment": ""}
    if __opts__["test"]:
        state_ret["comment"] = "It would cancel commit #{}".format(name)
        return state_ret
    cancel_result = __salt__["net.cancel_commit"](name)
    state_ret.update(cancel_result)
    return state_ret
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def __init__(self,
             min_clusters=None,
             max_clusters=None,
             refinement_options=None,
             autotune=None,
             laplacian_type=None,
             stop_eigenvalue=1e-2,
             row_wise_renorm=False,
             custom_dist="cosine",
             max_iter=300,
             constraint_options=None,
             eigengap_type=EigenGapType.Ratio,
             affinity_function=utils.compute_affinity_matrix,
             post_eigen_cluster_function=custom_distance_kmeans.run_kmeans):
    """Constructor of the clusterer.

    Args:
      min_clusters: minimal number of clusters allowed (only effective if not
        None)
      max_clusters: maximal number of clusters allowed (only effective if not
        None), can be used together with min_clusters to fix the number of
        clusters
      refinement_options: a RefinementOptions object that contains refinement
        arguments for the affinity matrix. If None, we will not refine
      autotune: an AutoTune object to automatically search p_percentile
      laplacian_type: a LaplacianType. If None, we do not use a laplacian
        matrix
      stop_eigenvalue: when computing the number of clusters using Eigen Gap,
        we do not look at eigen values smaller than this value
      row_wise_renorm: if True, perform row-wise re-normalization on the
        spectral embeddings
      custom_dist: str or callable. custom distance measure for k-means. If a
        string, "cosine", "euclidean", "mahalanobis", or any other distance
        functions defined in scipy.spatial.distance can be used
      max_iter: the maximum number of iterations for the custom k-means
      constraint_options: a ConstraintOptions object that contains constraint
        arguments
      eigengap_type: the type of the eigengap computation
      affinity_function: a function to compute the affinity matrix from the
        embeddings. This defaults to (cos(x,y)+1)/2
      post_eigen_cluster_function: a function to cluster the spectral
        embeddings after the eigenvalue computations. This function must have
        the same signature as custom_distance_kmeans.run_kmeans
    """
    self.min_clusters = min_clusters
    self.max_clusters = max_clusters
    # Fall back to default refinement options when none (or a falsy value) is given.
    if not refinement_options:
        self.refinement_options = refinement.RefinementOptions()
    else:
        self.refinement_options = refinement_options
    self.autotune = autotune
    self.laplacian_type = laplacian_type
    self.row_wise_renorm = row_wise_renorm
    self.stop_eigenvalue = stop_eigenvalue
    self.custom_dist = custom_dist
    self.max_iter = max_iter
    self.constraint_options = constraint_options
    self.eigengap_type = eigengap_type
    self.affinity_function = affinity_function
    self.post_eigen_cluster_function = post_eigen_cluster_function
wq2012/SpectralCluster
[ 400, 66, 400, 1, 1547834748 ]
def progress(index, size, for_what='当前进度', step=10):
    """Print a progress line roughly every `size / step` items.

    Parameters
    ----------
    index : int
        Zero-based index of the current item.
    size : int
        Total number of items.
    for_what : str, optional
        Label printed with the progress line.
    step : int, optional
        Number of progress ticks over the whole run.
    """
    # Guard against size < step: the original integer division then produced a
    # zero block size and crashed with ZeroDivisionError on the modulo below.
    block_size = max(1, int(size / step))
    if index % block_size == 0:
        crt = int(index / block_size)
        print('%s ==> [%d / %d]' % (for_what, crt, step))
DannyLee1991/article_cosine_similarity
[ 8, 1, 8, 2, 1504659733 ]
def _log_time(func):
    """Decorator: print the wall-clock time `func` takes on each call.

    Returns a wrapper that prints "start", runs `func`, prints the elapsed
    time, and returns `func`'s result.
    """
    import functools

    @functools.wraps(func)  # preserve func's name/docstring on the wrapper
    def wrapper(*args, **kwargs):
        print("start")
        start_time = time.time()
        # The original special-cased the zero-argument call; `func(*args, **kwargs)`
        # handles that case identically.
        result = func(*args, **kwargs)
        cost_time = time.time() - start_time
        print("[%s] cost time -> %s" % (func.__name__, cost_time))
        return result
    return wrapper
DannyLee1991/article_cosine_similarity
[ 8, 1, 8, 2, 1504659733 ]
def line(log_str, style='-'):
    """Print *log_str* framed by twelve *style* characters on each side."""
    border = style * 12
    print(border + str(log_str) + border)
DannyLee1991/article_cosine_similarity
[ 8, 1, 8, 2, 1504659733 ]
def setUp(self):
    """Post auction results for every lot so the tender reaches active.qualification."""
    super(TenderLotAwardCheckResourceTest, self).setUp()
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.get('/tenders/{}/auction'.format(self.tender_id))
    auction_bids_data = response.json['data']['bids']
    # Feed the same bid data back as the auction result for each lot.
    for lot_id in self.initial_lots:
        response = self.app.post_json('/tenders/{}/auction/{}'.format(self.tender_id, lot_id['id']),
                                      {'data': {'bids': auction_bids_data}})
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.content_type, 'application/json')
    response = self.app.get('/tenders/{}'.format(self.tender_id))
    self.assertEqual(response.json['data']['status'], "active.qualification")
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def setUp(self): super(TenderAwardComplaintResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': self.initial_bids[0]['id']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def setUp(self): super(TenderLotAwardComplaintResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) bid = self.initial_bids[0] response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': bid['id'], 'lotID': bid['lotValues'][0]['relatedLot']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def setUp(self): super(TenderAwardComplaintDocumentResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': self.initial_bids[0]['id']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth # Create complaint for award self.bid_token = self.initial_bids_tokens.values()[0] response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format( self.tender_id, self.award_id, self.bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization}}) complaint = response.json['data'] self.complaint_id = complaint['id'] self.complaint_owner_token = response.json['access']['token']
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def setUp(self): super(Tender2LotAwardComplaintDocumentResourceTest, self).setUp() # Create award bid = self.initial_bids[0] auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': bid['id'], 'lotID': bid['lotValues'][0]['relatedLot']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth # Create complaint for award bid_token = self.initial_bids_tokens.values()[0] response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format( self.tender_id, self.award_id, bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization}}) complaint = response.json['data'] self.complaint_id = complaint['id'] self.complaint_owner_token = response.json['access']['token']
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def setUp(self): super(TenderAwardDocumentResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': self.initial_bids[0]['id']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def setUp(self): super(Tender2LotAwardDocumentResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) bid = self.initial_bids[0] response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': bid['id'], 'lotID': bid['lotValues'][0]['relatedLot']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def suite():
    """Assemble all award-related test cases into a single test suite."""
    cases = (
        Tender2LotAwardComplaintDocumentResourceTest,
        Tender2LotAwardComplaintResourceTest,
        Tender2LotAwardDocumentResourceTest,
        Tender2LotAwardResourceTest,
        TenderAwardComplaintDocumentResourceTest,
        TenderAwardComplaintResourceTest,
        TenderAwardDocumentResourceTest,
        TenderAwardResourceTest,
        TenderLotAwardResourceTest,
    )
    combined = unittest.TestSuite()
    for case in cases:
        combined.addTest(unittest.makeSuite(case))
    return combined
openprocurement/openprocurement.tender.belowthreshold
[ 2, 13, 2, 29, 1487334011 ]
def __init__(self):
    # Delegate straight to the parent initializer; this subclass adds no state of its own.
    super().__init__()
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def connect(self):
    # Placeholder: concrete client classes establish their connection here.
    pass
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def disconnect(self):
    # Placeholder: concrete client classes tear down their connection here.
    pass
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def __enter__(self):
    # Context-manager entry: connecting yields the usable client object.
    return self.connect()
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def __init__(self, core_management_host, core_management_port, svc=None):
    """Initialize the client from an existing service record, or discover one
    via the core management API, and derive the service/management base URLs."""
    try:
        if svc:
            self.service = svc
        else:
            # Discovery populates self.service via the `service` setter.
            self.connect(core_management_host, core_management_port)
        self.base_url = '{}:{}'.format(self.service._address, self.service._port)
        self.management_api_url = '{}:{}'.format(self.service._address, self.service._management_port)
    except Exception:
        # Any failure (validation, discovery, missing attributes) is surfaced
        # uniformly as an invalid service instance.
        raise InvalidServiceInstance
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def base_url(self):
    # Read-only access to the storage service's "address:port" URL.
    return self.__base_url
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def base_url(self, url):
    # Setter counterpart used after the service record is resolved.
    self.__base_url = url
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def service(self):
    # The validated storage micro-service record this client talks to.
    return self.__service
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def service(self, svc):
    """Validate that *svc* is a Storage-type ServiceRecord before storing it."""
    if not isinstance(svc, ServiceRecord):
        warning_message = 'Storage should be a valid FogLAMP micro-service instance'
        _LOGGER.warning(warning_message)
        raise InvalidServiceInstance
    if not getattr(svc, "_type") == "Storage":
        warning_message = 'Storage should be a valid *Storage* micro-service instance'
        _LOGGER.warning(warning_message)
        raise InvalidServiceInstance
    self.__service = svc
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def connect(self, core_management_host, core_management_port):
    """Discover the storage service via the core management API and keep its record.

    Returns self so the call can be chained (e.g. from a context manager).
    """
    svc = self._get_storage_service(host=core_management_host, port=core_management_port)
    # An empty discovery response means no storage service is registered.
    if len(svc) == 0:
        raise InvalidServiceInstance
    self.service = ServiceRecord(s_id=svc["id"], s_name=svc["name"], s_type=svc["type"], s_port=svc["service_port"],
                                 m_port=svc["management_port"], s_address=svc["address"], s_protocol=svc["protocol"])
    return self
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def __init__(self, core_mgt_host, core_mgt_port, svc=None):
    """Initialize via the parent client, then mirror the resolved base URL
    onto the class so class-level helpers can reach it."""
    super().__init__(core_management_host=core_mgt_host, core_management_port=core_mgt_port, svc=svc)
    self.__class__._base_url = self.base_url
foglamp/FogLAMP
[ 68, 41, 68, 7, 1492706127 ]
def createDataSet():
    """Return the toy fish dataset and its feature labels (discrete values)."""
    samples = [
        [1, 1, 'yes'],
        [1, 1, 'yes'],
        [1, 0, 'no'],
        [0, 1, 'no'],
        [0, 1, 'no'],
    ]
    feature_labels = ['no surfacing', 'flippers']
    return samples, feature_labels
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def calcShannonEnt(dataSet):
    """Compute the Shannon entropy of the class labels (last column) of dataSet."""
    label_counts = {}
    for row in dataSet:
        label = row[-1]
        label_counts[label] = label_counts.get(label, 0) + 1
    total = len(dataSet)
    entropy = 0.0
    for count in label_counts.values():
        probability = count / float(total)
        entropy -= probability * log(probability, 2)  # log base 2
    return entropy
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def splitDataSet(dataSet, axis, value):
    """Return rows whose feature at `axis` equals `value`, with that feature removed."""
    return [row[:axis] + row[axis + 1:] for row in dataSet if row[axis] == value]
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def chooseBestFeatureToSplit(dataSet):
    """Return the index of the feature whose split yields the largest
    information gain (reduction in Shannon entropy).

    The last column of each row is the class label and is never a
    candidate feature.  Returns -1 if no split improves on the base
    entropy.
    """
    numFeatures = len(dataSet[0]) - 1
    baseEntropy = calcShannonEnt(dataSet)
    bestInfoGain = 0.0
    bestFeature = -1
    total = float(len(dataSet))
    for featIndex in range(numFeatures):
        uniqueVals = {example[featIndex] for example in dataSet}
        # Expected entropy after splitting on this feature.
        newEntropy = 0.0
        for value in uniqueVals:
            subset = splitDataSet(dataSet, featIndex, value)
            newEntropy += (len(subset) / total) * calcShannonEnt(subset)
        infoGain = baseEntropy - newEntropy
        if infoGain > bestInfoGain:
            bestInfoGain = infoGain
            bestFeature = featIndex
    return bestFeature
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def majorityCnt(classList):
    """Return the most frequent class label in *classList*.

    Ties keep the relative order produced by the stable descending sort.
    """
    classCount = {}
    for vote in classList:
        # IDIOM: dict.get avoids the "if key in d.keys()" double lookup.
        classCount[vote] = classCount.get(vote, 0) + 1
    # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; items() works on both.
    sortedClassCount = sorted(classCount.items(),
                              key=operator.itemgetter(1), reverse=True)
    return sortedClassCount[0][0]
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def createTree(dataSet, labels):
    """Recursively build a decision tree as nested dicts.

    NOTE: *labels* is consumed destructively (the chosen feature's label
    is deleted), matching the original behaviour — callers that need the
    list afterwards should pass a copy.
    """
    classList = [example[-1] for example in dataSet]
    # Base case 1: every remaining row shares one class label.
    if classList.count(classList[0]) == len(classList):
        return classList[0]
    # Base case 2: only the label column remains — vote for the majority.
    if len(dataSet[0]) == 1:
        return majorityCnt(classList)
    bestFeat = chooseBestFeatureToSplit(dataSet)
    bestFeatLabel = labels[bestFeat]
    del labels[bestFeat]
    tree = {bestFeatLabel: {}}
    for value in {example[bestFeat] for example in dataSet}:
        # Copy labels so sibling branches see the same remaining set.
        tree[bestFeatLabel][value] = createTree(
            splitDataSet(dataSet, bestFeat, value), labels[:])
    return tree
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def classify(inputTree, featLabels, testVec):
    """Walk a decision tree built by createTree and return the predicted
    class label for *testVec*.

    Args:
        inputTree: nested-dict tree; each internal node is
            {feature_label: {feature_value: subtree_or_label}}.
        featLabels: feature labels, positionally matching *testVec*.
        testVec: feature values for the instance to classify.
    """
    # BUG FIX: inputTree.keys()[0] is Python 2 only — dict_keys is not
    # subscriptable in Python 3.  next(iter(...)) works on both.
    firstStr = next(iter(inputTree))
    secondDict = inputTree[firstStr]
    featIndex = featLabels.index(firstStr)
    key = testVec[featIndex]
    valueOfFeat = secondDict[key]
    if isinstance(valueOfFeat, dict):
        # Internal node: recurse into the matching subtree.
        return classify(valueOfFeat, featLabels, testVec)
    return valueOfFeat  # leaf: the class label itself
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def storeTree(inputTree, filename):
    """Serialise *inputTree* to *filename* with pickle.

    BUG FIX: pickle writes bytes, so the file must be opened in binary
    mode ('wb'); text mode raises TypeError on Python 3.  A `with`
    statement guarantees the file is closed even if dump fails.
    """
    import pickle
    with open(filename, 'wb') as fw:
        pickle.dump(inputTree, fw)
onehao/opensource
[ 1, 1, 1, 1, 1414656394 ]
def Params(cls):
    """Decoder Params for the Waymo Open Dataset, extending the base
    params with Waymo-specific options (visualisation flag, Waymo AP
    metric, extra AP metrics, and residual saving)."""
    p = super().Params()
    p.Define(
        'draw_visualizations', False, 'Boolean for whether to draw '
        'visualizations. This is independent of laser_sampling_rate.')
    # Use the Waymo AP metric configured with Waymo's dataset metadata.
    p.ap_metric = waymo_ap_metric.WaymoAPMetrics.Params(
        waymo_metadata.WaymoMetadata())
    p.Define(
        'extra_ap_metrics', {}, 'Dictionary of extra AP metrics to run in the decoder. The key'
        'is the name of the metric and the value is a sub-class of '
        'APMetric')
    p.Define(
        'save_residuals', False,
        'If True, this expects the residuals and ground-truth to be available '
        'in the decoder output dictionary, and it will save it to the decoder '
        'output file. See decode_include_residuals in PointDetectorBase '
        'for details.')
    return p
tensorflow/lingvo
[ 2689, 429, 2689, 115, 1532471428 ]
def ProcessOutputs(self, input_batch, model_outputs):
    """Produce additional decoder outputs for WaymoOpenDataset.

    Args:
      input_batch: A .NestedMap of the inputs to the model.
      model_outputs: A .NestedMap of the outputs of the model, including::
        - per_class_predicted_bboxes: [batch, num_classes, num_boxes, 7] float
          Tensor with per class 3D (7 DOF) bounding boxes.
        - per_class_predicted_bbox_scores: [batch, num_classes, num_boxes]
          float Tensor with per class, per box scores.
        - per_class_valid_mask: [batch, num_classes, num_boxes] masking Tensor
          indicating which boxes were still kept after NMS for each class.

    Returns:
      A NestedMap of additional decoder outputs needed for
      PostProcessDecodeOut.
    """
    # Everything needed here comes from the input batch, not the model.
    del model_outputs
    p = self.params
    input_labels = input_batch.labels
    input_metadata = input_batch.metadata
    # Unique per-frame identifier: "<run_segment>_<run_start_offset>".
    source_ids = tf.strings.join([
        input_metadata.run_segment,
        tf.as_string(input_metadata.run_start_offset)
    ],
                                 separator='_')
    ret = py_utils.NestedMap({
        'num_points_in_bboxes': input_batch.labels.bboxes_3d_num_points,
        # Ground truth.
        'bboxes_3d': input_labels.bboxes_3d,
        'bboxes_3d_mask': input_labels.bboxes_3d_mask,
        'labels': input_labels.labels,
        'label_ids': input_labels.label_ids,
        'speed': input_labels.speed,
        'acceleration': input_labels.acceleration,
        # Fill the following in.
        'source_ids': source_ids,
        'difficulties': input_labels.single_frame_detection_difficulties,
        'unfiltered_bboxes_3d_mask': input_labels.unfiltered_bboxes_3d_mask,
        'run_segment': input_metadata.run_segment,
        'run_start_offset': input_metadata.run_start_offset,
        'pose': input_metadata.pose,
    })
    # Optionally attach a subsampled point cloud for visualisation.
    if p.draw_visualizations:
        laser_sample = self._SampleLaserForVisualization(
            input_batch.lasers.points_xyz, input_batch.lasers.points_padding)
        ret.update(laser_sample)
    return ret
tensorflow/lingvo
[ 2689, 429, 2689, 115, 1532471428 ]
def raises_exception(exc_class, msg=None):
    """Decorator for TestCase methods that are expected to raise *exc_class*.

    Fails the test if no exception is raised.  If *msg* is given it is
    compared against the exception's first argument; if the exception has
    no args at all, *msg* must be None.
    """
    import functools

    def decorator(func):
        # IDIOM FIX: functools.wraps preserves __name__, __doc__ and other
        # metadata instead of hand-copying only __name__.
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            try:
                func(self, *args, **kwargs)
                self.fail("expected exception %s wasn't raised"
                          % exc_class.__name__)
            except exc_class as e:
                if not e.args:
                    self.assertEqual(msg, None)
                elif msg is not None:
                    self.assertEqual(
                        e.args[0], msg,
                        "incorrect exception message. expected '%s', got '%s'"
                        % (msg, e.args[0]))
        return wrapper
    return decorator
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def raises_if(test, cond, exc_class, exc_msg=None):
    """Generator asserting that the wrapped block raises *exc_class*
    exactly when *cond* is true (used as a context manager elsewhere).

    *exc_msg* may be an exact message, or use '...' as a leading and/or
    trailing wildcard: '...middle...', 'prefix...', '...suffix'.
    """
    def check_message(e):
        # Match str(e) against exc_msg, honouring '...' wildcards.
        if exc_msg is None:
            return
        wild_start = exc_msg.startswith('...') and exc_msg != '...'
        wild_end = exc_msg.endswith('...')
        if wild_start and wild_end:
            test.assertIn(exc_msg[3:-3], str(e))
        elif wild_start:
            test.assertTrue(str(e).endswith(exc_msg[3:]))
        elif wild_end:
            test.assertTrue(str(e).startswith(exc_msg[:-3]))
        else:
            test.assertEqual(str(e), exc_msg)

    try:
        yield
    except exc_class as e:
        # The expected exception arrived: it must have been expected.
        test.assertTrue(cond)
        check_message(e)
    else:
        # No exception: the condition must not have required one.
        test.assertFalse(cond)
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def flatten(x):
    """Recursively flatten nested iterables into a single flat list.

    Strings (and bytes) are treated as atoms, not iterated character by
    character.

    BUG FIX: ``basestring`` does not exist in Python 3 (NameError);
    ``(str, bytes)`` covers the same intent on Python 3.
    """
    result = []
    for el in x:
        if hasattr(el, "__iter__") and not isinstance(el, (str, bytes)):
            result.extend(flatten(el))
        else:
            result.append(el)
    return result
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def __init__(con, database):
    # Mock connection object: remember the owning database and mimic the
    # real driver's autocommit default for the postgres provider.
    con.database = database
    if database and database.provider_name == 'postgres':
        con.autocommit = True
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def rollback(con):
    # Mock connection: transaction rollback is a deliberate no-op.
    pass
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def __init__(cursor):
    # Mock cursor: empty result-set metadata, zero rows affected.
    cursor.description = []
    cursor.rowcount = 0
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def fetchone(cursor):
    # Mock cursor: there is never a row to fetch.
    return None
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def fetchall(cursor):
    # Mock cursor: always an empty result set.
    return []
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def __init__(pool, database):
    # Mock connection pool: only remembers its owning database.
    pool.database = database
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def release(pool, con):
    # Mock pool: releasing a connection is a deliberate no-op.
    pass
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def disconnect(pool):
    # Mock pool: nothing to tear down.
    pass
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def bind(self, provider_name, *args, **kwargs):
    # Resolve and import the concrete database provider module.
    # NOTE(review): this body appears truncated — `provider_cls` and
    # `raw_server_version` are bound but never used here; confirm
    # against the full original source before relying on it.
    if self.real_provider_name is not None:
        # A test harness may force a specific real provider.
        provider_name = self.real_provider_name
    self.provider_name = provider_name
    provider_module = import_module('pony.orm.dbproviders.' + provider_name)
    provider_cls = provider_module.provider_cls
    raw_server_version = self.raw_server_version
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def inspect_connection(provider, connection):
    # Hook for provider-specific connection inspection; no-op here.
    pass
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]
def _execute(database, sql, globals, locals, frame_depth):
    # Abstract: must be overridden by a concrete implementation;
    # reaching this body is a programming error.
    assert False  # pragma: no cover
Ahmad31/Web_Flask_Cassandra
[ 1, 1, 1, 3, 1492520361 ]