text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_pre_handler(self, handler):
    """Install *handler* as the pre-handler on the wrapped handler context.

    Returns whatever the context's ``set_pre_handler`` returns, or
    ``RET_ERROR`` when no handler context has been attached yet.
    """
    with self._lock:
        ctx = self._handler_ctx
        if ctx is None:
            return RET_ERROR
        return ctx.set_pre_handler(handler)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def msearch(self, m, query, fields=None, limit=None, or_=True):
    """Run a multi-field search on the index for model *m*.

    Passing ``limit`` makes the search faster.  ``or_`` chooses whether
    query terms are combined with OR (default) or AND.
    """
    ix = self._index(m)
    search_fields = ix.fields if fields is None else fields
    grouping = OrGroup if or_ else AndGroup
    query_parser = MultifieldParser(search_fields, ix.schema, group=grouping)
    return ix.search(query_parser.parse(query), limit=limit)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update(self, **kwargs):
    """Update the document only; the index itself is left untouched.

    Missing documents (404) are ignored rather than raised.
    """
    params = {'index': self.name, 'doc_type': self.doc_type, 'ignore': [404]}
    params.update(kwargs)
    return self._client.update(**params)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def diagonalize_collision_matrix(collision_matrices,
                                 i_sigma=None,
                                 i_temp=None,
                                 pinv_solver=0,
                                 log_level=0):
    """Diagonalize collision matrices.

    Note
    ----
    collision_matrices is overwritten by the eigenvectors.

    Parameters
    ----------
    collision_matrices : ndarray
        Collision matrix. This ndarray has to have one of the following
        shapes, with dtype='double' and order='C':
            (sigmas, temperatures, prod(mesh), num_band,
             prod(mesh), num_band)
            (sigmas, temperatures, ir_grid_points, num_band, 3,
             ir_grid_points, num_band, 3)
            (size, size)
    i_sigma : int, optional
        Index of BZ integration methods, tetrahedron method and smearing
        method with widths. Default is None (treated as 0 for the
        LAPACKE solvers).
    i_temp : int, optional
        Index of temperature. Default is None (treated as 0 for the
        LAPACKE solvers).
    pinv_solver : int, optional
        Diagonalization solver choice.
    log_level : int, optional
        Verbosity level. Smaller is more quiet. Default is 0.

    Returns
    -------
    w : ndarray
        Eigenvalues. shape=(size_of_collision_matrix,), dtype='double'

    """
    start = time.time()

    # Matrix size of collision matrix to be diagonalized.
    # The following value is expected:
    #   ir-colmat:  num_ir_grid_points * num_band * 3
    #   red-colmat: num_mesh_points * num_band
    shape = collision_matrices.shape
    if len(shape) == 6:
        size = shape[2] * shape[3]
        assert size == shape[4] * shape[5]
    elif len(shape) == 8:
        size = np.prod(shape[2:5])
        assert size == np.prod(shape[5:8])
    elif len(shape) == 2:
        size = shape[0]
        assert size == shape[1]

    solver = _select_solver(pinv_solver)

    # [1] dsyev: safer and slower than dsyevd and smallest memory usage
    # [2] dsyevd: faster than dsyev and largest memory usage
    if solver in [1, 2]:
        if log_level:
            routine = ['dsyev', 'dsyevd'][solver - 1]
            sys.stdout.write("Diagonalizing by lapacke %s... " % routine)
            sys.stdout.flush()
        import phono3py._phono3py as phono3c
        w = np.zeros(size, dtype='double')
        if i_sigma is None:
            _i_sigma = 0
        else:
            _i_sigma = i_sigma
        if i_temp is None:
            _i_temp = 0
        else:
            _i_temp = i_temp
        phono3c.diagonalize_collision_matrix(collision_matrices,
                                             w,
                                             _i_sigma,
                                             _i_temp,
                                             0.0,
                                             (solver + 1) % 2,
                                             0)  # only diagonalization
    elif solver == 3:  # np.linalg.eigh depends on dsyevd.
        if log_level:
            sys.stdout.write("Diagonalizing by np.linalg.eigh... ")
            sys.stdout.flush()
        # NOTE(review): unlike the solver 1/2 branch, i_sigma/i_temp are
        # used unnormalized here; None would mis-index.  Presumably callers
        # always pass ints when using solvers 3-5 — confirm.
        col_mat = collision_matrices[i_sigma, i_temp].reshape(
            size, size)
        # Eigenvectors are written back in place over the collision matrix.
        w, col_mat[:] = np.linalg.eigh(col_mat)
    elif solver == 4:  # fully scipy dsyev
        if log_level:
            sys.stdout.write("Diagonalizing by "
                             "scipy.linalg.lapack.dsyev... ")
            sys.stdout.flush()
        import scipy.linalg
        col_mat = collision_matrices[i_sigma, i_temp].reshape(
            size, size)
        w, _, info = scipy.linalg.lapack.dsyev(col_mat.T, overwrite_a=1)
    elif solver == 5:  # fully scipy dsyevd
        if log_level:
            sys.stdout.write("Diagonalizing by "
                             "scipy.linalg.lapack.dsyevd... ")
            sys.stdout.flush()
        import scipy.linalg
        col_mat = collision_matrices[i_sigma, i_temp].reshape(
            size, size)
        w, _, info = scipy.linalg.lapack.dsyevd(col_mat.T, overwrite_a=1)

    if log_level:
        print("[%.3fs]" % (time.time() - start))
        sys.stdout.flush()

    return w
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_weights(self):
"""Returns weights used for collision matrix and |X> and |f> self._rot_grid_points : ndarray shape=(ir_grid_points, point_operations), dtype='uintp' r_gps : grid points of arms of k-star with duplicates len(r_gps) == order of crystallographic point group len(unique(r_gps)) == number of arms of the k-star Returns ------- weights : list sqrt(g_k)/|g|, where g is the crystallographic point group and g_k is the number of arms of k-star. """ |
weights = []
n = float(self._rot_grid_points.shape[1])
for r_gps in self._rot_grid_points:
weights.append(np.sqrt(len(np.unique(r_gps)) / n))
return weights |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_I(self, a, b, size, plus_transpose=True):
"""Return I matrix in Chaput's PRL paper. None is returned if I is zero matrix. """ |
r_sum = np.zeros((3, 3), dtype='double', order='C')
for r in self._rotations_cartesian:
for i in range(3):
for j in range(3):
r_sum[i, j] += r[a, i] * r[b, j]
if plus_transpose:
r_sum += r_sum.T
# Return None not to consume computer for diagonalization
if (np.abs(r_sum) < 1e-10).all():
return None
# Same as np.kron(np.eye(size), r_sum), but writen as below
# to be sure the values in memory C-congiguous with 'double'.
I_mat = np.zeros((3 * size, 3 * size), dtype='double', order='C')
for i in range(size):
I_mat[(i * 3):((i + 1) * 3), (i * 3):((i + 1) * 3)] = r_sum
return I_mat |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _set_mode_kappa_Chaput(self, i_sigma, i_temp, weights):
    """Calculate mode kappa by the way in Laurent Chaput's PRL paper.

    This gives the different result from _set_mode_kappa and requires
    more memory space.

    """
    X = self._get_X(i_temp, weights, self._gv).ravel()
    num_ir_grid_points = len(self._ir_grid_points)
    num_band = self._primitive.get_number_of_atoms() * 3
    size = num_ir_grid_points * num_band * 3
    v = self._collision_matrix[i_sigma, i_temp].reshape(size, size)
    solver = _select_solver(self._pinv_solver)
    if solver in [1, 2, 4, 5]:
        # These solvers store eigenvectors in the transposed layout.
        v = v.T
    e = self._get_eigvals_pinv(i_sigma, i_temp)
    t = self._temperatures[i_temp]

    # Pseudo-inverse of the collision matrix: v diag(e) v^T.
    omega_inv = np.empty(v.shape, dtype='double', order='C')
    np.dot(v, (e * v).T, out=omega_inv)
    Y = np.dot(omega_inv, X)
    self._set_f_vectors(Y, num_ir_grid_points, weights)
    # Tensor elements in Voigt-like order: xx, yy, zz, yz, xz, xy.
    elems = ((0, 0), (1, 1), (2, 2), (1, 2), (0, 2), (0, 1))
    for i, vxf in enumerate(elems):
        mat = self._get_I(vxf[0], vxf[1], num_ir_grid_points * num_band)
        self._mode_kappa[i_sigma, i_temp, :, :, i] = 0
        if mat is not None:
            np.dot(mat, omega_inv, out=mat)
            # vals = (X ** 2 * np.diag(mat)).reshape(-1, 3).sum(axis=1)
            # vals = vals.reshape(num_ir_grid_points, num_band)
            # self._mode_kappa[i_sigma, i_temp, :, :, i] = vals
            w = diagonalize_collision_matrix(mat,
                                             pinv_solver=self._pinv_solver,
                                             log_level=self._log_level)
            if solver in [1, 2, 4, 5]:
                mat = mat.T
            # Spectral decomposition: project X on each eigenvector and
            # accumulate the per-mode contributions.
            spectra = np.dot(mat.T, X) ** 2 * w
            for s, eigvec in zip(spectra, mat.T):
                vals = s * (eigvec ** 2).reshape(-1, 3).sum(axis=1)
                vals = vals.reshape(num_ir_grid_points, num_band)
                self._mode_kappa[i_sigma, i_temp, :, :, i] += vals

    factor = self._conversion_factor * Kb * t ** 2
    self._mode_kappa[i_sigma, i_temp] *= factor
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_third_order_displacements(cell,
                                  symmetry,
                                  is_plusminus='auto',
                                  is_diagonal=False):
    """Create displacement dataset.

    Note
    ----
    Atoms 1, 2, and 3 are defined as follows:
        Atom 1: The first displaced atom. Third order force constant
                between Atoms 1, 2, and 3 is calculated.
        Atom 2: The second displaced atom. Second order force constant
                between Atoms 2 and 3 is calculated.
        Atom 3: Force is measured on this atom.

    Parameters
    ----------
    cell : PhonopyAtoms
        Supercell.
    symmetry : Symmetry
        Symmetry of supercell.
    is_plusminus : str or bool, optional
        Type of displacements, plus only (False), always plus and minus
        (True), and plus and minus depending on site symmetry ('auto').
    is_diagonal : bool, optional
        Whether allow diagonal displacements of Atom 2 or not.

    Returns
    -------
    dict
        Data structure is like:
        {'natom': 64,
         'cutoff_distance': 4.000000,
         'first_atoms':
           [{'number': atom1,
             'displacement': [0.03, 0., 0.],
             'second_atoms': [{'number': atom2,
                               'displacement': [0., -0.03, 0.],
                               'distance': 2.353}, ...]}, ...]}

    """
    positions = cell.get_scaled_positions()
    lattice = cell.get_cell().T  # basis vectors as column vectors

    # Least displacements of first atoms (Atom 1) are searched by
    # using respective site symmetries of the original crystal.
    # 'is_diagonal=False' below is made intentionally to expect
    # better accuracy.
    disps_first = get_least_displacements(symmetry,
                                          is_plusminus=is_plusminus,
                                          is_diagonal=False)

    symprec = symmetry.get_symmetry_tolerance()

    dds = []
    for disp in disps_first:
        atom1 = disp[0]
        disp1 = disp[1:4]
        site_sym = symmetry.get_site_symmetry(atom1)

        dds_atom1 = {'number': atom1,
                     'direction': disp1,
                     'second_atoms': []}

        # Reduced site symmetry at the first atom with respect to
        # the displacement of the first atoms.
        reduced_site_sym = get_reduced_site_symmetry(site_sym, disp1, symprec)
        # Searching orbits (second atoms) with respect to
        # the first atom and its reduced site symmetry.
        second_atoms = get_least_orbits(atom1,
                                        cell,
                                        reduced_site_sym,
                                        symprec)

        for atom2 in second_atoms:
            dds_atom2 = get_next_displacements(atom1,
                                               atom2,
                                               reduced_site_sym,
                                               lattice,
                                               positions,
                                               symprec,
                                               is_diagonal)
            # Shortest equivalent vector between the atom pair under
            # periodic boundary conditions, as a Cartesian distance.
            min_vec = get_equivalent_smallest_vectors(atom1,
                                                      atom2,
                                                      cell,
                                                      symprec)[0]
            min_distance = np.linalg.norm(np.dot(lattice, min_vec))
            dds_atom2['distance'] = min_distance
            dds_atom1['second_atoms'].append(dds_atom2)
        dds.append(dds_atom1)

    return dds
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_bond_symmetry(site_symmetry,
                      lattice,
                      positions,
                      atom_center,
                      atom_disp,
                      symprec=1e-5):
    """Return the bond symmetry operations.

    Bond symmetry is the subset of the site-symmetry operations that
    keep the symmetry of the cell containing the two fixed atoms, i.e.
    those mapping *atom_disp* onto itself around *atom_center*.
    """
    center = positions[atom_center]
    bond_vec = positions[atom_disp] - center
    retained = []
    for rot in site_symmetry:
        rotated = np.dot(bond_vec, rot.T) + center
        residual = positions[atom_disp] - rotated
        residual -= np.rint(residual)  # wrap into the nearest periodic image
        if np.linalg.norm(np.dot(lattice, residual)) < symprec:
            retained.append(rot)
    return np.array(retained)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_least_orbits(atom_index, cell, site_symmetry, symprec=1e-5):
    """Find least orbits for a centering atom.

    Returns the sorted unique representative atom indices after merging
    symmetry-equivalent atoms.
    """
    orbits = _get_orbits(atom_index, cell, site_symmetry, symprec)
    mapping = np.arange(cell.get_number_of_atoms())
    for i, orb in enumerate(orbits):
        for num in np.unique(orb):
            # NOTE(review): the row index i of `orbits` is used as an atom
            # index into `mapping`; this presumes _get_orbits returns one
            # row per atom in cell order — confirm against _get_orbits.
            if mapping[num] > mapping[i]:
                mapping[num] = mapping[i]
    return np.unique(mapping)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_fc3_to_hdf5(fc3, filename='fc3.hdf5', p2s_map=None, compression=None):
    """Write third-order force constants to an hdf5 file.

    Parameters
    ----------
    fc3 : ndarray
        Force constants, shape=(n_satom, n_satom, n_satom, 3, 3, 3) or
        (n_patom, n_satom, n_satom, 3, 3, 3), dtype=double.
    filename : str
        Filename to be used.
    p2s_map : ndarray, optional
        Primitive atom indices in supercell index system,
        shape=(n_patom,), dtype=intc.
    compression : str or int, optional
        h5py's lossless compression filter (e.g., "gzip", "lzf"); see
        h5py.Group.create_dataset. Default is None.
    """
    with h5py.File(filename, 'w') as handle:
        handle.create_dataset('fc3', data=fc3, compression=compression)
        if p2s_map is not None:
            handle.create_dataset('p2s_map', data=p2s_map)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_unitary_matrix_to_hdf5(temperature,
                                 mesh,
                                 unitary_matrix=None,
                                 sigma=None,
                                 sigma_cutoff=None,
                                 solver=None,
                                 filename=None,
                                 verbose=False):
    """Write eigenvectors of collision matrices at temperatures.

    Depending on the choice of the solver, eigenvectors are stored
    either column-wise or row-wise.
    """
    suffix = _get_filename_suffix(mesh,
                                  sigma=sigma,
                                  sigma_cutoff=sigma_cutoff,
                                  filename=filename)
    hdf5_filename = "unitary" + suffix + ".hdf5"
    with h5py.File(hdf5_filename, 'w') as handle:
        handle.create_dataset('temperature', data=temperature)
        if unitary_matrix is not None:
            handle.create_dataset('unitary_matrix', data=unitary_matrix)
        if solver is not None:
            handle.create_dataset('solver', data=solver)

        if verbose:
            plural = len(temperature) > 1
            text = "Unitary matrices " if plural else "Unitary matrix "
            if sigma is not None:
                text += "at sigma %s " % _del_zeros(sigma)
            if sigma_cutoff is not None:
                text += "(%4.2f SD) " % sigma_cutoff
            text += "were written into " if plural else "was written into "
            if sigma is not None:
                text += "\n"
            text += "\"%s\"." % hdf5_filename
            print(text)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_frequency_shift(
        self,
        grid_points,
        temperatures=np.arange(0, 1001, 10, dtype='double'),
        epsilons=None,
        output_filename=None):
    """Calculate the frequency shift from the lowest order diagram.

    Args:
        epsilons (list of float): Values used to avoid divergence.  When
            multiple values are given, frequency shifts for each value
            are returned.  Defaults to [0.1].
    """
    if self._interaction is None:
        self.set_phph_interaction()
    eps_values = [0.1] if epsilons is None else epsilons
    self._grid_points = grid_points
    # Delegates to the module-level get_frequency_shift function.
    get_frequency_shift(self._interaction,
                        self._grid_points,
                        self._band_indices,
                        eps_values,
                        temperatures,
                        output_filename=output_filename,
                        log_level=self._log_level)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_triplets_reciprocal_mesh_at_q(fixed_grid_number,
                                       mesh,
                                       rotations,
                                       is_time_reversal=True,
                                       swappable=True):
    """Search symmetry-reduced triplets (q0, q1, q2) fixing one q-point.

    Parameters
    ----------
    fixed_grid_number : int
        Grid point of q0.
    mesh : array_like
        Mesh numbers, dtype='intc', shape=(3,).
    rotations : array_like
        Rotation matrices in real space (their transposes act in
        reciprocal space), dtype='intc', shape=(n_rot, 3, 3).
    is_time_reversal : bool
        Inversion symmetry is added if it doesn't exist.
    swappable : bool
        q1 and q2 can be swapped, which reduces the number of triplets.
    """
    import phono3py._phono3py as phono3c

    num_grid_points = np.prod(mesh)
    map_triplets = np.zeros(num_grid_points, dtype='uintp')
    map_q = np.zeros(num_grid_points, dtype='uintp')
    grid_address = np.zeros((num_grid_points, 3), dtype='intc')
    phono3c.triplets_reciprocal_mesh_at_q(
        map_triplets,
        map_q,
        grid_address,
        fixed_grid_number,
        np.array(mesh, dtype='intc'),
        is_time_reversal * 1,
        np.array(rotations, dtype='intc', order='C'),
        swappable * 1)
    return map_triplets, map_q, grid_address
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_averaged_interaction(self):
    """Return the sum over phonon triplets of interaction strength.

    See Eq.(21) of PRB 91, 094306 (2015).
    """
    # strength axes: [triplet, band0, band, band]
    strength = self._interaction_strength
    band_sum = strength.sum(axis=2).sum(axis=2)
    weighted = np.dot(self._weights_at_q, band_sum)
    return weighted / np.prod(strength.shape[2:])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def optimize(lattice,
             positions,
             numbers,
             displacements,
             forces,
             alm_options=None,
             p2s_map=None,
             p2p_map=None,
             log_level=0):
    """Calculate second- and third-order force constants with ALM.

    Parameters
    ----------
    lattice : array_like
        Basis vectors; a, b, c are given as column vectors.
        shape=(3, 3), dtype='double'
    positions : array_like
        Fractional coordinates of atomic points.
        shape=(num_atoms, 3), dtype='double'
    numbers : array_like
        Atomic numbers. shape=(num_atoms,), dtype='intc'
    displacements : array_like
        Atomic displacement patterns in supercells in Cartesian.
        dtype='double', shape=(supercells, num_atoms, 3)
    forces : array_like
        Forces in supercells.
        dtype='double', shape=(supercells, num_atoms, 3)
    alm_options : dict, optional
        Default is None (no options). Recognized keys:
            cutoff_distance : int or float
            solver : str, 'SimplicialLDLT' or 'dense'
                     (default 'SimplicialLDLT')

    Returns
    -------
    (fc2, fc3) extracted via extract_fc2_from_alm / _extract_fc3_from_alm.
    """
    from alm import ALM

    # Bug fix: the original crashed with TypeError ('in' on None) when
    # alm_options was omitted.
    if alm_options is None:
        alm_options = {}

    with ALM(lattice, positions, numbers) as alm:
        natom = len(numbers)
        alm.set_verbosity(log_level)
        nkd = len(np.unique(numbers))

        # -1 means "no cutoff" for a given interaction order / kind pair.
        rcs = -np.ones((2, nkd, nkd), dtype='double')
        cutoff = alm_options.get('cutoff_distance')
        # Bug fix: the original only accepted an exact float and left
        # `rcs` unbound (NameError) for any other given value; ints are
        # now accepted as well and rcs always has a valid default.
        if isinstance(cutoff, (int, float)):
            rcs[1] = float(cutoff)
        alm.define(2, rcs)

        alm.set_displacement_and_force(displacements, forces)
        solver = alm_options.get('solver', 'SimplicialLDLT')
        alm.optimize(solver=solver)

        fc2 = extract_fc2_from_alm(alm,
                                   natom,
                                   atom_list=p2s_map,
                                   p2s_map=p2s_map,
                                   p2p_map=p2p_map)
        fc3 = _extract_fc3_from_alm(alm,
                                    natom,
                                    p2s_map=p2s_map,
                                    p2p_map=p2p_map)
        return fc2, fc3
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_alm_disp_fc3(disp_dataset):
"""Create displacements of atoms for ALM input Note ---- Dipslacements of all atoms in supercells for all displacement configurations in phono3py are returned, i.e., most of displacements are zero. Only the configurations with 'included' == True are included in the list of indices that is returned, too. Parameters disp_dataset : dict Displacement dataset that may be obtained by file_IO.parse_disp_fc3_yaml. Returns ------- disp : ndarray Displacements of atoms in supercells of all displacement configurations. shape=(ndisp, natom, 3) dtype='double' indices : list of int The indices of the displacement configurations with 'included' == True. """ |
natom = disp_dataset['natom']
ndisp = len(disp_dataset['first_atoms'])
for disp1 in disp_dataset['first_atoms']:
ndisp += len(disp1['second_atoms'])
disp = np.zeros((ndisp, natom, 3), dtype='double', order='C')
indices = []
count = 0
for disp1 in disp_dataset['first_atoms']:
indices.append(count)
disp[count, disp1['number']] = disp1['displacement']
count += 1
for disp1 in disp_dataset['first_atoms']:
for disp2 in disp1['second_atoms']:
if 'included' in disp2:
if disp2['included']:
indices.append(count)
else:
indices.append(count)
disp[count, disp1['number']] = disp1['displacement']
disp[count, disp2['number']] = disp2['displacement']
count += 1
return disp, indices |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def capture_dash_in_url_name(self, node):
    """Capture dash in URL name.

    Returns a DJ04 issue when the 'name' keyword contains a dash,
    otherwise None.
    """
    for kw in node.keywords:
        if kw.arg != 'name':
            continue
        if '-' in kw.value.s:
            return DJ04(
                lineno=node.lineno,
                col=node.col_offset,
            )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def capture_url_missing_namespace(self, node):
    """Capture missing namespace in url include.

    Returns None as soon as an include(...) argument carries a
    'namespace' keyword; otherwise returns a DJ05 issue.
    """
    for arg in node.args:
        if not (isinstance(arg, ast.Call) and isinstance(arg.func, ast.Name)):
            continue
        if arg.func.id != 'include':
            continue
        if any(kw.arg == 'namespace' for kw in arg.keywords):
            return None
    return DJ05(
        lineno=node.lineno,
        col=node.col_offset,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_call_name(self, node):
    """Return the call name for the given Call node.

    For ``obj.method()`` this is ``'method'``; for ``func()`` it is
    ``'func'``.  Other callee shapes yield None.
    """
    func = node.func
    if isinstance(func, ast.Name):
        return func.id
    if isinstance(func, ast.Attribute):
        return func.attr
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def message(self):
    """Return the issue message, prefixed with the issue code."""
    detail = self.description.format(**self.parameters)
    return '{code} {message}'.format(code=self.code, message=detail)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self, node):
    """Capture the use of exclude (DJ06) and dunder-all fields (DJ07)
    in a ModelForm's inner Meta class.

    Returns None when the checker does not apply to *node*, otherwise a
    (possibly empty) list of issues.
    """
    if not self.checker_applies(node):
        return
    issues = []
    for body in node.body:
        if not isinstance(body, ast.ClassDef):
            continue
        for element in body.body:
            if not isinstance(element, ast.Assign):
                continue
            for target in element.targets:
                # Bug fix: tuple/attribute targets (e.g. ``a, b = ...``)
                # have no ``.id`` and crashed with AttributeError.
                if not isinstance(target, ast.Name):
                    continue
                if target.id == 'fields' and self.is_string_dunder_all(element):
                    issues.append(
                        DJ07(
                            lineno=node.lineno,
                            col=node.col_offset,
                        )
                    )
                elif target.id == 'exclude':
                    issues.append(
                        DJ06(
                            lineno=node.lineno,
                            col=node.col_offset,
                        )
                    )
    return issues
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def detect_scheme_and_format(source):
    """Detect scheme and format based on source and return them as a tuple.

    Scheme is a minimum of 2 letters before `://` (lower cased), for
    example `http` from `http://example.com/table.csv`.
    """
    # Scheme: stream (file-like object)
    if hasattr(source, 'read'):
        return ('stream', None)

    # Format: inline (non-string source holds the data itself)
    if not isinstance(source, six.string_types):
        return (None, 'inline')

    # Format: gsheet
    if 'docs.google.com/spreadsheets' in source:
        if 'export' not in source and 'pub' not in source:
            return (None, 'gsheet')
        if 'csv' in source:
            return ('https', 'csv')

    # Format: sql
    for sql_scheme in config.SQL_SCHEMES:
        if source.startswith('%s://' % sql_scheme):
            return (None, 'sql')

    # General
    parsed = urlparse(source)
    scheme = parsed.scheme.lower()
    if len(scheme) < 2:
        scheme = config.DEFAULT_SCHEME
    fmt = os.path.splitext(parsed.path or parsed.netloc)[1][1:].lower() or None
    if fmt is None:
        # A "format=" query parameter may name the format explicitly.
        values = parse_qs(parsed.query).get("format")
        if values is not None and len(values) == 1:
            fmt = values[0]

    # Format: datapackage
    if parsed.path.endswith('datapackage.json'):
        return (None, 'datapackage')

    return (scheme, fmt)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def detect_encoding(sample, encoding=None):
    """Detect the encoding of a byte-string sample.

    When *encoding* is given it is only normalized; otherwise cchardet
    is consulted and low-confidence or ascii results fall back to the
    configured default encoding.
    """
    # Deferred import to reduce tabulator import time
    from cchardet import detect
    if encoding is not None:
        return normalize_encoding(sample, encoding)
    result = detect(sample)
    confidence = result['confidence'] or 0
    detected = normalize_encoding(sample, result['encoding'] or 'ascii')
    if confidence < config.ENCODING_CONFIDENCE:
        detected = config.DEFAULT_ENCODING
    if detected == 'ascii':
        # ASCII is a subset of the default encoding; prefer the latter.
        detected = config.DEFAULT_ENCODING
    return detected
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def normalize_encoding(sample, encoding):
    """Normalize an encoding name, resolving BOM-dependent UTF variants
    ('utf-8-sig', 'utf-16-be', 'utf-16-le' tweaks)."""
    canonical = codecs.lookup(encoding).name
    # Work around 'Incorrect detection of utf-8-sig encoding'
    # <https://github.com/PyYoshi/cChardet/issues/28>, and use the
    # BOM-stripping name (without byte-order) for UTF-16 encodings.
    bom_fixups = {
        'utf-8': (codecs.BOM_UTF8, 'utf-8-sig'),
        'utf-16-be': (codecs.BOM_UTF16_BE, 'utf-16'),
        'utf-16-le': (codecs.BOM_UTF16_LE, 'utf-16'),
    }
    fixup = bom_fixups.get(canonical)
    if fixup is not None and sample.startswith(fixup[0]):
        canonical = fixup[1]
    return canonical
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def detect_html(text):
    """Return True when *text* begins (after optional whitespace) like an
    HTML document, i.e. with '<!doctype' or '<html' (case-insensitive)."""
    return bool(re.match(r'\s*<(!doctype|html)', text, re.IGNORECASE))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reset_stream(stream):
    """Reset the stream pointer to the first element.

    If the stream is not seekable a TabulatorException is raised.
    """
    try:
        position = stream.tell()
    except Exception:
        # Position unknown: assume a reset is required.
        position = True
    if position == 0:
        return
    try:
        stream.seek(0)
    except Exception:
        message = 'It\'s not possible to reset this stream'
        raise exceptions.TabulatorException(message)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def requote_uri(uri):
    """Requote *uri* if it contains non-ascii chars, spaces etc."""
    # Deferred import to reduce tabulator import time
    import requests.utils
    if six.PY2:
        def escape_high_bytes(text):
            # Percent-encode bytes in the 0x80-0xFF range (upper-case hex).
            return re.sub(
                '[\x80-\xFF]',
                lambda match: ('%%%02x' % ord(match.group(0))).upper(),
                text)
        parts = urlparse(uri)
        uri = urlunparse(
            part.encode('idna') if index == 1
            else escape_high_bytes(part.encode('utf-8'))
            for index, part in enumerate(parts))
    return requests.utils.requote_uri(uri)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def import_attribute(path):
    """Import and return an attribute addressed by a dotted path like
    `package.module.attribute`."""
    module_name, attribute_name = path.rsplit('.', 1)
    return getattr(import_module(module_name), attribute_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def extract_options(options, names):
    """Pop every entry of *options* whose key is in *names* (in-place)
    and return the popped subset as a new dict."""
    extracted = {}
    for key in list(options):
        if key in names:
            extracted[key] = options.pop(key)
    return extracted
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def stringify_value(value):
    """Convert any value to text.

    None becomes the empty string; objects with an ``isoformat`` method
    (dates/times) are rendered via it.
    """
    if value is None:
        return u''
    to_iso = getattr(value, 'isoformat', None)
    if to_iso is not None:
        value = to_iso()
    return type(u'')(value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reset(self):
    '''Resets the stream pointer to the beginning of the file.'''
    past_sample = self.__row_number > self.__sample_size
    if past_sample:
        # The in-memory sample is exhausted; re-read it from the parser.
        self.__parser.reset()
        self.__extract_sample()
        self.__extract_headers()
    self.__row_number = 0
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sample(self):
    '''Returns the stream's rows used as sample.

    These sample rows are used internally to infer characteristics of
    the source file (e.g. encoding, headers, ...).
    '''
    processed = self.__apply_processors(iter(self.__sample_extended_rows))
    return [row for _, _, row in processed]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def iter(self, keyed=False, extended=False):
    '''Iterate over the rows.

    Each row is returned in a format that depends on the arguments
    `keyed` and `extended`. By default, each row is returned as a list
    of its values.

    Args:
        keyed (bool, optional): When True, each returned row will be a
            `dict` mapping the header name to its value in the current
            row. For example, `[{'name': 'J Smith', 'value': '10'}]`.
            Ignored if ``extended`` is True. Defaults to False.
        extended (bool, optional): When True, returns each row as a
            tuple with row number (starts at 1), list of headers, and
            list of row values. For example,
            `(1, ['name', 'value'], ['J Smith', '10'])`.
            Defaults to False.

    Returns:
        Iterator[Union[List[Any], Dict[str, Any],
                       Tuple[int, List[str], List[Any]]]]:
            The rows. The format depends on the values of the `keyed`
            and `extended` arguments.

    Raises:
        exceptions.TabulatorException: If the stream is closed.
    '''
    # Error if closed
    if self.closed:
        message = 'Stream is closed. Please call "stream.open()" first.'
        raise exceptions.TabulatorException(message)

    # Create iterator: buffered sample rows first, then the parser's
    # remaining rows, with processors applied to the combined stream.
    iterator = chain(
        self.__sample_extended_rows,
        self.__parser.extended_rows)
    iterator = self.__apply_processors(iterator)

    # Yield rows from iterator
    for row_number, headers, row in iterator:
        # Track the highest row number seen so far (sample rows may
        # repeat numbers already counted).
        if row_number > self.__row_number:
            self.__row_number = row_number
        if extended:
            yield (row_number, headers, row)
        elif keyed:
            yield dict(zip(headers, row))
        else:
            yield row
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save(self, target, format=None, encoding=None, **options):
    '''Save stream to the local filesystem.

    Args:
        target (str): Path where to save the stream.
        format (str, optional): The format the stream will be saved as.
            If None, detects from the ``target`` path. Defaults to None.
        encoding (str, optional): Saved file encoding. Defaults to
            ``config.DEFAULT_ENCODING``.
        **options: Extra options passed to the writer.
    '''
    # Get encoding/format
    if encoding is None:
        encoding = config.DEFAULT_ENCODING
    if format is None:
        _, format = helpers.detect_scheme_and_format(target)

    # Prepare writer class: custom writers take precedence over the
    # built-in ones registered in config.WRITERS.
    writer_class = self.__custom_writers.get(format)
    if writer_class is None:
        if format not in config.WRITERS:
            message = 'Format "%s" is not supported' % format
            raise exceptions.FormatError(message)
        writer_class = helpers.import_attribute(config.WRITERS[format])

    # Prepare writer options; anything left over is unsupported.
    writer_options = helpers.extract_options(options, writer_class.options)
    if options:
        message = 'Not supported options "%s" for format "%s"'
        message = message % (', '.join(options), format)
        raise exceptions.TabulatorException(message)

    # Write data to target
    writer = writer_class(**writer_options)
    writer.write(self.iter(), target, headers=self.headers, encoding=encoding)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate(source, scheme=None, format=None):
    '''Check if tabulator is able to load the source.

    Args:
        source (Union[str, IO]): The source path or IO object.
        scheme (str, optional): The source scheme. Auto-detect by default.
        format (str, optional): The source file format. Auto-detect by
            default.

    Returns:
        bool: Whether tabulator is able to load the source file.

    Raises:
        `tabulator.exceptions.SchemeError`: The file scheme is not
            supported.
        `tabulator.exceptions.FormatError`: The file format is not
            supported.
    '''
    # Fall back to auto-detection for whichever of scheme/format
    # was not given explicitly.
    detected_scheme, detected_format = helpers.detect_scheme_and_format(source)
    scheme = scheme or detected_scheme
    format = format or detected_format

    # A None scheme (e.g. inline data) is acceptable; a known-bad one is not.
    if scheme is not None and scheme not in config.LOADERS:
        raise exceptions.SchemeError('Scheme "%s" is not supported' % scheme)
    if format not in config.PARSERS:
        raise exceptions.FormatError('Format "%s" is not supported' % format)
    return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def require_axis(f):
""" Check if the object of the function has axis and sel_axis members """ |
@wraps(f)
def _wrapper(self, *args, **kwargs):
if None in (self.axis, self.sel_axis):
raise ValueError('%(func_name) requires the node %(node)s '
'to have an axis and a sel_axis function' %
dict(func_name=f.__name__, node=repr(self)))
return f(self, *args, **kwargs)
return _wrapper |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create(point_list=None, dimensions=None, axis=0, sel_axis=None):
""" Creates a kd-tree from a list of points All points in the list must be of the same dimensionality. If no point_list is given, an empty tree is created. The number of dimensions has to be given instead. If both a point_list and dimensions are given, the numbers must agree. Axis is the axis on which the root-node should split. sel_axis(axis) is used when creating subnodes of a node. It receives the axis of the parent node and returns the axis of the child node. """ |
# Need at least one of point_list/dimensions to know the dimensionality.
if not point_list and not dimensions:
    raise ValueError('either point_list or dimensions must be provided')
elif point_list:
    # Verify every point agrees on dimensionality (and with the given
    # `dimensions` argument when both were supplied).
    dimensions = check_dimensionality(point_list, dimensions)
# by default cycle through the axis
sel_axis = sel_axis or (lambda prev_axis: (prev_axis+1) % dimensions)
if not point_list:
    # Empty tree: a single data-less root node.
    return KDNode(sel_axis=sel_axis, axis=axis, dimensions=dimensions)
# Sort point list and choose median as pivot element
point_list = list(point_list)
point_list.sort(key=lambda point: point[axis])
median = len(point_list) // 2
loc = point_list[median]
# Recursively build balanced subtrees from the halves on either side
# of the median; children split on the axis chosen by sel_axis.
left = create(point_list[:median], dimensions, sel_axis(axis))
right = create(point_list[median + 1:], dimensions, sel_axis(axis))
return KDNode(loc, left, right, axis=axis, sel_axis=sel_axis, dimensions=dimensions)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def level_order(tree, include_all=False):
""" Returns an iterator over the tree in level-order If include_all is set to True, empty parts of the tree are filled with dummy entries and the iterator becomes infinite. """ |
# Breadth-first traversal driven by a FIFO queue.
q = deque()
q.append(tree)
while q:
    node = q.popleft()
    yield node
    # With include_all, missing children are replaced by empty dummy
    # nodes (node.__class__()), which makes the iterator infinite.
    if include_all or node.left:
        q.append(node.left or node.__class__())
    if include_all or node.right:
        q.append(node.right or node.__class__())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def visualize(tree, max_level=100, node_width=10, left_padding=5):
""" Prints the tree to stdout """ |
# Number of levels to draw, capped at max_level.
height = min(max_level, tree.height()-1)
max_width = pow(2, height)
per_level = 1   # node slots on the current level
in_level = 0    # slots already printed on the current level
level = 0
# include_all=True pads missing children with dummy nodes so every
# level is complete; we break out once `height` levels were printed.
for node in level_order(tree, include_all=True):
    if in_level == 0:
        print()
        print()
        print(' '*left_padding, end=' ')
    # Center each node label within its share of the total width.
    width = int(max_width*node_width/per_level)
    node_str = (str(node.data) if node else '').center(width)
    print(node_str, end=' ')
    in_level += 1
    if in_level == per_level:
        # Level finished: the next one holds twice as many slots.
        in_level = 0
        per_level *= 2
        level += 1
    if level > height:
        break
print()
print()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_leaf(self):
""" Returns True if a Node has no subnodes True False """ |
return (not self.data) or \
(all(not bool(c) for c, p in self.children)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def children(self):
""" Returns an iterator for the non-empty children of the Node The children are returned as (Node, pos) tuples where pos is 0 for the left subnode and 1 for the right. 0 0 2 """ |
# Yield only children that exist and carry data, tagged with their
# position: 0 = left subtree, 1 = right subtree.
if self.left and self.left.data is not None:
    yield self.left, 0
if self.right and self.right.data is not None:
    yield self.right, 1
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_child(self, index, child):
""" Sets one of the node's children index 0 refers to the left, 1 to the right child """ |
# Position 0 maps to the left subtree; anything else to the right.
attribute = 'left' if index == 0 else 'right'
setattr(self, attribute, child)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_child_pos(self, child):
""" Returns the position if the given child If the given node is the left child, 0 is returned. If its the right child, 1 is returned. Otherwise None """ |
# Compare against each non-empty child; falls through (implicitly
# returning None) when `child` is not a direct child of this node.
for c, pos in self.children:
    if child == c:
        return pos
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add(self, point):
""" Adds a point to the current node or iteratively descends to one of its children. Users should call add() only to the topmost tree. """ |
# Iterative descent (no recursion): walk down from this node until an
# empty slot for the point is found, then attach it there.
current = self
while True:
    check_dimensionality([point], dimensions=current.dimensions)
    # Adding has hit an empty leaf-node, add here
    if current.data is None:
        current.data = point
        return current
    # split on self.axis, recurse either left or right
    if point[current.axis] < current.data[current.axis]:
        if current.left is None:
            current.left = current.create_subnode(point)
            return current.left
        else:
            current = current.left
    else:
        # Ties (equal coordinate on the split axis) go right.
        if current.right is None:
            current.right = current.create_subnode(point)
            return current.right
        else:
            current = current.right
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_subnode(self, data):
""" Creates a subnode for the current node """ |
return self.__class__(data,
axis=self.sel_axis(self.axis),
sel_axis=self.sel_axis,
dimensions=self.dimensions) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_replacement(self):
""" Finds a replacement for the current node The replacement is returned as a (replacement-node, replacements-parent-node) tuple """ |
# Prefer the minimum of the right subtree on this node's axis; when
# there is no right subtree, use the maximum of the left one.
if self.right:
    child, parent = self.right.extreme_child(min, self.axis)
else:
    child, parent = self.left.extreme_child(max, self.axis)
# extreme_child reports parent=None when the chosen child is the
# subtree root, in which case this node is its parent.
return (child, parent if parent is not None else self)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove(self, point, node=None):
""" Removes the node with the given point from the tree Returns the new root node of the (sub)tree. If there are multiple points matching "point", only one is removed. The optional "node" parameter is used for checking the identity, once the removeal candidate is decided.""" |
# Recursion has reached an empty leaf node, nothing here to delete
if not self:
    return
# Recursion has reached the node to be deleted
if self.should_remove(point, node):
    return self._remove(point)
# Remove direct subnode
if self.left and self.left.should_remove(point, node):
    self.left = self.left._remove(point)
elif self.right and self.right.should_remove(point, node):
    self.right = self.right._remove(point)
# Recurse to subtrees.
# Both branches may need visiting when the point's coordinate equals
# this node's on the split axis (duplicates can sit on either side),
# hence <= and >= rather than an if/else.
if point[self.axis] <= self.data[self.axis]:
    if self.left:
        self.left = self.left.remove(point, node)
if point[self.axis] >= self.data[self.axis]:
    if self.right:
        self.right = self.right.remove(point, node)
return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def axis_dist(self, point, axis):
""" Squared distance at the given axis between the current Node and the given point """ |
return math.pow(self.data[axis] - point[axis], 2) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dist(self, point):
""" Squared distance between the current Node and the given point """ |
r = range(self.dimensions)
return sum([self.axis_dist(point, i) for i in r]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def search_knn(self, point, k, dist=None):
""" Return the k nearest neighbors of point and their distances point must be an actual point, not a node. k is the number of results to return. The actual results can be less (if there aren't more nodes to return) or more in case of equal distances. dist is a distance function, expecting two points and returning a distance value. Distance values can be any comparable type. The result is an ordered list of (node, distance) tuples. """ |
if k < 1:
    raise ValueError("k must be greater than 0.")
# Default metric: the node's own squared-distance implementation.
if dist is None:
    get_dist = lambda n: n.dist(point)
else:
    get_dist = lambda n: dist(n.data, point)
results = []
# _search_node (defined elsewhere) fills `results`; from the unpacking
# below it presumably stores (-distance, counter, node) tuples, with
# the counter as a stable tiebreaker -- TODO confirm at its definition.
self._search_node(point, k, results, get_dist, itertools.count())
# We sort the final result by the distance in the tuple
# (<KdNode>, distance).  Distances were stored negated, so negate back.
return [(node, -d) for d, _, node in sorted(results, reverse=True)]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def search_nn(self, point, dist=None):
""" Search the nearest node of the given point point must be an actual point, not a node. The nearest node to the point is returned. If a location of an actual node is used, the Node with this location will be returned (not its neighbor). dist is a distance function, expecting two points and returning a distance value. Distance values can be any comparable type. The result is a (node, distance) tuple. """ |
return next(iter(self.search_knn(point, 1, dist)), None) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def search_nn_dist(self, point, distance, best=None):
""" Search the n nearest nodes of the given point which are within given distance point must be a location, not a node. A list containing the n nearest nodes to the point within the distance will be returned. """ |
# NOTE(review): the `best` parameter is accepted but never used here.
results = []
# Metric is fixed to the node's squared distance; matches within
# `distance` are appended to `results` by _search_nn_dist.
get_dist = lambda n: n.dist(point)
self._search_nn_dist(point, distance, results, get_dist)
return results
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_valid(self):
""" Checks recursively if the tree is valid It is valid if each node splits correctly """ |
# Empty node: trivially valid.
if not self:
    return True
# A left child must not exceed, and a right child must not undercut,
# this node's coordinate on its split axis.
if self.left and self.data[self.axis] < self.left.data[self.axis]:
    return False
if self.right and self.data[self.axis] > self.right.data[self.axis]:
    return False
# Valid when every child subtree is valid, or when this is a leaf.
return all(c.is_valid() for c, _ in self.children) or self.is_leaf
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def extreme_child(self, sel_func, axis):
""" Returns a child of the subtree and its parent The child is selected by sel_func which is either min or max (or a different function with similar semantics). """ |
# Selection key: the candidate node's coordinate on `axis`.
max_key = lambda child_parent: child_parent[0].data[axis]
# we don't know our parent, so we include None
me = [(self, None)] if self else []
# Recurse into each non-empty child subtree.
child_max = [c.extreme_child(sel_func, axis) for c, _ in self.children]
# insert self for unknown parents
child_max = [(c, p if p is not None else self) for c, p in child_max]
candidates = me + child_max
if not candidates:
    return None, None
# sel_func is min or max (or similar), applied over all candidates.
return sel_func(candidates, key=max_key)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dump_raw(self, text, stream=None):
"""Encrypt raw data and write to stream.""" |
encrypted = self.vault.encrypt(text)
if stream:
stream.write(encrypted)
else:
return encrypted |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dump(self, data, stream=None):
"""Encrypt data and print stdout or write to stream.""" |
# Serialize to YAML first, then delegate encryption/output to
# dump_raw; returns the ciphertext when no stream is given.
yaml_text = yaml.dump(
    data,
    default_flow_style=False,
    allow_unicode=True)
return self.dump_raw(yaml_text, stream=stream)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_item(c, name, item):
""" add_item adds MenuItems to the menu identified by 'name' """ |
if isinstance(item, MenuItem):
if name not in c.items:
c.items[name] = []
c.items[name].append(item)
c.sorted[name] = False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_menus(c):
""" load_menus loops through INSTALLED_APPS and loads the menu.py files from them. """ |
# we don't need to do this more than once
if c.loaded:
    return
# Fetch all installed app names
app_names = settings.INSTALLED_APPS
# Prefer the app registry when it is available so AppConfig-based
# entries resolve to their real module names.
if apps:
    app_names = [
        app_config.name
        for app_config in apps.get_app_configs()
    ]
# loop through our INSTALLED_APPS
for app in app_names:
    # skip any django apps
    if app.startswith("django."):
        continue
    menu_module = '%s.menus' % app
    try:
        # Importing is enough: the module registers its menu items as
        # an import side effect.  Apps without a menus module are
        # silently skipped.
        __import__(menu_module, fromlist=["menu", ])
    except ImportError:
        pass
c.loaded = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sort_menus(c):
""" sort_menus goes through the items and sorts them based on their weight """ |
# Sort each menu's items by weight exactly once; the per-menu 'sorted'
# flag records which menus are already in order.
for menu_name, menu_items in c.items.items():
    if c.sorted[menu_name]:
        continue
    menu_items.sort(key=lambda entry: entry.weight)
    c.sorted[menu_name] = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process(c, request, name=None):
""" process uses the current request to determine which menus should be visible, which are selected, etc. """ |
# make sure we're loaded & sorted
c.load_menus()
c.sort_menus()
if name is None:
    # special case, process all menus
    items = {}
    for name in c.items:
        items[name] = c.process(request, name)
    return items
if name not in c.items:
    return []
# Deep copy so per-request mutation (visibility/selection flags)
# never leaks into the shared registry.
items = copy.deepcopy(c.items[name])
curitem = None
for item in items:
    item.process(request)
    if item.visible:
        item.selected = False
        if item.match_url(request):
            # The longest matching URL wins the 'selected' flag.
            if curitem is None or len(curitem.url) < len(item.url):
                curitem = item
if curitem is not None:
    curitem.selected = True
# return only visible items
visible = [
    item
    for item in items
    if item.visible
]
# determine if we should apply 'selected' to parents when one of their
# children is the 'selected' menu
if getattr(settings, 'MENU_SELECT_PARENTS', False):
    def is_child_selected(item):
        # Recursive check; implicitly returns None (falsy) when no
        # descendant is selected.
        for child in item.children:
            if child.selected or is_child_selected(child):
                return True
    for item in visible:
        if is_child_selected(item):
            item.selected = True
return visible
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check(self, request):
""" Evaluate if we should be visible for this request """ |
if callable(self.check_func):
self.visible = self.check_func(request) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process(self, request):
""" """ |
# if we're not visible we return since we don't need to do anymore processing
self.check(request)
if not self.visible:
    return
# evaluate our title
if callable(self.title):
    self.title = self.title(request)
# if no title is set turn it into a slug
if self.slug is None:
    # in python3 we don't need to convert to unicode, in python2 slugify
    # requires a unicode string
    if sys.version_info > (3, 0):
        self.slug = slugify(self.title)
    else:
        self.slug = slugify(unicode(self.title))
# evaluate children
if callable(self.children):
    children = list(self.children(request))
else:
    children = list(self.children)
for child in children:
    child.parent = self
    child.process(request)
# Keep only the children that remained visible after processing,
# ordered by weight.
self.children = [
    child
    for child in children
    if child.visible
]
self.children.sort(key=lambda child: child.weight)
# if we have no children and MENU_HIDE_EMPTY then we are not visible and should return
hide_empty = getattr(settings, 'MENU_HIDE_EMPTY', False)
if hide_empty and len(self.children) == 0:
    self.visible = False
    return
# find out if one of our children is selected, and mark it as such.
# The longest matching URL wins, mirroring Menu.process.
curitem = None
for item in self.children:
    item.selected = False
    if item.match_url(request):
        if curitem is None or len(curitem.url) < len(item.url):
            curitem = item
if curitem is not None:
    curitem.selected = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def match_url(self, request):
""" match url determines if this is selected """ |
matched = False
if self.exact_url:
if re.match("%s$" % (self.url,), request.path):
matched = True
elif re.match("%s" % self.url, request.path):
matched = True
return matched |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configure(self, proto):
""" Configures the protocol using the information gathered from the remote Mongo instance. Such information may contain the max BSON document size, replica set configuration, and the master status of the instance. """ |
# NOTE(review): this body uses `yield`/`defer.returnValue`, so it is
# presumably decorated with @defer.inlineCallbacks -- confirm at the
# definition site.
if not proto:
    defer.returnValue(None)
reply = yield self.__send_ismaster(proto, timeout=self.initialDelay)
# Handle the reply from the "ismaster" query. The reply contains
# configuration information about the peer.
# Make sure we got a result document.
if len(reply.documents) != 1:
    raise OperationFailure("TxMongo: invalid document length.")
# Get the configuration document from the reply.
config = reply.documents[0].decode()
# Make sure the command was successful.
if not config.get("ok"):
    code = config.get("code")
    msg = "TxMongo: " + config.get("err", "Unknown error")
    raise OperationFailure(msg, code)
# Check that the replicaSet matches.
set_name = config.get("setName")
expected_set_name = self.uri["options"].get("replicaset")
if expected_set_name and (expected_set_name != set_name):
    # Log the invalid replica set failure.
    msg = "TxMongo: Mongo instance does not match requested replicaSet."
    raise ConfigurationError(msg)
# Track max bson object size limit.
proto.max_bson_size = config.get("maxBsonObjectSize", DEFAULT_MAX_BSON_SIZE)
proto.max_write_batch_size = config.get("maxWriteBatchSize", DEFAULT_MAX_WRITE_BATCH_SIZE)
proto.set_wire_versions(config.get("minWireVersion", 0),
                        config.get("maxWireVersion", 0))
# Track the other hosts in the replica set.
hosts = config.get("hosts")
if isinstance(hosts, list) and hosts:
    for host in hosts:
        # Normalize "host[:port]" strings into (host, port) tuples,
        # defaulting to MongoDB's standard port 27017.
        if ':' not in host:
            host = (host, 27017)
        else:
            host = host.split(':', 1)
            host[1] = int(host[1])
            host = tuple(host)
        if host not in self.__allnodes:
            self.__allnodes.append(host)
# Check if this node is the master.
ismaster = config.get("ismaster")
if not ismaster:
    msg = "TxMongo: MongoDB host `%s` is not master." % config.get('me')
    raise AutoReconnect(msg)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def notifyReady(self):
""" Returns a deferred that will fire when the factory has created a protocol that can be used to communicate with a Mongo server. Note that this will not fire until we have connected to a Mongo master, unless slaveOk was specified in the Mongo URI connection options. """ |
# Already connected: resolve immediately with the live protocol.
if self.instance:
    return defer.succeed(self.instance)
# Otherwise queue a deferred to be fired once a connection is
# established; cancelling it removes it from the pending list.
def on_cancel(d):
    self.__notify_ready.remove(d)
df = defer.Deferred(on_cancel)
self.__notify_ready.append(df)
return df
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def retryNextHost(self, connector=None):
""" Have this connector connect again, to the next host in the configured list of hosts. """ |
if not self.continueTrying:
    msg = "TxMongo: Abandoning {0} on explicit request.".format(connector)
    log.msg(msg)
    return
if connector is None:
    if self.connector is None:
        raise ValueError("TxMongo: No additional connector to retry.")
    else:
        connector = self.connector
# Advance to the next known host; once every host has been tried,
# wrap around and back off (delay) before reconnecting.
delay = False
self.__index += 1
if self.__index >= len(self.__allnodes):
    self.__index = 0
    delay = True
connector.host, connector.port = self.__allnodes[self.__index]
if delay:
    self.retry(connector)
else:
    connector.connect()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def indexes_created(self):
"""Returns a defer on the creation of this GridFS instance's indexes """ |
d = defer.Deferred()
self.__indexes_created_defer.chainDeferred(d)
return d |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_last_version(self, filename):
"""Get a file from GridFS by ``"filename"``. Returns the most recently uploaded file in GridFS with the name `filename` as an instance of :class:`~gridfs.grid_file.GridOut`. Raises :class:`~gridfs.errors.NoFile` if no such file exists. An index on ``{filename: 1, uploadDate: -1}`` will automatically be created when this method is called the first time. :Parameters: - `filename`: ``"filename"`` of the file to get .. versionadded:: 1.6 """ |
def ok(doc):
    # find_one resolves to None when no file matched.
    if doc is None:
        raise NoFile("TxMongo: no file in gridfs with filename {0}".format(repr(filename)))
    return GridOut(self.__collection, doc)
# Sort by uploadDate descending so the newest revision wins.
# NOTE(review): `filter` below presumably refers to txmongo's filter
# module (shadowing the builtin) -- confirm against this file's imports.
return self.__files.find_one({"filename": filename},
                             filter = filter.sort(DESCENDING("uploadDate"))).addCallback(ok)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authenticate(self, name, password, mechanism="DEFAULT"):
""" Send an authentication command for this database. mostly stolen from pymongo """ |
if not isinstance(name, (bytes, unicode)):
raise TypeError("TxMongo: name must be an instance of basestring.")
if not isinstance(password, (bytes, unicode)):
raise TypeError("TxMongo: password must be an instance of basestring.")
"""
Authenticating
"""
return self.connection.authenticate(self, name, password, mechanism) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def timeout(func):
"""Decorator to add timeout to Deferred calls""" |
@wraps(func)
def _timeout(*args, **kwargs):
    now = time()
    # Callers may pass either an absolute `deadline` or a relative
    # `timeout` in seconds; both are popped so the wrapped function
    # never sees them directly.
    deadline = kwargs.pop("deadline", None)
    seconds = kwargs.pop("timeout", None)
    if deadline is None and seconds is not None:
        deadline = now + seconds
    # Deadline already in the past: fail fast before doing any work.
    if deadline is not None and deadline < now:
        raise TimeExceeded("TxMongo: run time exceeded by {0}s.".format(now-deadline))
    # Propagate the absolute deadline down to the wrapped function.
    kwargs['_deadline'] = deadline
    raw_d = func(*args, **kwargs)
    if deadline is None:
        return raw_d
    if seconds is None and deadline is not None and deadline - now > 0:
        seconds = deadline - now
    # Race the real deferred against a timer deferred.
    timeout_d = defer.Deferred()
    times_up = reactor.callLater(seconds, timeout_d.callback, None)
    def on_ok(result):
        # Timer fired first: cancel the operation and raise.
        if timeout_d.called:
            raw_d.cancel()
            raise TimeExceeded("TxMongo: run time of {0}s exceeded.".format(seconds))
        else:
            times_up.cancel()
            return result[0]
    def on_fail(failure):
        failure.trap(defer.FirstError)
        # Only the wrapped deferred (index 0) can errback here; the
        # timer deferred never fails.
        assert failure.value.index == 0
        times_up.cancel()
        failure.value.subFailure.raiseException()
    return defer.DeferredList([raw_d, timeout_d], fireOnOneCallback=True,
                              fireOnOneErrback=True, consumeErrors=True).addCallbacks(on_ok, on_fail)
return _timeout
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def writelines(self, sequence):
"""Write a sequence of strings to the file. Does not add separators. """ |
iterator = iter(sequence)
# Chain one write per element: each write's deferred triggers the
# next iteration, so writes happen strictly in order.
def iterate(_=None):
    try:
        return self.write(next(iterator)).addCallback(iterate)
    except StopIteration:
        # Sequence exhausted; end the chain (resolves to None).
        return
return defer.maybeDeferred(iterate)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_new_requests(self):
"""Retrieve all the new request that were found in this request. Returns: list(:class:`nyawc.http.Request`):
A list of request objects. """ |
content_type = self.__queue_item.response.headers.get('content-type')
scrapers = self.__get_all_scrapers()
new_requests = []
# Run every scraper whose supported content types match the response
# and collect all the requests they find.
for scraper in scrapers:
    instance = scraper(self.__options, self.__queue_item)
    if self.__content_type_matches(content_type, instance.content_types):
        new_requests.extend(instance.get_requests())
return new_requests
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __make_request(self, url, method, data, auth, cookies, headers, proxies, timeout, verify):
"""Execute a request with the given data. Args: url (str):
The URL to call. method (str):
The method (e.g. `get` or `post`). data (str):
The data to call the URL with. auth (obj):
The authentication class. cookies (obj):
The cookie dict. headers (obj):
The header dict. proxies (obj):
The proxies dict. timeout (int):
The request timeout in seconds. verify (mixed):
SSL verification. Returns: obj: The response object. """ |
# Resolve the requests function by method name, e.g. "get" ->
# requests.get, "post" -> requests.post.
request_by_method = getattr(requests, method)
return request_by_method(
    url=url,
    data=data,
    auth=auth,
    cookies=cookies,
    headers=headers,
    proxies=proxies,
    timeout=timeout,
    verify=verify,
    allow_redirects=True,
    stream=False
)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_all_scrapers(self):
"""Find all available scraper references. Returns: list(obj):
The scraper references. """ |
modules_strings = self.__get_all_scrapers_modules()
modules = []
for module_string in modules_strings:
module = importlib.import_module("nyawc.scrapers." + module_string)
modules.append(getattr(module, module_string))
return modules |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_all_scrapers_modules(self):
"""Find all available scraper modules. Returns: list(obj):
The scraper modules. """ |
modules = []
file = os.path.realpath(__file__)
folder = os.path.dirname(file)
# A scraper module is any ...Scraper.py file in nyawc/scrapers except
# the abstract Base* ones; the module name is the filename minus ".py".
for filename in os.listdir(folder + "/../scrapers"):
    if filename.endswith("Scraper.py") and not filename.startswith("Base"):
        modules.append(filename[:-3])
return modules
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __content_type_matches(self, content_type, available_content_types):
"""Check if the given content type matches one of the available content types. Args: content_type (str):
The given content type. available_content_types list(str):
All the available content types. Returns: bool: True if a match was found, False otherwise. """ |
if content_type is None:
return False
if content_type in available_content_types:
return True
for available_content_type in available_content_types:
if available_content_type in content_type:
return True
return False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def increase_route_count(self, crawled_request):
"""Increase the count that determines how many times a URL of a certain route has been crawled. Args: crawled_request (:class:`nyawc.http.Request`):
The request that possibly matches a route. """ |
# Find the first configured route matching this request's URL and
# bump its per-(route, method) crawl counter.
for route in self.__routing_options.routes:
    if re.compile(route).match(crawled_request.url):
        count_key = str(route) + crawled_request.method
        # dict.get with a default replaces the membership test on
        # .keys(): one lookup instead of two, same behaviour.
        self.__routing_count[count_key] = self.__routing_count.get(count_key, 0) + 1
        break
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_treshold_reached(self, scraped_request):
"""Check if similar requests to the given requests have already been crawled X times. Where X is the minimum treshold amount from the options. Args: scraped_request (:class:`nyawc.http.Request`):
The request that possibly reached the minimum treshold. Returns: bool: True if treshold reached, false otherwise. """ |
for route in self.__routing_options.routes:
if re.compile(route).match(scraped_request.url):
count_key = str(route) + scraped_request.method
if count_key in self.__routing_count.keys():
return self.__routing_count[count_key] >= self.__routing_options.minimum_threshold
return False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_request(self, request):
"""Add a request to the queue. Args: request (:class:`nyawc.http.Request`):
The request to add. Returns: :class:`nyawc.QueueItem`: The created queue item. """ |
queue_item = QueueItem(request, Response(request.url))
self.add(queue_item)
return queue_item |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def has_request(self, request):
"""Check if the given request already exists in the queue. Args: request (:class:`nyawc.http.Request`):
The request to check. Returns: bool: True if already exists, False otherwise. """ |
queue_item = QueueItem(request, Response(request.url))
key = queue_item.get_hash()
for status in QueueItem.STATUSES:
if key in self.__get_var("items_" + status).keys():
return True
return False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_first(self, status):
"""Get the first item in the queue that has the given status. Args: status (str):
return the first item with this status. Returns: :class:`nyawc.QueueItem`: The first queue item with the given status. """ |
items = self.get_all(status)
if items:
return list(items.items())[0][1]
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_for_type(input_type="text"):
"""Get a random string for the given html input type Args: input_type (str):
The input type (e.g. email). Returns: str: The (cached) random value. """ |
# Serve previously generated values so the same input type always
# yields the same value within a run.
if input_type in RandomInputHelper.cache:
    return RandomInputHelper.cache[input_type]
# Map of input type -> generator; entries are either a bare callable
# or a {"function": ..., "params": [...]} dict for calls that need
# arguments.
types = {
    "text": RandomInputHelper.get_random_value,
    "hidden": RandomInputHelper.get_random_value,
    "search": RandomInputHelper.get_random_value,
    "color": RandomInputHelper.get_random_color,
    "week": {"function": RandomInputHelper.get_random_value, "params": [2, ["1234"]]},
    "password": RandomInputHelper.get_random_password,
    "number": RandomInputHelper.get_random_number,
    "tel": RandomInputHelper.get_random_telephonenumber,
    "url": RandomInputHelper.get_random_url,
    "textarea": RandomInputHelper.get_random_text,
    "email": RandomInputHelper.get_random_email
}
# Unknown input types produce an empty string (and are not cached).
if types.get(input_type) is None:
    return ""
if type(types.get(input_type)) is dict:
    generator = types.get(input_type)
    value = generator.get("function")(*generator.get("params"))
else:
    value = types.get(input_type)()
RandomInputHelper.cache[input_type] = value
return value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_random_value(length=10, character_sets=(string.ascii_uppercase, string.ascii_lowercase)):
    """Get a random string with the given length.

    Args:
        length (int): The length of the string to return.
        character_sets (list(str)): The character sets to use.

    Returns:
        str: The random string.
    """

    # The default is a tuple rather than a list: a mutable default argument
    # is shared across calls and is a well-known Python pitfall.
    # Join the character sets once instead of once per generated character.
    alphabet = "".join(character_sets)
    return "".join(random.choice(alphabet) for _ in range(length))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_random_email(ltd="com"):
    """Get a random email address ending in the given top level domain.

    Args:
        ltd (str): The top level domain to use (e.g. com).

    Returns:
        str: The random email address.
    """

    local_part = RandomInputHelper.get_random_value(6, [string.ascii_lowercase])
    domain = RandomInputHelper.get_random_value(6, [string.ascii_lowercase])

    return local_part + "@" + domain + "." + ltd
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_random_password():
    """Get a random password that complies with most password requirements.

    Note:
        This random password is not strong and not "really" random, and
        should only be used for testing purposes.

    Returns:
        str: The random password.
    """

    # Lowercase + digits + symbols + uppercase, concatenated in that order.
    parts = (
        RandomInputHelper.get_random_value(4, [string.ascii_lowercase]),
        RandomInputHelper.get_random_value(2, [string.digits]),
        RandomInputHelper.get_random_value(2, ["$&*@!"]),
        RandomInputHelper.get_random_value(4, [string.ascii_uppercase]),
    )

    return "".join(parts)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_random_url(ltd="com"):
    """Get a random HTTPS URL ending in the given top level domain.

    Args:
        ltd (str): The top level domain to use (e.g. com).

    Returns:
        str: The random URL.
    """

    host = RandomInputHelper.get_random_value(8, [string.ascii_lowercase])

    return "https://" + host + "." + ltd
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_random_telephonenumber():
    """Get a random 10 digit phone number that complies with most requirements.

    Returns:
        str: The random telephone number (XXX-XXX-XXXX).
    """

    # Area code and prefix avoid leading digits that are commonly invalid;
    # the line number is four distinct digits.
    area_code = RandomInputHelper.get_random_value(3, "123456789")
    prefix = RandomInputHelper.get_random_value(3, "12345678")
    line_number = "".join(str(digit) for digit in random.sample(range(10), 4))

    return area_code + "-" + prefix + "-" + line_number
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def complies_with_scope(queue_item, new_request, scope):
    """Check if the new request complies with the crawling scope.

    Args:
        queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.
        new_request (:class:`nyawc.http.Request`): The request to check.
        scope (:class:`nyawc.Options.OptionsScope`): The scope to check against.

    Returns:
        bool: True if the request complies, False otherwise.
    """

    old_url = queue_item.request.url
    new_url = new_request.url

    # Both URLs must be parsable before any comparison makes sense.
    if not URLHelper.is_parsable(old_url) or not URLHelper.is_parsable(new_url):
        return False

    if scope.request_methods and queue_item.request.method not in scope.request_methods:
        return False

    if scope.protocol_must_match and URLHelper.get_protocol(old_url) != URLHelper.get_protocol(new_url):
        return False

    if scope.subdomain_must_match:
        old_subdomain = URLHelper.get_subdomain(old_url)
        new_subdomain = URLHelper.get_subdomain(new_url)

        # Treat the "www" subdomain and the bare (empty) subdomain as equivalent.
        www_equivalent = sorted([old_subdomain, new_subdomain]) == ["", "www"]

        if not www_equivalent and old_subdomain != new_subdomain:
            return False

    if scope.hostname_must_match and URLHelper.get_hostname(old_url) != URLHelper.get_hostname(new_url):
        return False

    if scope.tld_must_match and URLHelper.get_tld(old_url) != URLHelper.get_tld(new_url):
        return False

    return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_cookie_header(queue_item):
    """Convert a requests cookie jar to an HTTP request ``Cookie`` header value.

    Args:
        queue_item (:class:`nyawc.QueueItem`): The parent queue item of the new request.

    Returns:
        str: The HTTP cookie header value.
    """

    header = []
    path = URLHelper.get_path(queue_item.request.url)

    for cookie in queue_item.request.cookies:
        # A cookie scoped to the root path applies to every request path.
        root_path = cookie.path == "" or cookie.path == "/"

        if path.startswith(cookie.path) or root_path:
            header.append(cookie.name + "=" + cookie.value)

    # RFC 6265 requires cookie pairs in the Cookie header to be separated by
    # "; " — the previous "&" separator produced an invalid header value.
    return "; ".join(header)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_soup_response(self):
    """Get the response as a cached BeautifulSoup container.

    Returns:
        obj: The BeautifulSoup container, or None if there is no response
        (and nothing was cached previously).
    """

    if self.response is not None:
        if self.__response_soup is None:
            # Parse the response exactly once; the previous implementation
            # parsed it a second time before caching the result.
            result = BeautifulSoup(self.response.text, "lxml")

            # Decomposed queue items must not hold on to a cached soup, so
            # hand out a fresh (uncached) container instead.
            if self.decomposed:
                return result

            self.__response_soup = result

    return self.__response_soup
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_hash(self):
    """Generate and return the dict index hash of this queue item.

    Note:
        Cookies are deliberately excluded from the hash calculation,
        otherwise the same request would be crawled multiple times with
        e.g. different session keys, causing infinite crawling recursion.

    Note:
        The key is not actually hashed at the moment: it works fine as-is
        and hashing would require hash collision management.

    Returns:
        str: The hash of this queue item.
    """

    if self.__index_hash:
        return self.__index_hash

    url = self.request.url

    key_parts = [
        self.request.method,
        URLHelper.get_protocol(url),
        URLHelper.get_subdomain(url),
        URLHelper.get_hostname(url),
        URLHelper.get_tld(url),
        URLHelper.get_path(url),
        str(URLHelper.get_ordered_params(url)),
    ]

    if self.request.data is not None:
        key_parts.append(str(self.request.data.keys()))

    self.__index_hash = "".join(key_parts)
    return self.__index_hash
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_request(self, host, soup):
    """Build a request from the given soup form.

    Args:
        host str: The URL of the current queue item.
        soup (obj): The BeautifulSoup form.

    Returns:
        :class:`nyawc.http.Request`: The new request.
    """

    # The form's action (if any) is resolved against the current host URL;
    # a form without an action submits back to the current URL.
    if soup.has_attr("action"):
        action = self.__trim_grave_accent(soup["action"])
        url = URLHelper.make_absolute(host, action)
    else:
        url = host

    raw_method = soup["method"] if soup.has_attr("method") else "get"
    method = "post" if raw_method.lower() == "post" else "get"

    return Request(url, method, self.__get_form_data(soup))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_form_data(self, soup):
    """Build a form data dict from the given form.

    Args:
        soup (obj): The BeautifulSoup form.

    Returns:
        obj: The form data (key/value).
    """

    elements = self.__get_valid_form_data_elements(soup)
    form_data = self.__get_default_form_data_input(elements)

    # Give the user-supplied callback a chance to veto (or adjust) autofilling.
    action = self.options.callbacks.form_before_autofill(self.queue_item, elements, form_data)

    if action == CrawlerActions.DO_AUTOFILL_FORM:
        self.__autofill_form_data(form_data, elements)

    return form_data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_valid_form_data_elements(self, soup):
    """Get all valid form input elements.

    Note:
        An element is valid when its value can be updated client-side
        and it has a name attribute.

    Args:
        soup (obj): The BeautifulSoup form.

    Returns:
        list(obj): Soup elements.
    """

    candidates = soup.find_all(["input", "button", "textarea", "select"])

    return [element for element in candidates if element.has_attr("name")]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __autofill_form_data(self, form_data, elements):
    """Autofill empty form data values with random data (in place).

    Args:
        form_data (obj): The {key: value} form data.
        elements list(obj): Soup elements.
    """

    for element in elements:
        name = element["name"]

        # Only fill fields that are known and still empty.
        # (The original used `not len(...) is 0`, an identity comparison
        # with an int literal, which raises a SyntaxWarning on modern
        # Python and only works due to CPython's small-int caching.)
        if name not in form_data:
            continue

        if len(form_data[name]) != 0:
            continue

        if element.name == "textarea":
            form_data[name] = RandomInputHelper.get_for_type("textarea")
            continue

        if element.has_attr("type"):
            form_data[name] = RandomInputHelper.get_for_type(element["type"])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_default_value_from_element(self, element):
    """Get the default value of a form element.

    Args:
        element (obj): The soup element.

    Returns:
        str: The default value. For a multi-select this is a list of
        values, and for an unchecked checkbox/radio it is False.
    """

    if element.name == "select":
        options = element.find_all("option")
        is_multiple = element.has_attr("multiple")
        # Options the HTML explicitly marks as selected.
        selected_options = [
            option for option in options
            if option.has_attr("selected")
        ]
        # Browsers select the first option by default when none is marked.
        if not selected_options and options:
            selected_options = [options[0]]
        selected_values = []
        if is_multiple:
            # Multi-select: collect every selected option's value
            # (falling back to the option's text when it has no value attr).
            for option in selected_options:
                value = option["value"] if option.has_attr("value") else option.string
                selected_values.append(value)
            return selected_values
        elif len(selected_options) >= 1:
            # Single select: return the one selected option's value or text.
            if selected_options[0].has_attr("value"):
                return selected_options[0]["value"]
            else:
                return selected_options[0].string
        # A <select> with no options at all.
        return ""
    if element.name == "textarea":
        return element.string if element.string is not None else ""
    if element.name == "input" and element.has_attr("type"):
        if element["type"] in ("checkbox", "radio"):
            if not element.has_attr("checked"):
                return False
            # Checked box without an explicit value submits "on" (browser default).
            if element.has_attr("value"):
                return element["value"]
            else:
                return "on"
    # Any other input falls back to its value attribute, or empty.
    if element.has_attr("value"):
        return element["value"]
    return ""
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def append_with_data(url, data):
    """Append the given URL with the given data OrderedDict.

    Args:
        url (str): The URL to append.
        data (obj): The key/value OrderedDict to append to the URL.

    Returns:
        str: The new URL.
    """

    if data is None:
        return url

    parts = list(urlparse(url))

    # Merge the new data into the existing query string (index 4 of the
    # 6-tuple returned by urlparse), keeping blank values intact.
    query = OrderedDict(parse_qsl(parts[4], keep_blank_values=True))
    query.update(data)
    parts[4] = URLHelper.query_dict_to_string(query)

    return urlunparse(parts)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_subdomain(url):
    """Get the subdomain of the given URL.

    Args:
        url (str): The URL to get the subdomain from.

    Returns:
        str: The subdomain(s).
    """

    if url not in URLHelper.__cache:
        URLHelper.__cache[url] = urlparse(url)

    # Everything before the last two netloc labels counts as the subdomain.
    labels = URLHelper.__cache[url].netloc.split(".")

    return ".".join(labels[:-2])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_hostname(url):
    """Get the hostname of the given URL.

    Args:
        url (str): The URL to get the hostname from.

    Returns:
        str: The hostname (the second-to-last netloc label, e.g.
        "example" for "www.example.com").
    """

    if url not in URLHelper.__cache:
        URLHelper.__cache[url] = urlparse(url)

    labels = URLHelper.__cache[url].netloc.split(".")

    # A single-label netloc (e.g. "localhost") is its own hostname.
    if len(labels) == 1:
        return labels[0]

    return labels[-2]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.