| Unnamed: 0 (int64, 0-389k) | code (string, lengths 26-79.6k) | docstring (string, lengths 1-46.9k) |
|---|---|---|
5,000 | def filter_filenames(filenames):
filenames_cleaned = []
for filename in filenames:
keep = True
for pattern in FILE_EXCLUDE_EXTENTIONS:
if filename.endswith(pattern):
keep = False
for pattern in FILE_SKIP_PATTENRS:
if pattern in filename:
            ... | Skip files with extensions in `FILE_EXCLUDE_EXTENTIONS` and filenames that
contain any pattern in `FILE_SKIP_PATTENRS`. |
5,001 | def convert_constants(jmag, hmag, kmag,
cjhk,
cjh, cjk, chk,
cj, ch, ck):
if jmag is not None:
if hmag is not None:
if kmag is not None:
return cjhk[0] + cjhk[1]*jmag + cjhk[2]*hmag + cjhk[3]*kmag
... | This converts between JHK and BVRI/SDSS mags.
Not meant to be used directly. See the functions below for a more sensible
interface. This function does the grunt work of converting from JHK to
either BVRI or SDSS ugriz, while taking care of missing values for any of
jmag, hmag, or kmag.
Parameters
... |
5,002 | def from_dc_code(cls, dc_code):
result = cls.list()
dc_codes = {}
for dc in result:
if dc.get():
dc_codes[dc[]] = dc[]
    return dc_codes.get(dc_code) | Retrieve the datacenter id associated with a dc_code |
5,003 | def access_var(self, id_, lineno, scope=None, default_type=None):
result = self.access_id(id_, lineno, scope, default_type)
if result is None:
return None
if not self.check_class(id_, CLASS.var, lineno, scope):
return None
assert isinstance(result, symb... | Since ZX BASIC allows access to undeclared variables, we must allow
them, and *implicitly* declare them if they are not declared already.
This function just checks whether the id_ exists and, if so, returns its entry.
Otherwise, it creates an implicitly declared variable entry and returns it.
If the -... |
5,004 | def complete_run(self, text, line, b, e):
forth_files = [f.split(os.path.sep)[-1] for f in forth_files]
return forth_files | Autocomplete file names with .forth ending. |
5,005 | def get_assign_groups(line, ops=ops):
group = []
for item in line:
group.append(item)
if item in ops:
yield group
group = []
yield group | Split a line into groups by assignment (including
augmented assignment) |
5,006 | def decode_exactly(code, bits_per_char=6):
assert bits_per_char in (2, 4, 6)
if len(code) == 0:
return 0., 0., _LNG_INTERVAL[1], _LAT_INTERVAL[1]
bits = len(code) * bits_per_char
level = bits >> 1
dim = 1 << level
code_int = decode_int(code, bits_per_char)
    if CYTHON_AVAILABLE... | Decode a geohash on a Hilbert curve as a lng/lat position with error-margins
Decodes the geohash `code` as a lng/lat position with error-margins. It assumes
that the length of `code` corresponds to the precision, and that each character
in `code` encodes `bits_per_char` bits. Do not mix geohashes with dif... |
5,007 | def renew(cls, fqdn, duration, background):
fqdn = fqdn.lower()
if not background and not cls.intty():
background = True
domain_info = cls.info(fqdn)
current_year = domain_info[].year
domain_params = {
: duration,
: current_year,
... | Renew a domain. |
5,008 | def get_plotable3d(self):
polyhedra = sum([polyhedron.get_plotable3d()
for polyhedron in self.polyhedra], [])
return polyhedra + self.surface.get_plotable3d() | :returns: matplotlib Poly3DCollection
:rtype: list of mpl_toolkits.mplot3d |
5,009 | def _sync_from_disk(self):
try:
fobj = self.open_manifest()
except IOError as e:
if e.errno == errno.ENOENT:
raise ValueError(
"couldn't find manifest file in %s" % (self.path,))
elif e.errno == errno.ENOTDIR:
... | Read any changes made on disk to this Refpkg.
This is necessary if other programs are making changes to the
Refpkg on disk and your program must be synchronized to them. |
5,010 | def ready_argument_list(self, arguments):
gpu_args = []
for arg in arguments:
if isinstance(arg, numpy.ndarray):
gpu_args.append(cl.Buffer(self.ctx, self.mf.READ_WRITE | self.mf.COPY_HOST_PTR, hostbuf=arg))
else:
gpu_args.app... | ready argument list to be passed to the kernel, allocates gpu mem
:param arguments: List of arguments to be passed to the kernel.
The order should match the argument list on the OpenCL kernel.
Allowed values are numpy.ndarray, and/or numpy.int32, numpy.float32, and so on.
:type ... |
5,011 | def start(self, input_data, output_data, transform_resources, **kwargs):
self.transform_resources = transform_resources
self.input_data = input_data
self.output_data = output_data
image = self.primary_container[]
instance_type = transform_resources[]
instance_co... | Start the Local Transform Job
Args:
input_data (dict): Describes the dataset to be transformed and the location where it is stored.
output_data (dict): Identifies the location where to save the results from the transform job
transform_resources (dict): compute instances for ... |
5,012 | def id(self):
except KeyError:
pass
chip_id = self.detector.chip.id
board_id = None
if chip_id == ap_chip.BCM2XXX:
board_id = self._pi_id()
elif chip_id == ap_chip.AM33XX:
board_id = self._beaglebone_id()
e... | Return a unique id for the detected board, if any. |
5,013 | def _job_to_text(self, job):
next_run = self._format_date(job.get(, None))
tasks =
for task in job.get(, []):
tasks += self._task_to_text(task)
tasks +=
return .join([ % job.get(, None),
% job.get(, None),
... | Return a standard formatting of a Job serialization. |
5,014 | def load_mlf(filename, utf8_normalization=None):
with codecs.open(filename, , ) as f:
data = f.read().decode()
if utf8_normalization:
data = unicodedata.normalize(utf8_normalization, data)
mlfs = {}
for mlf_object in HTK_MLF_RE.finditer(data):
mlfs[mlf_object.group(... | Load an HTK Master Label File.
:param filename: The filename of the MLF file.
:param utf8_normalization: None |
5,015 | def base64(self, charset=None):
return b64encode(self.bytes()).decode(charset or self.charset) | Data encoded as base 64 |
5,016 | def streamline(self):
t = time.time()
self.language.streamline()
log.info(, self.__class__.__name__, time.time() - t) | Streamline the language represented by this parser to make queries run faster. |
5,017 | def _make_session(connection: Optional[str] = None) -> Session:
if connection is None:
connection = get_global_connection()
engine = create_engine(connection)
create_all(engine)
session_cls = sessionmaker(bind=engine)
session = session_cls()
return session | Make a session. |
5,018 | def _on_rpc_done(self, future):
_LOGGER.info("RPC termination has signaled manager shutdown.")
future = _maybe_wrap_exception(future)
thread = threading.Thread(
name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future}
)
thread.daemon = True
... | Triggered whenever the underlying RPC terminates without recovery.
This is typically triggered from one of two threads: the background
consumer thread (when calling ``recv()`` produces a non-recoverable
error) or the grpc management thread (when cancelling the RPC).
This method is *non... |
5,019 | def remove(self, obj, commit=True):
database = self._database(writable=True)
database.delete_document(TERM_PREFIXES[ID] + get_identifier(obj))
database.close() | Remove indexes for `obj` from the database.
We delete all instances of `Q<app_name>.<model_name>.<pk>` which
should be unique to this object.
Optional arguments:
`commit` -- ignored |
5,020 | def get_relationship(self, relationship_id):
collection = JSONClientValidated(,
collection=,
runtime=self._runtime)
result = collection.find_one(
dict({: ObjectId(self._get_i... | Gets the ``Relationship`` specified by its ``Id``.
arg: relationship_id (osid.id.Id): the ``Id`` of the
``Relationship`` to retrieve
return: (osid.relationship.Relationship) - the returned
``Relationship``
raise: NotFound - no ``Relationship`` found with the ... |
5,021 | def add_column(self, position, source_header, datatype, **kwargs):
from ..identity import GeneralNumber2
c = self.column(source_header)
c_by_pos = self.column(position)
datatype = if datatype == else datatype
assert not c or not c_by_pos or c.vid == c_by_pos.vid
... | Add a column to the source table.
:param position: Integer position of the column started from 1.
:param source_header: Name of the column, as it exists in the source file
:param datatype: Python datatype ( str, int, float, None ) for the column
:param kwargs: Other source record args.
... |
5,022 | def freeze_graph_tpu(model_path):
assert model_path
assert FLAGS.tpu_name
if FLAGS.tpu_name.startswith():
tpu_grpc_url = FLAGS.tpu_name
else:
tpu_cluster_resolver = tf.contrib.cluster_resolver.TPUClusterResolver(
FLAGS.tpu_name, zone=None, project=None)
tpu_grpc... | Custom freeze_graph implementation for Cloud TPU. |
5,023 | def calculate_size(transaction_id, thread_id):
data_size = 0
data_size += calculate_size_str(transaction_id)
data_size += LONG_SIZE_IN_BYTES
return data_size | Calculates the request payload size |
5,024 | def merge(self, other):
other = IntervalCell.coerce(other)
if self.is_equal(other):
return self
elif other.is_entailed_by(self):
return self
elif self.is_entailed_by(other):
self.low, self.high = other.low, other.high
elif... | Merges the two values |
5,025 | def operate(config):
"Interface to do simple operations on the database."
app = make_app(config=config)
print "Operate Mode"
with app.app_context():
operate_menu() | Interface to do simple operations on the database. |
5,026 | def update_port_ip_address(self):
leases = None
req = dict(ip=)
instances = self.get_vms_for_this_req(**req)
if instances is None:
return
for vm in instances:
if not leases:
leases = self._get_ip_leases()
        ... | Find the IP address that is assigned to a port via DHCP.
The port database will be updated with the IP address. |
5,027 | def _create_threads(self):
creator = JobCreator(
self.config,
self.observers.jobs,
self.logger
)
self.jobs = creator.job_factory() | This method creates job instances. |
5,028 | def base_exception_handler(*args):
header, frames, trcback = format_report(*extract_exception(*args))
LOGGER.error("!> {0}".format(Constants.logging_separators))
map(lambda x: LOGGER.error("!> {0}".format(x)), header)
LOGGER.error("!> {0}".format(Constants.logging_separators))
map(lambda x: ... | Provides the base exception handler.
:param \*args: Arguments.
:type \*args: \*
:return: Definition success.
:rtype: bool |
5,029 | def generate_seasonal_averages(qout_file, seasonal_average_file,
num_cpus=multiprocessing.cpu_count()):
with RAPIDDataset(qout_file) as qout_nc_file:
print("Generating seasonal average file ...")
        seasonal_avg_nc = Dataset(seasonal_average_file, 'w')
        seasonal... | This function loops through a CF-compliant RAPID streamflow
file to produce a netCDF file with a seasonal average for
365 days a year |
5,030 | def _get_caller_globals_and_locals():
caller_frame = inspect.stack()[2]
myglobals = caller_frame[0].f_globals
mylocals = caller_frame[0].f_locals
return myglobals, mylocals | Returns the globals and locals of the calling frame.
Is there an alternative to frame hacking here? |
5,031 | def _get_session(self):
if self.port in (465, "465"):
session = self._get_ssl()
elif self.port in (587, "587"):
session = self._get_tls()
try:
session.login(self.from_, self._auth)
except SMTPResponseException as e:
raise MessageS... | Start session with email server. |
5,032 | def inline(self) -> str:
return "{0}:{1}:{2}:{3}:{4}".format(self.issuer,
self.signatures[0],
self.membership_ts,
self.identity_ts,
... | Return inline string format of the Membership instance
:return: |
5,033 | def fit(self, train_set, test_set):
with tf.Graph().as_default(), tf.Session() as self.tf_session:
self.build_model()
tf.global_variables_initializer().run()
third = self.num_epochs // 3
for i in range(self.num_epochs):
lr_decay = self.lr... | Fit the model to the given data.
:param train_set: training data
:param test_set: test data |
5,034 | def add_slide(self, slide_layout):
partname = self._next_slide_partname
slide_layout_part = slide_layout.part
slide_part = SlidePart.new(partname, self.package, slide_layout_part)
rId = self.relate_to(slide_part, RT.SLIDE)
return rId, slide_part.slide | Return an (rId, slide) pair of a newly created blank slide that
inherits appearance from *slide_layout*. |
5,035 | def parse_mixed_delim_str(line):
arrs = [[], [], []]
for group in line.split():
        for col, coord in enumerate(group.split('/')):
if coord:
arrs[col].append(int(coord))
return [tuple(arr) for arr in arrs] | Turns .obj face index string line into [verts, texcoords, normals] numeric tuples. |
5,036 | def _get_filename(self):
if self._fname is None:
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
fname = "%s-%s.log" % (timestamp, abs(id(self)))
self._fname = os.path.join(self.file_path, fname)
return self._fname | Return a unique file name. |
5,037 | def devices_l(self) -> Dict:
output, _ = self._execute(, )
devices = output.split()[4::6]
models = output.split()[7::6]
return dict(zip(devices, models)) | List connected devices (-l for long output). |
5,038 | def _create_sequences(self):
try:
self.pdb.construct_pdb_to_rosetta_residue_map(self.rosetta_scripts_path, rosetta_database_path = self.rosetta_database_path, cache_dir = self.cache_dir)
except PDBMissingMainchainAtomsException:
self.pdb_to_rosetta_residue_map_... | Get all of the Sequences - Rosetta, ATOM, SEQRES, FASTA, UniParc. |
5,039 | def _sort_tensor(tensor):
sorted_, _ = tf.nn.top_k(tensor, k=tf.shape(input=tensor)[-1])
sorted_.set_shape(tensor.shape)
return sorted_ | Use `top_k` to sort a `Tensor` along the last dimension. |
5,040 | def callback(self):
self._callback(*self._args, **self._kwargs)
self._last_checked = time.time() | Run the callback |
5,041 | def pick_q_v1(self):
inl = self.sequences.inlets.fastaccess
new = self.sequences.states.fastaccess_new
new.qjoints[0] = 0.
for idx in range(inl.len_q):
new.qjoints[0] += inl.q[idx][0] | Assign the actual value of the inlet sequence to the upper joint
of the subreach upstream. |
5,042 | def html(self):
failure = ""
skipped = None
stdout = tag.text(self.stdout)
stderr = tag.text(self.stderr)
if self.skipped:
skipped = .format(msg=tag.text(self.skipped_msg),
skip=tag.text(self.skipped))
if self.failed():
... | Render this test case as HTML
:return: |
5,043 | def print_async_event(self, suffix, event):
tag
if not isinstance(event, dict):
return
if suffix in (,):
return
try:
outputter = self.opts.get(, event.get(, None) or event.get().get())
except AttributeError:
outputter = N... | Print all of the events with the prefix 'tag' |
5,044 | def render_html(self):
return self._template.safe_substitute(
report_type=self._report_type,
results=self.render_json()
) | Render an HTML report. |
5,045 | def _finish_disconnection_action(self, action):
success = action.data[]
conn_key = action.data[]
if self._get_connection_state(conn_key) != self.Disconnecting:
self._logger.error("Invalid finish_disconnection action on a connection whose state is not Disconnecting, conn_ke... | Finish a disconnection attempt
There are two possible outcomes:
- if we were successful at disconnecting, we transition to disconnected
- if we failed at disconnecting, we transition back to idle
Args:
action (ConnectionAction): the action object describing what we are
... |
5,046 | def unsign_data(self, data, url_safe=True):
if url_safe:
return utils.unsign_url_safe(data,
secret_key=self.secret_key,
salt=self.user_salt)
else:
return utils.unsign_data(data,
... | Retrieve the signed data. If it is expired, it will throw an exception
:param data: token/signed data
:param url_safe: bool. If true it will allow it to be passed in URL
:return: mixed, the data in its original form |
5,047 | def execute(self, args, kwargs):
return self.lookup_explicit(args, kwargs)(*args, **kwargs) | Dispatch a call. Call the first function whose type signature matches
the arguemts. |
5,048 | def session(self, session=None):
if self.related_instance:
session = self.related_instance.session
if session is None:
raise QuerySetError()
return session | Override :meth:`Manager.session` so that this
:class:`RelatedManager` can retrieve the session from the
:attr:`related_instance` if available. |
5,049 | def eval_py(self, _globals, _locals):
try:
params = eval(self.script, _globals, _locals)
except NameError as e:
raise Exception(
.format(str(e))
)
except ResolutionError as e:
raise Exception(.format(str(e)... | Evaluates a file containing a Python params dictionary. |
5,050 | def _encode_params(**kw):
args = []
for k, v in kw.iteritems():
if isinstance(v, basestring):
qv = v.encode() if isinstance(v, unicode) else v
args.append( % (k, urllib.quote(qv)))
elif isinstance(v, colle... | do url-encode parameters
>>> _encode_params(a=1, b='R&D')
'a=1&b=R%26D'
>>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123' |
5,051 | def attach(gandi, disk, vm, position, read_only, background, force):
if not force:
        proceed = click.confirm("Are you sure you want to attach disk %s"
                                " to vm %s ?" % (disk, vm))
if not proceed:
return
disk_info = gandi.disk.info(disk)
attached = di... | Attach disk to vm.
disk can be a disk name, or ID
vm can be a vm name, or ID |
5,052 | def shorten_text(self, text):
if len(text) > self.width:
            return text[:self.width - 3] + "..."
return text | Shortens text to fit into the :attr:`width`. |
5,053 | def GetIPAddresses(self):
results = []
for address in self.addresses:
human_readable_address = address.human_readable_address
if human_readable_address is not None:
results.append(human_readable_address)
return results | Return a list of IP addresses. |
5,054 | def run(self, arguments, show_help=True):
if self.use_sys:
if not gf.FROZEN:
if sys.stdin.encoding not in ["UTF-8", "UTF8"]:
self.print_warning(u"The default input encoding is not UTF-8.")
self.print_warning(u"You... | Program entry point.
Please note that the first item in ``arguments`` is discarded,
as it is assumed to be the script/invocation name;
pass a "dumb" placeholder if you call this method with
an argument different from ``sys.argv``.
:param arguments: the list of arguments
... |
5,055 | def _GetDirectory(self):
if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY:
return None
return VShadowDirectory(self._file_system, self.path_spec) | Retrieves a directory.
Returns:
VShadowDirectory: a directory, or None if not available. |
5,056 | def _CheckKeyPath(self, registry_key, search_depth):
if self._key_path_segments is None:
return False
if search_depth < 0 or search_depth > self._number_of_key_path_segments:
return False
if search_depth == 0:
segment_name =
else:
segment_name = self._key_path_s... | Checks the key path find specification.
Args:
registry_key (WinRegistryKey): Windows Registry key.
search_depth (int): number of key path segments to compare.
Returns:
bool: True if the Windows Registry key matches the find specification,
False if not. |
5,057 | def _format_time(seconds):
minutes = seconds // 60
hours = minutes // 60
rtn = u.format(minutes % 60, seconds % 60)
if hours:
rtn = u.format(int(hours % 24), rtn)
days = int(hours // 24)
if days:
rtn = u.format(days, rtn)
return rtn | Args:
seconds (float): amount of time
Format time string for eta and elapsed |
5,058 | def parse_size(image, size):
bits = size.split("x")
if image.size[0] == 0 or image.size[1] == 0:
ratio = 1.0
else:
ratio = float(image.size[0]) / float(image.size[1])
if len(bits) == 1 or not bits[1]:
width = int(bits[0])
        height =... | Parse a size string (e.g. "200", "200x100", "x200", etc.) into a
(width, height) tuple. |
5,059 | def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile):
self.fileExtension = extension
KEYWORDS = {: spc.connectChunk,
: spc.sjuncChunk,
: spc.slinkChunk}
sjuncs = []... | Storm Pipe Network File Read from File Method |
5,060 | def _consolidate_coordinateList(
self,
coordinateList):
self.log.debug()
raList = []
raList[:] = np.array([c[0] for c in coordinateList])
decList = []
decList[:] = np.array([c[1] for c in coordinateList])
nedStreamRadius = self.settings[... | *match the coordinate list against itself with the parameters of the NED search queries to minimise duplicated NED queries*
**Key Arguments:**
- ``coordinateList`` -- the original coordinateList.
**Return:**
- ``updatedCoordinateList`` -- the coordinate list with duplicated sea... |
5,061 | def cashFlow(symbol, token=, version=):
_raiseIfNotStr(symbol)
return _getJson( + symbol + , token, version) | Pulls cash flow data. Available quarterly (4 quarters) or annually (4 years).
https://iexcloud.io/docs/api/#cash-flow
Updates at 8am, 9am UTC daily
Args:
symbol (string); Ticker to request
token (string); Access token
version (string); API version
Returns:
dict: resul... |
5,062 | def get_sd_auth(val, sd_auth_pillar_name=):
*
sd_pillar = __pillar__.get(sd_auth_pillar_name)
log.debug(, sd_pillar)
if not sd_pillar:
log.error(, sd_auth_pillar_name)
raise CommandExecutionError(
.format(sd_auth_pillar_name)
)
try:
return sd_pillar[val]
... | Returns requested Server Density authentication value from pillar.
CLI Example:
.. code-block:: bash
salt '*' serverdensity_device.get_sd_auth <val> |
5,063 | def p_array_literal_2(self, p):
items = p[2]
if len(p) == 6:
items.extend(p[4])
p[0] = ast.Array(items=items) | array_literal : LBRACKET element_list RBRACKET
| LBRACKET element_list COMMA elision_opt RBRACKET |
5,064 | def sort_pkglist(pkgs):
*["3.45", "2.13"]
try:
for key in pkgs:
pkgs[key] = sorted(set(pkgs[key]))
except AttributeError as exc:
log.exception(exc) | Accepts a dict obtained from pkg.list_pkgs() and sorts in place the list of
versions for any packages that have multiple versions installed, so that
two package lists can be compared to one another.
CLI Example:
.. code-block:: bash
salt '*' pkg_resource.sort_pkglist '["3.45", "2.13"]' |
5,065 | def get_block_from_time(self, timestring, error_margin=10):
known_block = self.get_current_block()[]
known_block_timestamp = self.block_timestamp(known_block)
timestring_timestamp = parse_time(timestring).timestamp()
delta = known_block_timestamp - timestring_timestamp
b... | Estimate block number from given time
:param str timestring: String representing time
:param int error_margin: Estimate block number within this interval (in seconds) |
5,066 | def retry(self, delay=0, group=None, message=None):
args = [, self.jid, self.queue_name, self.worker_name, delay]
if group is not None and message is not None:
args.append(group)
args.append(message)
return self.client(*args) | Retry this job in a little bit, in the same queue. This is meant
for the times when you detect a transient failure yourself |
5,067 | def to_python(self, value: Optional[str]) -> Optional[Any]:
if isinstance(value, datetime.datetime):
return value
if value is None:
return value
        if value == '':
return None
return iso_string_to_python_datetime(value) | Called during deserialization and during form ``clean()`` calls.
Must deal with an instance of the correct type; a string; or ``None``
(if the field allows ``null=True``).
Should raise ``ValidationError`` if problems. |
5,068 | def _compile_lock(self, query, value):
if isinstance(value, basestring):
return value
if value is True:
return
elif value is False:
return | Compile the lock into SQL
:param query: A QueryBuilder instance
:type query: QueryBuilder
:param value: The lock value
:type value: bool or str
:return: The compiled lock
:rtype: str |
5,069 | def check_file(filepath):
check_path(filepath)
if not os.path.exists(filepath):
print("WARNING: File does not exist. Creating it: %s" % filepath)
open(filepath, ).close()
try:
print("Setting access rights for %s for www-data user" % (filepath))
uid = pwd.getpwnam("www-da... | - Checks if the parent directories for this path exist.
- Checks that the file exists.
- Donates the file to the web server user.
TODO: This is Debian / Ubuntu specific. |
5,070 | def create(self, request):
login_form = AuthenticationForm(request, data=request.data)
if not login_form.is_valid():
raise serializers.ValidationError(login_form.errors)
auth_login(request, login_form.get_user())
        serializer = U... | Log in a Django staff user |
5,071 | def vars_to_array(self):
logger.warn(
)
if not self.vars:
return None
vars_matrix = matrix(self.vars, size=(self.vars[0].size[0],
len(self.vars))).trans()
self.vars_array = np.array(vars_matrix)
... | Convert `self.vars` to a numpy array
Returns
-------
numpy.array |
5,072 | def same_origin(url1, url2):
p1, p2 = urlparse(url1), urlparse(url2)
try:
o1 = (p1.scheme, p1.hostname, p1.port or PROTOCOL_TO_PORT[p1.scheme])
o2 = (p2.scheme, p2.hostname, p2.port or PROTOCOL_TO_PORT[p2.scheme])
return o1 == o2
except (ValueError, KeyError):
return Fal... | Return True if the urls have the same origin, else False.
Copied from Django:
https://github.com/django/django/blob/master/django/utils/http.py#L255 |
5,073 | def p_partselect_pointer_minus(self, p):
p[0] = Partselect(p[1], p[3], Minus(
p[3], p[5], lineno=p.lineno(1)), lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1)) | partselect : pointer LBRACKET expression MINUSCOLON expression RBRACKET |
5,074 | def validate_auth_option(option, value):
lower, value = validate(option, value)
if lower not in _AUTH_OPTIONS:
raise ConfigurationError(
% (option,))
return lower, value | Validate optional authentication parameters. |
5,075 | def json2space(x, oldy=None, name=NodeType.Root.value):
y = list()
if isinstance(x, dict):
if NodeType.Type.value in x.keys():
_type = x[NodeType.Type.value]
name = name + + _type
if _type == :
if oldy != None:
_index = oldy[N... | Change search space from json format to hyperopt format |
5,076 | def metropolis_hastings_step(current_state: State,
proposed_state: State,
energy_change: FloatTensor,
seed=None) -> Tuple[State, tf.Tensor, tf.Tensor]:
flat_current = tf.nest.flatten(current_state)
flat_proposed = nest.flatten... | Metropolis-Hastings step.
This probabilistically chooses between `current_state` and `proposed_state`
based on the `energy_change` so as to preserve detailed balance.
Energy change is the negative of `log_accept_ratio`.
Args:
current_state: Current state.
proposed_state: Proposed state.
energy_ch... |
5,077 | def create_path_env_var(new_entries, env=None, env_var=, delimiter=, prepend=False):
if env is None:
env = {}
prev_path = env.get(env_var, None)
if prev_path is None:
path_dirs = list()
else:
path_dirs = list(prev_path.split(delimiter))
new_entries_list = list(new_entries)
if prepend:
... | Join path entries, combining with an environment variable if specified. |
5,078 | def is_grouping_sane(cls, gtype):
if gtype == cls.SHUFFLE or gtype == cls.ALL or gtype == cls.LOWEST or gtype == cls.NONE:
return True
elif isinstance(gtype, cls.FIELDS):
return gtype.gtype == topology_pb2.Grouping.Value("FIELDS") and \
gtype.fields is not None
elif isinstance(... | Checks if a given gtype is sane |
5,079 | def attribute_md5(self):
def utf8(str):
if isinstance(str, six.string_types):
return str.encode()
return str
md5 = hashlib.md5()
struct_format = "!I".encode()
encoded += struct.pack(struct_format, len(utf8(n... | The MD5 of all attributes is calculated by first generating a
utf-8 string from each attribute and MD5-ing the concatenation
of them all. Each attribute is encoded with some bytes that
describe the length of each part and the type of attribute.
Not yet implemented:
List type... |
5,080 | def createRootJob(self, *args, **kwargs):
rootJob = self.create(*args, **kwargs)
self.setRootJob(rootJob.jobStoreID)
return rootJob | Create a new job and set it as the root job in this job store
:rtype: toil.jobGraph.JobGraph |
5,081 | def __set_no_protein(self, hgvs_string):
        no_protein_list = ["p.0", "p.0?"]
if hgvs_string in no_protein_list:
self.is_no_protein = True
self.is_non_silent = True
else:
self.is_no_protein = False | Set a flag for no protein expected. ("p.0" or "p.0?")
Args:
hgvs_string (str): hgvs syntax with "p." removed |
5,082 | def _SendRecv():
port = int(os.getenv(DEVSHELL_ENV, 0))
if port == 0:
raise NoDevshellServer()
sock = socket.socket()
sock.connect((, port))
data = CREDENTIAL_INFO_REQUEST_JSON
msg = .format(len(data), data)
sock.sendall(_helpers._to_bytes(msg, encoding=))
header = sock.... | Communicate with the Developer Shell server socket. |
5,083 | def register(self, matchers, runnable):
if getattr(self, , None) is not None and getattr(self, , None) is None:
self.syscallrunnable = runnable
else:
for m in matchers:
self.matchtree.insert(m, runnable)
events = self.eventtre... | Register an iterator(runnable) to scheduler and wait for events
:param matchers: sequence of EventMatchers
:param runnable: an iterator that accept send method
:param daemon: if True, the runnable will be registered as a daemon. |
5,084 | def length(self):
item = self.head
counter = 0
while item is not None:
counter += 1
item = item.next_node
return counter | Gets length
:return: number of items in the linked list |
5,085 | def baseline_correct(G):
baseidx =[]
baseidx.extend(range(np.min(np.where(G.f_ppm<5.0)),np.max(np.where(G.f_ppm>4.0))+1))
baseidx.extend(range(np.min(np.where(G.f_ppm<3.5)),np.max(np.where(G.f_ppm>3.2))+1))
baseidx.extend(range(np.min(np.where(G.f_ppm<2.8)),np.max(np.where(G.f_ppm>2.5))+1))
... | This function zeroes the baseline from 2.5ppm upwards |
5,086 | def download(course, tid=None, dl_all=False, force=False, upgradejava=False,
update=False):
def dl(id):
download_exercise(Exercise.get(Exercise.tid == id),
force=force,
update_java=upgradejava,
update=update)
... | Download the exercises from the server. |
5,087 | def p_article(self, article):
article[0] = Article(article[1][4], article[2], article[3], article[1][0],
article[1][1], article[1][2], article[1][3], article[1][5]) | article : ARTICLEHEADER opttexts rules opttexts |
5,088 | def sdiffstore(self, destkey, key, *keys):
    return self.execute(b'SDIFFSTORE', destkey, key, *keys) | Subtract multiple sets and store the resulting set in a key. |
5,089 | def set_checkpoint(self, checkpoint_trigger,
checkpoint_path, isOverWrite=True):
if not os.path.exists(checkpoint_path):
mkpath(checkpoint_path)
callBigDlFunc(self.bigdl_type, "setCheckPoint", self.value,
checkpoint_trigger, checkpoint_pa... | Configure checkpoint settings.
:param checkpoint_trigger: the interval to write snapshots
:param checkpoint_path: the path to write snapshots into
:param isOverWrite: whether to overwrite existing snapshots in path. Default is True. |
5,090 | def get_path_and_name(full_name):
if full_name:
parts = full_name.split("/")
return ("/".join(parts[0:-1]), parts[-1]) if len(parts) > 1 else ("/", full_name)
    return None, None | Split a full resource name into 'Path' and 'Name'
:param full_name: <str> Full Resource Name - like 'Root/Folder/Folder2/Name'
:return: tuple (Path, Name) |
5,091 | def start(self):
nat_interface_number = yield from self._look_for_interface("nat")
if nat_interface_number < 0:
raise GNS3VMError("The GNS3 VM: {} must have a NAT interface configured in order to start".format(self.vmname))
hostonly_interface_number = yield from s... | Start the GNS3 VM. |
5,092 | def toc(*args, **kwargs):
global Gtic_start
f_elapsedTime = time.time() - Gtic_start
for key, value in kwargs.items():
if key == : return value % f_elapsedTime
if key == : return "Elapsed time = %f seconds." % f_elapsedTime
    return f_elapsedTime | Port of the MATLAB function of the same name
Behaviour is controllable to some extent by the keyword
args: |
5,093 | def get_message_by_id(self, message_id):
result = self.wapi_functions.getMessageById(message_id)
if result:
result = factory_message(result, self)
return result | Fetch a message
:param message_id: Message ID
:type message_id: str
:return: Message or False
:rtype: Message |
5,094 | def clean_strings(iterable):
retval = []
for val in iterable:
try:
retval.append(val.strip())
except(AttributeError):
retval.append(val)
    return retval | Take a list of strings and strip whitespace
from each one. If a value in the list is not a
string, pass it through untouched.
Args:
iterable: mixed list
Returns:
mixed list |
5,095 | def dot(vec1, vec2):
if isinstance(vec1, Vector3) and isinstance(vec2, Vector3):
return (vec1.x * vec2.x) + (vec1.y * vec2.y) + (vec1.z * vec2.z)
elif isinstance(vec1, Vector4) and isinstance(vec2, Vector4):
return (vec1.x * vec2.x) + (vec1.y * vec2.y) + (vec1.z * vec2.z) + (vec1.w * vec2.w... | Returns the dot product of two Vectors |
5,096 | def iterate_from_vcf(infile, sample):
vcf = pysam.VCF()
vcf.connect(infile)
if sample not in vcf.getsamples():
raise KeyError("sample %s not vcf file")
for row in vcf.fetch():
result = vcf2pileup(row, sample)
if result:
yield result | iterate over a vcf-formatted file.
*infile* can be any iterator over lines.
The function yields named tuples of the type
:class:`pysam.Pileup.PileupSubstitution` or
:class:`pysam.Pileup.PileupIndel`.
Positions without a snp will be skipped.
This method is wasteful and written to support sa... |
5,097 | def parse_fntdata(_data, _config, _extra_data_receiver=None):
data = {}
frame_data_list = []
parse_common_info = parse("common lineHeight={line_height:d} base={base:d} scaleW={scale_w:d} scaleH={scale_h:d} pages={pages:d} packed={packed:d}", _data[1])
parse_page_info = parse("page id={id:d} file=\"{file}\"", _da... | info face="Haettenschweiler" size=60 bold=0 italic=0 charset="" unicode=0 stretchH=100 smooth=1 aa=1 padding=0,0,0,0 spacing=2,2
common lineHeight=64 base=53 scaleW=256 scaleH=128 pages=1 packed=0
page id=0 file="attack_num.png"
chars count=12
char id=52 x=2 y=2 width=33 height=51 xoffset=0 yoffset=5 xadvance=32 pa... |
5,098 | def merge(cls, components):
action = cls.EXTEND
val = {}
for component in components:
if component.action is cls.REPLACE:
val = component.val
action = cls.REPLACE
elif component.action is cls.EXTEND:
val.update(component.val)
else:
raise Parse... | Merges components into a single component, applying their actions appropriately.
This operation is associative: M(M(a, b), c) == M(a, M(b, c)) == M(a, b, c).
:param list components: an iterable of instances of DictValueComponent.
:return: An instance representing the result of merging the components.
... |
5,099 | def run(analysis, path=None, name=None, info=None, **kwargs):
kwargs.update({
: analysis,
: path,
: name,
: info,
})
main(**kwargs) | Run a single analysis.
:param Analysis analysis: Analysis class to run.
:param str path: Path of analysis. Can be `__file__`.
:param str name: Name of the analysis.
:param dict info: Optional entries are ``version``, ``title``,
``readme``, ...
:param dict static: Map[url regex, root-folder]... |