func_code_string stringlengths 52 1.94M | func_documentation_string stringlengths 1 47.2k |
|---|---|
def ack(self, delivery_tag=0, multiple=False):
if not compatibility.is_integer(delivery_tag):
raise AMQPInvalidArgument('delivery_tag should be an integer')
elif not isinstance(multiple, bool):
raise AMQPInvalidArgument('multiple should be a boolean')
ack_frame =... | Acknowledge Message.
:param int/long delivery_tag: Server-assigned delivery tag
:param bool multiple: Acknowledge multiple messages
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError:... |
def nack(self, delivery_tag=0, multiple=False, requeue=True):
if not compatibility.is_integer(delivery_tag):
raise AMQPInvalidArgument('delivery_tag should be an integer')
elif not isinstance(multiple, bool):
raise AMQPInvalidArgument('multiple should be a boolean')
... | Negative Acknowledgement.
:param int/long delivery_tag: Server-assigned delivery tag
:param bool multiple: Negative acknowledge multiple messages
:param bool requeue: Re-queue the message
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the ch... |
def reject(self, delivery_tag=0, requeue=True):
if not compatibility.is_integer(delivery_tag):
raise AMQPInvalidArgument('delivery_tag should be an integer')
elif not isinstance(requeue, bool):
raise AMQPInvalidArgument('requeue should be a boolean')
reject_frame... | Reject Message.
:param int/long delivery_tag: Server-assigned delivery tag
:param bool requeue: Re-queue the message
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the ... |
def _consume_add_and_get_tag(self, consume_rpc_result):
consumer_tag = consume_rpc_result['consumer_tag']
self._channel.add_consumer_tag(consumer_tag)
return consumer_tag | Add the tag to the channel and return it.
:param dict consume_rpc_result:
:rtype: str |
def _consume_rpc_request(self, arguments, consumer_tag, exclusive, no_ack,
no_local, queue):
consume_frame = specification.Basic.Consume(queue=queue,
consumer_tag=consumer_tag,
... | Create a Consume Frame and execute a RPC request.
:param str queue: Queue name
:param str consumer_tag: Consumer tag
:param bool no_local: Do not deliver own messages
:param bool no_ack: No acknowledgement needed
:param bool exclusive: Request exclusive access
:param dic... |
def _validate_publish_parameters(body, exchange, immediate, mandatory,
properties, routing_key):
if not compatibility.is_string(body):
raise AMQPInvalidArgument('body should be a string')
elif not compatibility.is_string(routing_key):
... | Validate Publish Parameters.
:param bytes|str|unicode body: Message payload
:param str routing_key: Message routing key
:param str exchange: The exchange to publish the message to
:param dict properties: Message properties
:param bool mandatory: Requires the message is published... |
def _handle_utf8_payload(body, properties):
if 'content_encoding' not in properties:
properties['content_encoding'] = 'utf-8'
encoding = properties['content_encoding']
if compatibility.is_unicode(body):
body = body.encode(encoding)
elif compatibility.PYTH... | Update the Body and Properties to the appropriate encoding.
:param bytes|str|unicode body: Message payload
:param dict properties: Message properties
:return: |
def _get_message(self, get_frame, auto_decode):
message_uuid = self._channel.rpc.register_request(
get_frame.valid_responses + ['ContentHeader', 'ContentBody']
)
try:
self._channel.write_frame(get_frame)
get_ok_frame = self._channel.rpc.get_request(me... | Get and return a message using a Basic.Get frame.
:param Basic.Get get_frame:
:param bool auto_decode: Auto-decode strings when possible.
:rtype: Message |
def _publish_confirm(self, frames_out):
confirm_uuid = self._channel.rpc.register_request(['Basic.Ack',
'Basic.Nack'])
self._channel.write_frames(frames_out)
result = self._channel.rpc.get_request(confirm_uuid, raw=True)
... | Confirm that message was published successfully.
:param list frames_out:
:rtype: bool |
def _create_content_body(self, body):
frames = int(math.ceil(len(body) / float(self._max_frame_size)))
for offset in compatibility.RANGE(0, frames):
start_frame = self._max_frame_size * offset
end_frame = start_frame + self._max_frame_size
body_len = len(body... | Split body based on the maximum frame size.
This function is based on code from Rabbitpy.
https://github.com/gmr/rabbitpy
:param bytes|str|unicode body: Message payload
:rtype: collections.Iterable |
def _get_content_body(self, message_uuid, body_size):
body = bytes()
while len(body) < body_size:
body_piece = self._channel.rpc.get_request(message_uuid, raw=True,
multiple=True)
if not body_piece.value:
... | Get Content Body using RPC requests.
:param str uuid_body: Rpc Identifier.
:param int body_size: Content Size.
:rtype: str |
def declare(self, queue='', passive=False, durable=False,
exclusive=False, auto_delete=False, arguments=None):
if not compatibility.is_string(queue):
raise AMQPInvalidArgument('queue should be a string')
elif not isinstance(passive, bool):
raise AMQPInval... | Declare a Queue.
:param str queue: Queue name
:param bool passive: Do not create
:param bool durable: Durable queue
:param bool exclusive: Request exclusive access
:param bool auto_delete: Automatically delete when not in use
:param dict arguments: Queue key/value argume... |
def delete(self, queue='', if_unused=False, if_empty=False):
if not compatibility.is_string(queue):
raise AMQPInvalidArgument('queue should be a string')
elif not isinstance(if_unused, bool):
raise AMQPInvalidArgument('if_unused should be a boolean')
elif not isi... | Delete a Queue.
:param str queue: Queue name
:param bool if_unused: Delete only if unused
:param bool if_empty: Delete only if empty
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnect... |
def purge(self, queue):
if not compatibility.is_string(queue):
raise AMQPInvalidArgument('queue should be a string')
purge_frame = pamqp_queue.Purge(queue=queue)
return self._channel.rpc_request(purge_frame) | Purge a Queue.
:param str queue: Queue name
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:r... |
def bind(self, queue='', exchange='', routing_key='', arguments=None):
if not compatibility.is_string(queue):
raise AMQPInvalidArgument('queue should be a string')
elif not compatibility.is_string(exchange):
raise AMQPInvalidArgument('exchange should be a string')
... | Bind a Queue.
:param str queue: Queue name
:param str exchange: Exchange name
:param str routing_key: The routing key to use
:param dict arguments: Bind key/value arguments
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel e... |
def unbind(self, queue='', exchange='', routing_key='', arguments=None):
if not compatibility.is_string(queue):
raise AMQPInvalidArgument('queue should be a string')
elif not compatibility.is_string(exchange):
raise AMQPInvalidArgument('exchange should be a string')
... | Unbind a Queue.
:param str queue: Queue name
:param str exchange: Exchange name
:param str routing_key: The routing key used
:param dict arguments: Unbind key/value arguments
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel... |
def on_frame(self, frame_in):
LOGGER.debug('Frame Received: %s', frame_in.name)
if frame_in.name == 'Heartbeat':
return
elif frame_in.name == 'Connection.Close':
self._close_connection(frame_in)
elif frame_in.name == 'Connection.CloseOk':
self... | Handle frames sent to Channel0.
:param frame_in: Amqp frame.
:return: |
def _close_connection(self, frame_in):
self._set_connection_state(Stateful.CLOSED)
if frame_in.reply_code != 200:
reply_text = try_utf8_decode(frame_in.reply_text)
message = (
'Connection was closed by remote server: %s' % reply_text
)
... | Connection Close.
:param specification.Connection.Close frame_in: Amqp frame.
:return: |
def _blocked_connection(self, frame_in):
self.is_blocked = True
LOGGER.warning(
'Connection is blocked by remote server: %s',
try_utf8_decode(frame_in.reason)
) | Connection is Blocked.
:param frame_in:
:return: |
def _send_start_ok(self, frame_in):
mechanisms = try_utf8_decode(frame_in.mechanisms)
if 'EXTERNAL' in mechanisms:
mechanism = 'EXTERNAL'
credentials = '\0\0'
elif 'PLAIN' in mechanisms:
mechanism = 'PLAIN'
credentials = self._plain_creden... | Send Start OK frame.
:param specification.Connection.Start frame_in: Amqp frame.
:return: |
def _send_tune_ok(self, frame_in):
self.max_allowed_channels = self._negotiate(frame_in.channel_max,
MAX_CHANNELS)
self.max_frame_size = self._negotiate(frame_in.frame_max,
MAX_FRAME_SIZE)
... | Send Tune OK frame.
:param specification.Connection.Tune frame_in: Tune frame.
:return: |
def _send_open_connection(self):
open_frame = specification.Connection.Open(
virtual_host=self._parameters['virtual_host']
)
self._write_frame(open_frame) | Send Open Connection frame.
:return: |
def _write_frame(self, frame_out):
self._connection.write_frame(0, frame_out)
LOGGER.debug('Frame Sent: %s', frame_out.name) | Write a pamqp frame from Channel0.
:param frame_out: Amqp frame.
:return: |
def _client_properties():
return {
'product': 'AMQPStorm',
'platform': 'Python %s (%s)' % (platform.python_version(),
platform.python_implementation()),
'capabilities': {
'basic.nack': True,
... | AMQPStorm Client Properties.
:rtype: dict |
def build_inbound_messages(self, break_on_empty=False, to_tuple=False,
auto_decode=True):
self.check_for_errors()
while not self.is_closed:
message = self._build_message(auto_decode=auto_decode)
if not message:
self.check_fo... | Build messages in the inbound queue.
:param bool break_on_empty: Should we break the loop when there are
no more messages in our inbound queue.
This does not guarantee that the upstream
queue is empty, ... |
def close(self, reply_code=200, reply_text=''):
if not compatibility.is_integer(reply_code):
raise AMQPInvalidArgument('reply_code should be an integer')
elif not compatibility.is_string(reply_text):
raise AMQPInvalidArgument('reply_text should be a string')
try:... | Close Channel.
:param int reply_code: Close reply code (e.g. 200)
:param str reply_text: Close reply text
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
... |
def check_for_errors(self):
try:
self._connection.check_for_errors()
except AMQPConnectionError:
self.set_state(self.CLOSED)
raise
if self.exceptions:
exception = self.exceptions[0]
if self.is_open:
self.excepti... | Check connection and channel for errors.
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:return: |
def confirm_deliveries(self):
self._confirming_deliveries = True
confirm_frame = specification.Confirm.Select()
return self.rpc_request(confirm_frame) | Set the channel to confirm that each message has been
successfully delivered.
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:return: |
def on_frame(self, frame_in):
if self.rpc.on_frame(frame_in):
return
if frame_in.name in CONTENT_FRAME:
self._inbound.append(frame_in)
elif frame_in.name == 'Basic.Cancel':
self._basic_cancel(frame_in)
elif frame_in.name == 'Basic.CancelOk':
... | Handle frame sent to this specific channel.
:param pamqp.Frame frame_in: Amqp frame.
:return: |
def open(self):
self._inbound = []
self._exceptions = []
self.set_state(self.OPENING)
self.rpc_request(specification.Channel.Open())
self.set_state(self.OPEN) | Open Channel.
:return: |
def process_data_events(self, to_tuple=False, auto_decode=True):
if not self._consumer_callbacks:
raise AMQPChannelError('no consumer callback defined')
for message in self.build_inbound_messages(break_on_empty=True,
auto_decode=aut... | Consume inbound messages.
:param bool to_tuple: Should incoming messages be converted to a
tuple before delivery.
:param bool auto_decode: Auto-decode strings when possible.
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQP... |
def rpc_request(self, frame_out, connection_adapter=None):
with self.rpc.lock:
uuid = self.rpc.register_request(frame_out.valid_responses)
self._connection.write_frame(self.channel_id, frame_out)
return self.rpc.get_request(
uuid, connection_adapter=c... | Perform a RPC Request.
:param specification.Frame frame_out: Amqp frame.
:rtype: dict |
def start_consuming(self, to_tuple=False, auto_decode=True):
while not self.is_closed:
self.process_data_events(
to_tuple=to_tuple,
auto_decode=auto_decode
)
if self.consumer_tags:
sleep(IDLE_WAIT)
conti... | Start consuming messages.
:param bool to_tuple: Should incoming messages be converted to a
tuple before delivery.
:param bool auto_decode: Auto-decode strings when possible.
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQP... |
def stop_consuming(self):
if not self.consumer_tags:
return
if not self.is_closed:
for tag in self.consumer_tags:
self.basic.cancel(tag)
self.remove_consumer_tag() | Stop consuming messages.
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:return: |
def write_frame(self, frame_out):
self.check_for_errors()
self._connection.write_frame(self.channel_id, frame_out) | Write a pamqp frame from the current channel.
:param specification.Frame frame_out: A single pamqp frame.
:return: |
def write_frames(self, frames_out):
self.check_for_errors()
self._connection.write_frames(self.channel_id, frames_out) | Write multiple pamqp frames from the current channel.
:param list frames_out: A list of pamqp frames.
:return: |
def _basic_cancel(self, frame_in):
LOGGER.warning(
'Received Basic.Cancel on consumer_tag: %s',
try_utf8_decode(frame_in.consumer_tag)
)
self.remove_consumer_tag(frame_in.consumer_tag) | Handle a Basic Cancel frame.
:param specification.Basic.Cancel frame_in: Amqp frame.
:return: |
def _basic_return(self, frame_in):
reply_text = try_utf8_decode(frame_in.reply_text)
message = (
"Message not delivered: %s (%s) to queue '%s' from exchange '%s'" %
(
reply_text,
frame_in.reply_code,
frame_in.routing_key,
... | Handle a Basic Return Frame and treat it as an error.
:param specification.Basic.Return frame_in: Amqp frame.
:return: |
def _build_message(self, auto_decode):
with self.lock:
if len(self._inbound) < 2:
return None
headers = self._build_message_headers()
if not headers:
return None
basic_deliver, content_header = headers
body = se... | Fetch and build a complete Message from the inbound queue.
:param bool auto_decode: Auto-decode strings when possible.
:rtype: Message |
def _build_message_headers(self):
basic_deliver = self._inbound.pop(0)
if not isinstance(basic_deliver, specification.Basic.Deliver):
LOGGER.warning(
'Received an out-of-order frame: %s was '
'expecting a Basic.Deliver frame',
type(bas... | Fetch Message Headers (Deliver & Header Frames).
:rtype: tuple|None |
def _build_message_body(self, body_size):
body = bytes()
while len(body) < body_size:
if not self._inbound:
self.check_for_errors()
sleep(IDLE_WAIT)
continue
body_piece = self._inbound.pop(0)
if not body_piece.v... | Build the Message body from the inbound queue.
:rtype: str |
def _close_channel(self, frame_in):
if frame_in.reply_code != 200:
reply_text = try_utf8_decode(frame_in.reply_text)
message = (
'Channel %d was closed by remote server: %s' %
(
self._channel_id,
reply_text
... | Close Channel.
:param specification.Channel.Close frame_in: Channel Close frame.
:return: |
def create(self, username, password, tags=''):
user_payload = json.dumps({
'password': password,
'tags': tags
})
return self.http_client.put(API_USER % username,
payload=user_payload) | Create User.
:param str username: Username
:param str password: Password
:param str tags: Comma-separate list of tags (e.g. monitoring)
:rtype: None |
def get_permission(self, username, virtual_host):
virtual_host = quote(virtual_host, '')
return self.http_client.get(API_USER_VIRTUAL_HOST_PERMISSIONS %
(
virtual_host,
username
... | Get User permissions for the configured virtual host.
:param str username: Username
:param str virtual_host: Virtual host name
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: dict |
def set_permission(self, username, virtual_host, configure_regex='.*',
write_regex='.*', read_regex='.*'):
virtual_host = quote(virtual_host, '')
permission_payload = json.dumps({
"configure": configure_regex,
"read": read_regex,
"write... | Set User permissions for the configured virtual host.
:param str username: Username
:param str virtual_host: Virtual host name
:param str configure_regex: Permission pattern for configuration
operations for this user.
:param str write_regex: Permissio... |
def delete_permission(self, username, virtual_host):
virtual_host = quote(virtual_host, '')
return self.http_client.delete(
API_USER_VIRTUAL_HOST_PERMISSIONS %
(
virtual_host,
username
)) | Delete User permissions for the configured virtual host.
:param str username: Username
:param str virtual_host: Virtual host name
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: d... |
def start_server(self):
self._stopped.clear()
if not self._connection or self._connection.is_closed:
self._create_connection()
while not self._stopped.is_set():
try:
# Check our connection for errors.
self._connection.check_for_err... | Start the RPC Server.
:return: |
def _update_consumers(self):
# Do we need to start more consumers.
consumer_to_start = \
min(max(self.number_of_consumers - len(self._consumers), 0), 2)
for _ in range(consumer_to_start):
consumer = Consumer(self.rpc_queue)
self._start_consumer(consum... | Update Consumers.
- Add more if requested.
- Make sure the consumers are healthy.
- Remove excess consumers.
:return: |
def get(self, path, payload=None, headers=None):
return self._request('get', path, payload, headers) | HTTP GET operation.
:param path: URI Path
:param payload: HTTP Body
:param headers: HTTP Headers
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:return: Response |
def post(self, path, payload=None, headers=None):
return self._request('post', path, payload, headers) | HTTP POST operation.
:param path: URI Path
:param payload: HTTP Body
:param headers: HTTP Headers
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:return: Response |
def delete(self, path, payload=None, headers=None):
return self._request('delete', path, payload, headers) | HTTP DELETE operation.
:param path: URI Path
:param payload: HTTP Body
:param headers: HTTP Headers
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:return: Response |
def put(self, path, payload=None, headers=None):
return self._request('put', path, payload, headers) | HTTP PUT operation.
:param path: URI Path
:param payload: HTTP Body
:param headers: HTTP Headers
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:return: Response |
def _request(self, method, path, payload=None, headers=None):
url = urlparse.urljoin(self._base_url, 'api/%s' % path)
headers = headers or {}
headers['content-type'] = 'application/json'
try:
response = requests.request(
method, url,
a... | HTTP operation.
:param method: Operation type (e.g. post)
:param path: URI Path
:param payload: HTTP Body
:param headers: HTTP Headers
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issu... |
def _check_for_errors(response, json_response):
status_code = response.status_code
try:
response.raise_for_status()
except requests.HTTPError as why:
raise ApiError(str(why), reply_code=status_code)
if isinstance(json_response, dict) and 'error' in json_r... | Check payload for errors.
:param response: HTTP response
:param json_response: Json response
:raises ApiError: Raises if the remote server encountered an error.
:return: |
def get(self, node=None):
if not node:
return self.http_client.get(HEALTHCHECKS)
return self.http_client.get(HEALTHCHECKS_NODE % node) | Run basic healthchecks against the current node, or against a given
node.
Example response:
> {"status":"ok"}
> {"status":"failed","reason":"string"}
:param node: Node name
:raises ApiError: Raises if the remote server encountered an error.
... |
def mean_pairwise_difference(ac, an=None, fill=np.nan):
# This function calculates the mean number of pairwise differences
# between haplotypes within a single population, generalising to any number
# of alleles.
# check inputs
ac = asarray_ndim(ac, 2)
# total number of haplotypes
if an... | Calculate for each variant the mean number of pairwise differences
between chromosomes sampled from within a single population.
Parameters
----------
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
an : array_like, int, shape (n_variants,), optional
Allele ... |
def mean_pairwise_difference_between(ac1, ac2, an1=None, an2=None,
fill=np.nan):
# This function calculates the mean number of pairwise differences
# between haplotypes from two different populations, generalising to any
# number of alleles.
# check inputs
a... | Calculate for each variant the mean number of pairwise differences
between chromosomes sampled from two different populations.
Parameters
----------
ac1 : array_like, int, shape (n_variants, n_alleles)
Allele counts array from the first population.
ac2 : array_like, int, shape (n_variants,... |
def sequence_diversity(pos, ac, start=None, stop=None,
is_accessible=None):
# check inputs
if not isinstance(pos, SortedIndex):
pos = SortedIndex(pos, copy=False)
ac = asarray_ndim(ac, 2)
is_accessible = asarray_ndim(is_accessible, 1, allow_none=True)
# deal with ... | Estimate nucleotide diversity within a given region, which is the
average proportion of sites (including monomorphic sites not present in the
data) that differ between randomly chosen pairs of chromosomes.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, usi... |
def sequence_divergence(pos, ac1, ac2, an1=None, an2=None, start=None,
stop=None, is_accessible=None):
# check inputs
if not isinstance(pos, SortedIndex):
pos = SortedIndex(pos, copy=False)
ac1 = asarray_ndim(ac1, 2)
ac2 = asarray_ndim(ac2, 2)
if an1 is not None:... | Estimate nucleotide divergence between two populations within a
given region, which is the average proportion of sites (including
monomorphic sites not present in the data) that differ between randomly
chosen pairs of chromosomes, one from each population.
Parameters
----------
pos : array_lik... |
def windowed_diversity(pos, ac, size=None, start=None, stop=None, step=None,
windows=None, is_accessible=None, fill=np.nan):
# check inputs
if not isinstance(pos, SortedIndex):
pos = SortedIndex(pos, copy=False)
is_accessible = asarray_ndim(is_accessible, 1, allow_none=Tr... | Estimate nucleotide diversity in windows over a single
chromosome/contig.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, using 1-based coordinates, in ascending order.
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
siz... |
def windowed_divergence(pos, ac1, ac2, size=None, start=None, stop=None,
step=None, windows=None, is_accessible=None,
fill=np.nan):
# check inputs
pos = SortedIndex(pos, copy=False)
is_accessible = asarray_ndim(is_accessible, 1, allow_none=True)
# cal... | Estimate nucleotide divergence between two populations in windows
over a single chromosome/contig.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, using 1-based coordinates, in ascending order.
ac1 : array_like, int, shape (n_variants, n_alleles)
Al... |
def windowed_df(pos, ac1, ac2, size=None, start=None, stop=None, step=None,
windows=None, is_accessible=None, fill=np.nan):
# check inputs
pos = SortedIndex(pos, copy=False)
is_accessible = asarray_ndim(is_accessible, 1, allow_none=True)
# locate fixed differences
loc_df = locat... | Calculate the density of fixed differences between two populations in
windows over a single chromosome/contig.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, using 1-based coordinates, in ascending order.
ac1 : array_like, int, shape (n_variants, n_alleles... |
def watterson_theta(pos, ac, start=None, stop=None,
is_accessible=None):
# check inputs
if not isinstance(pos, SortedIndex):
pos = SortedIndex(pos, copy=False)
is_accessible = asarray_ndim(is_accessible, 1, allow_none=True)
if not hasattr(ac, 'count_segregating'):
... | Calculate the value of Watterson's estimator over a given region.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, using 1-based coordinates, in ascending order.
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
start : int, op... |
def windowed_watterson_theta(pos, ac, size=None, start=None, stop=None,
step=None, windows=None, is_accessible=None,
fill=np.nan):
# flake8: noqa
# check inputs
if not isinstance(pos, SortedIndex):
pos = SortedIndex(pos, copy=False)
is_... | Calculate the value of Watterson's estimator in windows over a single
chromosome/contig.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, using 1-based coordinates, in ascending order.
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts... |
def tajima_d(ac, pos=None, start=None, stop=None, min_sites=3):
# check inputs
if not hasattr(ac, 'count_segregating'):
ac = AlleleCountsArray(ac, copy=False)
# deal with subregion
if pos is not None and (start is not None or stop is not None):
if not isinstance(pos, SortedIndex):
... | Calculate the value of Tajima's D over a given region.
Parameters
----------
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
pos : array_like, int, shape (n_items,), optional
Variant positions, using 1-based coordinates, in ascending order.
start : int, opti... |
def windowed_tajima_d(pos, ac, size=None, start=None, stop=None,
step=None, windows=None, min_sites=3):
# check inputs
if not isinstance(pos, SortedIndex):
pos = SortedIndex(pos, copy=False)
if not hasattr(ac, 'count_segregating'):
ac = AlleleCountsArray(ac, copy=F... | Calculate the value of Tajima's D in windows over a single
chromosome/contig.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, using 1-based coordinates, in ascending order.
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
... |
def moving_tajima_d(ac, size, start=0, stop=None, step=None, min_sites=3):
d = moving_statistic(values=ac, statistic=tajima_d, size=size, start=start, stop=stop,
step=step, min_sites=min_sites)
return d | Calculate the value of Tajima's D in moving windows of `size` variants.
Parameters
----------
ac : array_like, int, shape (n_variants, n_alleles)
Allele counts array.
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
sto... |
def roh_mhmm(gv, pos, phet_roh=0.001, phet_nonroh=(0.0025, 0.01), transition=1e-6,
min_roh=0, is_accessible=None, contig_size=None):
from hmmlearn import hmm
# setup inputs
if isinstance(phet_nonroh, float):
phet_nonroh = phet_nonroh,
gv = GenotypeVector(gv)
pos = asarray_n... | Call ROH (runs of homozygosity) in a single individual given a genotype vector.
This function computes the likely ROH using a Multinomial HMM model. There are 3
observable states at each position in a chromosome/contig: 0 = Hom, 1 = Het,
2 = inaccessible (i.e., unobserved).
The model is provided with ... |
def roh_poissonhmm(gv, pos, phet_roh=0.001, phet_nonroh=(0.0025, 0.01), transition=1e-3,
window_size=1000, min_roh=0, is_accessible=None, contig_size=None):
from pomegranate import HiddenMarkovModel, PoissonDistribution
# equally accessbile windows
if is_accessible is None:
i... | Call ROH (runs of homozygosity) in a single individual given a genotype vector.
This function computes the likely ROH using a Poisson HMM model. The chromosome is divided into
equally accessible windows of specified size, then the number of hets observed in each is used
to fit a Poisson HMM. Note this is m... |
def sfs(dac, n=None):
# check input
dac, n = _check_dac_n(dac, n)
# need platform integer for bincount
dac = dac.astype(int, copy=False)
# compute site frequency spectrum
x = n + 1
s = np.bincount(dac, minlength=x)
return s | Compute the site frequency spectrum given derived allele counts at
a set of biallelic variants.
Parameters
----------
dac : array_like, int, shape (n_variants,)
Array of derived allele counts.
n : int, optional
The total number of chromosomes called.
Returns
-------
sfs... |
def sfs_folded(ac, n=None):
# check input
ac, n = _check_ac_n(ac, n)
# compute minor allele counts
mac = np.amin(ac, axis=1)
# need platform integer for bincount
mac = mac.astype(int, copy=False)
# compute folded site frequency spectrum
x = n//2 + 1
s = np.bincount(mac, minlengt... | Compute the folded site frequency spectrum given reference and
alternate allele counts at a set of biallelic variants.
Parameters
----------
ac : array_like, int, shape (n_variants, 2)
Allele counts array.
n : int, optional
The total number of chromosomes called.
Returns
--... |
def sfs_scaled(dac, n=None):
# compute site frequency spectrum
s = sfs(dac, n=n)
# apply scaling
s = scale_sfs(s)
return s | Compute the site frequency spectrum scaled such that a constant value is
expected across the spectrum for neutral variation and constant
population size.
Parameters
----------
dac : array_like, int, shape (n_variants,)
Array of derived allele counts.
n : int, optional
The total ... |
def scale_sfs(s):
k = np.arange(s.size)
out = s * k
return out | Scale a site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes,)
Site frequency spectrum.
Returns
-------
sfs_scaled : ndarray, int, shape (n_chromosomes,)
Scaled site frequency spectrum. |
def sfs_folded_scaled(ac, n=None):
# check input
ac, n = _check_ac_n(ac, n)
# compute the site frequency spectrum
s = sfs_folded(ac, n=n)
# apply scaling
s = scale_sfs_folded(s, n)
return s | Compute the folded site frequency spectrum scaled such that a constant
value is expected across the spectrum for neutral variation and constant
population size.
Parameters
----------
ac : array_like, int, shape (n_variants, 2)
Allele counts array.
n : int, optional
The total num... |
def scale_sfs_folded(s, n):
k = np.arange(s.shape[0])
out = s * k * (n - k) / n
return out | Scale a folded site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes//2,)
Folded site frequency spectrum.
n : int
Number of chromosomes called.
Returns
-------
sfs_folded_scaled : ndarray, int, shape (n_chromosomes//2,)
Scaled fold... |
def joint_sfs(dac1, dac2, n1=None, n2=None):
# check inputs
dac1, n1 = _check_dac_n(dac1, n1)
dac2, n2 = _check_dac_n(dac2, n2)
# compute site frequency spectrum
x = n1 + 1
y = n2 + 1
# need platform integer for bincount
tmp = (dac1 * y + dac2).astype(int, copy=False)
s = np.bin... | Compute the joint site frequency spectrum between two populations.
Parameters
----------
dac1 : array_like, int, shape (n_variants,)
Derived allele counts for the first population.
dac2 : array_like, int, shape (n_variants,)
Derived allele counts for the second population.
n1, n2 : ... |
def joint_sfs_folded(ac1, ac2, n1=None, n2=None):
# check inputs
ac1, n1 = _check_ac_n(ac1, n1)
ac2, n2 = _check_ac_n(ac2, n2)
# compute minor allele counts
mac1 = np.amin(ac1, axis=1)
mac2 = np.amin(ac2, axis=1)
# compute site frequency spectrum
x = n1//2 + 1
y = n2//2 + 1
... | Compute the joint folded site frequency spectrum between two
populations.
Parameters
----------
ac1 : array_like, int, shape (n_variants, 2)
Allele counts for the first population.
ac2 : array_like, int, shape (n_variants, 2)
Allele counts for the second population.
n1, n2 : int... |
def joint_sfs_scaled(dac1, dac2, n1=None, n2=None):
    """Compute the joint site frequency spectrum between two populations,
    scaled such that a constant value is expected across the spectrum for
    neutral variation, constant population size and unrelated populations.

    Parameters
    ----------
    dac1 : array_like, int, shape (n_variants,)
        Derived allele counts for the first population.
    dac2 : array_like, int, shape (n_variants,)
        Derived allele counts for the second population.
    n1, n2 : int, optional
        The total number of chromosomes called in each population.

    Returns
    -------
    joint_sfs_scaled : ndarray, int
        Scaled joint site frequency spectrum.

    """
    # joint spectrum first, then apply the i * j scaling
    return scale_joint_sfs(joint_sfs(dac1, dac2, n1=n1, n2=n2))
scaled such that a constant value is expected across the spectrum for
neutral variation, constant population size and unrelated populations.
Parameters
----------
dac1 : array_like, int, shape (n_variants,)
Derived allele co... |
def scale_joint_sfs(s):
    """Scale a joint site frequency spectrum.

    Parameters
    ----------
    s : array_like, int, shape (n1, n2)
        Joint site frequency spectrum.

    Returns
    -------
    joint_sfs_scaled : ndarray, int, shape (n1, n2)
        Scaled joint site frequency spectrum.

    """
    # broadcast row and column index vectors to weight each cell by i * j
    rows = np.arange(s.shape[0]).reshape(-1, 1)
    cols = np.arange(s.shape[1]).reshape(1, -1)
    return s * rows * cols
Parameters
----------
s : array_like, int, shape (n1, n2)
Joint site frequency spectrum.
Returns
-------
joint_sfs_scaled : ndarray, int, shape (n1, n2)
Scaled joint site frequency spectrum. |
def joint_sfs_folded_scaled(ac1, ac2, n1=None, n2=None):
    """Compute the joint folded site frequency spectrum between two
    populations, scaled such that a constant value is expected across the
    spectrum for neutral variation, constant population size and unrelated
    populations.

    Parameters
    ----------
    ac1 : array_like, int, shape (n_variants, 2)
        Allele counts for the first population.
    ac2 : array_like, int, shape (n_variants, 2)
        Allele counts for the second population.
    n1, n2 : int, optional
        The total number of chromosomes called in each population.

    Returns
    -------
    joint_sfs_folded_scaled : ndarray, int
        Scaled folded joint site frequency spectrum.

    """  # noqa
    # validate inputs, inferring n1/n2 from the data when not given
    ac1, n1 = _check_ac_n(ac1, n1)
    ac2, n2 = _check_ac_n(ac2, n2)
    # folded joint spectrum first, then rescale
    spectrum = joint_sfs_folded(ac1, ac2, n1=n1, n2=n2)
    return scale_joint_sfs_folded(spectrum, n1, n2)
populations, scaled such that a constant value is expected across the
spectrum for neutral variation, constant population size and unrelated
populations.
Parameters
----------
ac1 : array_like, int, shape (n_variants, 2)
Allel... |
def scale_joint_sfs_folded(s, n1, n2):
    """Scale a folded joint site frequency spectrum.

    Parameters
    ----------
    s : array_like, int, shape (m_chromosomes//2, n_chromosomes//2)
        Folded joint site frequency spectrum.
    n1, n2 : int
        The total number of chromosomes called in each population.

    Returns
    -------
    joint_sfs_folded_scaled : ndarray, int
        Scaled folded joint site frequency spectrum.

    """  # noqa
    # Vectorized equivalent of the elementwise i * j * (n1 - i) * (n2 - j)
    # scaling; broadcasting replaces the original O(m*n) Python loops and
    # matches the pattern used by scale_joint_sfs.
    i = np.arange(s.shape[0])[:, None]
    j = np.arange(s.shape[1])[None, :]
    out = s * i * j * (n1 - i) * (n2 - j)
    # preserve the input dtype, as the original assignment into
    # np.empty_like(s) cast each value to s.dtype
    return out.astype(s.dtype, copy=False)
Parameters
----------
s : array_like, int, shape (m_chromosomes//2, n_chromosomes//2)
Folded joint site frequency spectrum.
n1, n2 : int, optional
The total number of chromosomes called in each population.
Returns
-------
joint_... |
def fold_sfs(s, n):
# check inputs
s = asarray_ndim(s, 1)
assert s.shape[0] <= n + 1, 'invalid number of chromosomes'
# need to check s has all entries up to n
if s.shape[0] < n + 1:
sn = np.zeros(n + 1, dtype=s.dtype)
sn[:s.shape[0]] = s
s = sn
# fold
nf = (n + ... | Fold a site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes,)
Site frequency spectrum
n : int
Total number of chromosomes called.
Returns
-------
sfs_folded : ndarray, int
Folded site frequency spectrum |
def fold_joint_sfs(s, n1, n2):
# check inputs
s = asarray_ndim(s, 2)
assert s.shape[0] <= n1 + 1, 'invalid number of chromosomes'
assert s.shape[1] <= n2 + 1, 'invalid number of chromosomes'
# need to check s has all entries up to m
if s.shape[0] < n1 + 1:
sm = np.zeros((n1 + 1, s.s... | Fold a joint site frequency spectrum.
Parameters
----------
s : array_like, int, shape (m_chromosomes, n_chromosomes)
Joint site frequency spectrum.
n1, n2 : int, optional
The total number of chromosomes called in each population.
Returns
-------
joint_sfs_folded : ndarray,... |
def plot_sfs(s, yscale='log', bins=None, n=None,
clip_endpoints=True, label=None, plot_kwargs=None,
ax=None):
import matplotlib.pyplot as plt
import scipy
# check inputs
s = asarray_ndim(s, 1)
# setup axes
if ax is None:
fig, ax = plt.subplots()
# setup... | Plot a site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes,)
Site frequency spectrum.
yscale : string, optional
Y axis scale.
bins : int or array_like, int, optional
Allele count bins.
n : int, optional
Number of chromosomes s... |
def plot_sfs_folded(*args, **kwargs):
    """Plot a folded site frequency spectrum.

    All arguments are passed through to :func:`plot_sfs`; the x axis label
    is then overridden to refer to the minor allele.

    """
    axes = plot_sfs(*args, **kwargs)
    # if n was provided the x axis shows frequencies, otherwise raw counts
    if kwargs.get('n', None):
        label = 'minor allele frequency'
    else:
        label = 'minor allele count'
    axes.set_xlabel(label)
    return axes
Parameters
----------
s : array_like, int, shape (n_chromosomes/2,)
Site frequency spectrum.
yscale : string, optional
Y axis scale.
bins : int or array_like, int, optional
Allele count bins.
n : int, optional
Number of chro... |
def plot_sfs_scaled(*args, **kwargs):
    """Plot a scaled site frequency spectrum.

    All arguments are passed through to :func:`plot_sfs`; the y axis scale
    defaults to linear and the y axis label is overridden.

    """
    # scaled spectra are expected to be roughly flat, so a linear y axis is
    # the more useful default (caller may still override)
    if 'yscale' not in kwargs:
        kwargs['yscale'] = 'linear'
    axes = plot_sfs(*args, **kwargs)
    axes.set_ylabel('scaled site frequency')
    return axes
Parameters
----------
s : array_like, int, shape (n_chromosomes,)
Site frequency spectrum.
yscale : string, optional
Y axis scale.
bins : int or array_like, int, optional
Allele count bins.
n : int, optional
Number of chromo... |
def plot_sfs_folded_scaled(*args, **kwargs):
kwargs.setdefault('yscale', 'linear')
ax = plot_sfs_folded(*args, **kwargs)
ax.set_ylabel('scaled site frequency')
n = kwargs.get('n', None)
if n:
ax.set_xlabel('minor allele frequency')
else:
ax.set_xlabel('minor allele count')
... | Plot a folded scaled site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes/2,)
Site frequency spectrum.
yscale : string, optional
Y axis scale.
bins : int or array_like, int, optional
Allele count bins.
n : int, optional
Number ... |
def plot_joint_sfs(s, ax=None, imshow_kwargs=None):
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
# check inputs
s = asarray_ndim(s, 2)
# setup axes
if ax is None:
w = plt.rcParams['figure.figsize'][0]
fig, ax = plt.subplots(figsize=(w, w))
# set ... | Plot a joint site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes_pop1, n_chromosomes_pop2)
Joint site frequency spectrum.
ax : axes, optional
Axes on which to draw. If not provided, a new figure will be created.
imshow_kwargs : dict-like
... |
def plot_joint_sfs_folded(*args, **kwargs):
    """Plot a folded joint site frequency spectrum.

    All arguments are passed through to :func:`plot_joint_sfs`; the axis
    labels are then overridden to refer to minor allele counts.

    """
    axes = plot_joint_sfs(*args, **kwargs)
    axes.set_xlabel('minor allele count (population 1)')
    axes.set_ylabel('minor allele count (population 2)')
    return axes
Parameters
----------
s : array_like, int, shape (n_chromosomes_pop1/2, n_chromosomes_pop2/2)
Joint site frequency spectrum.
ax : axes, optional
Axes on which to draw. If not provided, a new figure will be created.
imshow_kwargs : dict-like
... |
def plot_joint_sfs_scaled(*args, **kwargs):
    """Plot a scaled joint site frequency spectrum.

    All arguments are passed through to :func:`plot_joint_sfs`, with the
    imshow colour normalization defaulting to None (a scaled spectrum is
    roughly uniform, so the log norm default is unhelpful).

    """
    # Copy rather than mutate the caller's dict, and tolerate an explicit
    # imshow_kwargs=None (previously kwargs.get('imshow_kwargs', dict())
    # returned None and setdefault raised AttributeError).
    imshow_kwargs = dict(kwargs.get('imshow_kwargs') or {})
    imshow_kwargs.setdefault('norm', None)
    kwargs['imshow_kwargs'] = imshow_kwargs
    return plot_joint_sfs(*args, **kwargs)
Parameters
----------
s : array_like, int, shape (n_chromosomes_pop1, n_chromosomes_pop2)
Joint site frequency spectrum.
ax : axes, optional
Axes on which to draw. If not provided, a new figure will be created.
imshow_kwargs : dict-like
... |
def plot_joint_sfs_folded_scaled(*args, **kwargs):
imshow_kwargs = kwargs.get('imshow_kwargs', dict())
imshow_kwargs.setdefault('norm', None)
kwargs['imshow_kwargs'] = imshow_kwargs
ax = plot_joint_sfs_folded(*args, **kwargs)
ax.set_xlabel('minor allele count (population 1)')
ax.set_ylabel(... | Plot a scaled folded joint site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes_pop1/2, n_chromosomes_pop2/2)
Joint site frequency spectrum.
ax : axes, optional
Axes on which to draw. If not provided, a new figure will be created.
imshow_kwargs : ... |
def memoryview_safe(x):
    """Make array safe to run in a Cython memoryview-based kernel. These
    kernels typically break down with the error ``ValueError: buffer source
    array is read-only`` when running in dask distributed.

    See Also
    --------
    https://github.com/dask/distributed/issues/1978
    https://github.com/cggh/scikit-allel

    """
    # already writeable: nothing to do
    if x.flags.writeable:
        return x
    # an array viewing someone else's buffer must be copied before the
    # write flag can be enabled
    if not x.flags.owndata:
        x = x.copy(order='A')
    x.setflags(write=True)
    return x
kernels typically break down with the error ``ValueError: buffer source
array is read-only`` when running in dask distributed.
See Also
--------
https://github.com/dask/distributed/issues/1978
https://github.com/cggh/scikit-allel... |
def _prep_fields_param(fields):
store_samples = False
if fields is None:
# add samples by default
return True, None
if isinstance(fields, str):
fields = [fields]
else:
fields = list(fields)
if 'samples' in fields:
fields.remove('samples')
store_sa... | Prepare the `fields` parameter, and determine whether or not to store samples. |
def _chunk_iter_progress(it, log, prefix):
n_variants = 0
before_all = time.time()
before_chunk = before_all
for chunk, chunk_length, chrom, pos in it:
after_chunk = time.time()
elapsed_chunk = after_chunk - before_chunk
elapsed = after_chunk - before_all
n_variants ... | Wrap a chunk iterator for progress logging. |
def read_vcf(input,
fields=None,
exclude_fields=None,
rename_fields=None,
types=None,
numbers=None,
alt_number=DEFAULT_ALT_NUMBER,
fills=None,
region=None,
tabix='tabix',
samples=None,
... | Read data from a VCF file into NumPy arrays.
.. versionchanged:: 1.12.0
Now returns None if no variants are found in the VCF file or matching the
requested region.
Parameters
----------
input : string or file-like
{input}
fields : list of strings, optional
{fields}
... |
def vcf_to_npz(input, output,
compressed=True,
overwrite=False,
fields=None,
exclude_fields=None,
rename_fields=None,
types=None,
numbers=None,
alt_number=DEFAULT_ALT_NUMBER,
fills=None... | Read data from a VCF file into NumPy arrays and save as a .npz file.
.. versionchanged:: 1.12.0
Now will not create any output file if no variants are found in the VCF file or
matching the requested region.
Parameters
----------
input : string
{input}
output : string
... |
def vcf_to_hdf5(input, output,
group='/',
compression='gzip',
compression_opts=1,
shuffle=False,
overwrite=False,
vlen=True,
fields=None,
exclude_fields=None,
rename_fields=Non... | Read data from a VCF file and load into an HDF5 file.
.. versionchanged:: 1.12.0
Now will not create any output file if no variants are found in the VCF file or
matching the requested region.
Parameters
----------
input : string
{input}
output : string
{output}
... |
def vcf_to_zarr(input, output,
group='/',
compressor='default',
overwrite=False,
fields=None,
exclude_fields=None,
rename_fields=None,
types=None,
numbers=None,
alt_number=DEFA... | Read data from a VCF file and load into a Zarr on-disk store.
.. versionchanged:: 1.12.0
Now will not create any output files if no variants are found in the VCF file or
matching the requested region.
Parameters
----------
input : string
{input}
output : string
{out... |
def iter_vcf_chunks(input,
fields=None,
exclude_fields=None,
types=None,
numbers=None,
alt_number=DEFAULT_ALT_NUMBER,
fills=None,
region=None,
tabix='tabix',
... | Iterate over chunks of data from a VCF file as NumPy arrays.
Parameters
----------
input : string
{input}
fields : list of strings, optional
{fields}
exclude_fields : list of strings, optional
{exclude_fields}
types : dict, optional
{types}
numbers : dict, op... |
def vcf_to_dataframe(input,
fields=None,
exclude_fields=None,
types=None,
numbers=None,
alt_number=DEFAULT_ALT_NUMBER,
fills=None,
region=None,
tabix='t... | Read data from a VCF file into a pandas DataFrame.
Parameters
----------
input : string
{input}
fields : list of strings, optional
{fields}
exclude_fields : list of strings, optional
{exclude_fields}
types : dict, optional
{types}
numbers : dict, optional
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.