docstring stringlengths 52 499 | function stringlengths 67 35.2k | __index_level_0__ int64 52.6k 1.16M |
|---|---|---|
Generic decorator for coroutines helper functions allowing
multiple variadic initialization arguments.
This function is intended to be used internally.
Arguments:
fn (function): target function to decorate.
Raises:
TypeError: if function or coroutine function is not provided.
Ret... | def decorate(fn):
if not isfunction(fn):
raise TypeError('paco: fn must be a callable object')
@functools.wraps(fn)
def decorator(*args, **kw):
# If coroutine object is passed
for arg in args:
if iscoro_or_corofunc(arg):
return fn(*args, **kw)
... | 949,096 |
Returns a coroutine function that when called, always returns
the provided value.
This function has an alias: `paco.identity`.
Arguments:
value (mixed): value to constantly return when coroutine is called.
delay (int/float): optional return value delay in seconds.
Returns:
cor... | def constant(value, delay=None):
@asyncio.coroutine
def coro():
if delay:
yield from asyncio.sleep(delay)
return value
return coro | 949,327 |
Convenient shortcut alias to ``loop.run_until_complete``.
Arguments:
coro (coroutine): coroutine object to schedule.
loop (asyncio.BaseEventLoop): optional event loop to use.
Defaults to: ``asyncio.get_event_loop()``.
Returns:
mixed: returned value by coroutine.
Usage:... | def run(coro, loop=None):
loop = loop or asyncio.get_event_loop()
return loop.run_until_complete(coro) | 949,375 |
Adds a new coroutine function with optional variadic argumetns.
Arguments:
coro (coroutine function): coroutine to execute.
*args (mixed): optional variadic arguments
Raises:
TypeError: if the coro object is not a valid coroutine
Returns:
future... | def add(self, coro, *args, **kw):
# Create coroutine object if a function is provided
if asyncio.iscoroutinefunction(coro):
coro = coro(*args, **kw)
# Verify coroutine
if not asyncio.iscoroutine(coro):
raise TypeError('paco: coro must be a coroutine obje... | 949,446 |
_add_sphere(ax)
Add a wireframe unit sphere onto matplotlib 3D axes
Args:
ax - matplotlib 3D axes object
Returns:
updated matplotlib 3D axes | def _add_sphere(ax):
(u, v) = np.mgrid[0:2 * np.pi:20j, 0:np.pi:10j]
x = np.cos(u) * np.sin(v)
y = np.sin(u) * np.sin(v)
z = np.cos(v)
ax.plot_wireframe(x, y, z, color='grey', linewidth=0.2)
return ax | 949,857 |
Template for Tika app commands
Args:
switches (list): list of switches to Tika app Jar
objectInput (object): file object/standard input to analyze
Return:
Standard output data (unicode Python 2, str Python 3) | def _command_template(self, switches, objectInput=None):
command = ["java", "-jar", self.file_jar, "-eUTF-8"]
if self.memory_allocation:
command.append("-Xmx{}".format(self.memory_allocation))
command.extend(switches)
if not objectInput:
objectInput = su... | 949,942 |
Return the content type of passed file or payload.
Args:
path (string): Path of file to analyze
payload (string): Payload base64 to analyze
objectInput (object): file object/standard input to analyze
Returns:
content type of file (string) | def detect_content_type(self, path=None, payload=None, objectInput=None):
# From Python detection content type from stdin doesn't work TO FIX
if objectInput:
message = "Detection content type with file object is not stable."
log.exception(message)
raise TikaA... | 949,943 |
Return only the text content of passed file.
These parameters are in OR. Only one of them can be analyzed.
Args:
path (string): Path of file to analyze
payload (string): Payload base64 to analyze
objectInput (object): file object/standard input to analyze
Re... | def extract_only_content(self, path=None, payload=None, objectInput=None):
if objectInput:
switches = ["-t"]
result = self._command_template(switches, objectInput)
return result, True, None
else:
f = file_path(path, payload)
switches =... | 949,944 |
This function returns a JSON of all contents and
metadata of passed file
Args:
path (string): Path of file to analyze
payload (string): Payload base64 to analyze
objectInput (object): file object/standard input to analyze
pretty_print (boolean): If True a... | def extract_all_content(
self,
path=None,
payload=None,
objectInput=None,
pretty_print=False,
convert_to_obj=False,
):
f = file_path(path, payload, objectInput)
switches = ["-J", "-t", "-r", f]
if not pretty_print:
switches... | 949,945 |
Given a file path, payload or file object, it writes file on disk and
returns the temp path.
Args:
path (string): path of real file
payload(string): payload in base64 of file
objectInput (object): file object/standard input to analyze
Returns:
Path of file | def file_path(path=None, payload=None, objectInput=None):
f = path if path else write_payload(payload, objectInput)
if not os.path.exists(f):
msg = "File {!r} does not exist".format(f)
log.exception(msg)
raise TikaAppFilePathError(msg)
return f | 949,948 |
This function writes a base64 payload or file object on disk.
Args:
payload (string): payload in base64
objectInput (object): file object/standard input to analyze
Returns:
Path of file | def write_payload(payload=None, objectInput=None):
temp = tempfile.mkstemp()[1]
log.debug("Write payload in temp file {!r}".format(temp))
with open(temp, 'wb') as f:
if payload:
payload = base64.b64decode(payload)
elif objectInput:
if six.PY3:
p... | 949,949 |
parse `global` section, and return the config.Global
Args:
global_node (TreeNode): `global` section treenode
Returns:
config.Global: an object | def build_global(self, global_node):
config_block_lines = self.__build_config_block(
global_node.config_block)
return config.Global(config_block=config_block_lines) | 950,183 |
parse `config_block` in each section
Args:
config_block_node (TreeNode): Description
Returns:
[line_node1, line_node2, ...] | def __build_config_block(self, config_block_node):
node_lists = []
for line_node in config_block_node:
if isinstance(line_node, pegnode.ConfigLine):
node_lists.append(self.__build_config(line_node))
elif isinstance(line_node, pegnode.OptionLine):
... | 950,184 |
parse `defaults` sections, and return a config.Defaults
Args:
defaults_node (TreeNode): Description
Returns:
config.Defaults: an object | def build_defaults(self, defaults_node):
proxy_name = defaults_node.defaults_header.proxy_name.text
config_block_lines = self.__build_config_block(
defaults_node.config_block)
return config.Defaults(
name=proxy_name,
config_block=config_block_lines) | 950,185 |
parse `listen` sections, and return a config.Listen
Args:
listen_node (TreeNode): Description
Returns:
config.Listen: an object | def build_listen(self, listen_node):
proxy_name = listen_node.listen_header.proxy_name.text
service_address_node = listen_node.listen_header.service_address
# parse the config block
config_block_lines = self.__build_config_block(
listen_node.config_block)
#... | 950,187 |
parse `frontend` sections, and return a config.Frontend
Args:
frontend_node (TreeNode): Description
Raises:
Exception: Description
Returns:
config.Frontend: an object | def build_frontend(self, frontend_node):
proxy_name = frontend_node.frontend_header.proxy_name.text
service_address_node = frontend_node.frontend_header.service_address
# parse the config block
config_block_lines = self.__build_config_block(
frontend_node.config_blo... | 950,188 |
parse `backend` sections
Args:
backend_node (TreeNode): Description
Returns:
config.Backend: an object | def build_backend(self, backend_node):
proxy_name = backend_node.backend_header.proxy_name.text
config_block_lines = self.__build_config_block(
backend_node.config_block)
return config.Backend(name=proxy_name, config_block=config_block_lines) | 950,189 |
Summary
Args:
config_block [config.Item, ...]: config lines
Returns:
str: config block str | def __render_config_block(self, config_block):
config_block_str = ''
for line in config_block:
if isinstance(line, config.Option):
line_str = self.__render_option(line)
elif isinstance(line, config.Config):
line_str = self.__render_config(... | 950,440 |
Squared sum of total displacements for these atoms.
Args:
None
Returns:
(Float): The square of the summed total displacements for these atoms. | def collective_dr_squared( self ):
return sum( np.square( sum( [ atom.dr for atom in self.atoms ] ) ) ) | 950,510 |
Number of these atoms occupying a specific site type.
Args:
site_label (Str): Label for the site type being considered.
Returns:
(Int): Number of atoms occupying sites of type `site_label`. | def occupations( self, site_label ):
return sum( atom.site.label == site_label for atom in self.atoms ) | 950,511 |
Generate a honeycomb lattice.
Args:
a (Int): Number of lattice repeat units along x.
b (Int): Number of lattice repeat units along y.
spacing (Float): Distance between lattice sites.
alternating_sites (Bool, optional): Label alternating sites with 'A' and 'B'. Defaul... | def honeycomb_lattice( a, b, spacing, alternating_sites=False ):
if alternating_sites:
site_labels = [ 'A', 'B', 'A', 'B' ]
else:
site_labels = [ 'L', 'L', 'L', 'L' ]
unit_cell_lengths = np.array( [ sqrt(3), 3.0, 0.0 ] ) * spacing
cell_lengths = unit_cell_lengths * np.array( [ a, b,... | 950,539 |
Generate a cubic lattice.
Args:
a (Int): Number of lattice repeat units along x.
b (Int): Number of lattice repeat units along y.
c (Int): Number of lattice repeat units along z.
spacing (Float): Distance between lattice sites.
Returns:
(Lattice)... | def cubic_lattice( a, b, c, spacing ):
grid = np.array( list( range( 1, a * b * c + 1 ) ) ).reshape( a, b, c, order='F' )
it = np.nditer( grid, flags=[ 'multi_index' ] )
sites = []
while not it.finished:
x, y, z = it.multi_index
r = np.array( [ x, y, z ] ) * spacing
neighbou... | 950,540 |
Returns the number of occupied nearest neighbour sites, classified by site type.
Args:
None
Returns:
(Dict(Str:Int)): Dictionary of nearest-neighbour occupied site numbers, classified by site label, e.g. { 'A' : 2, 'B' : 1 }. | def site_specific_nn_occupation( self ):
to_return = { l : 0 for l in set( ( site.label for site in self.p_neighbours ) ) }
for site in self.p_neighbours:
if site.is_occupied:
to_return[ site.label ] += 1
return to_return | 950,545 |
The coordination-number dependent energy for this site.
Args:
delta_occupation (:obj:Dict(Str:Int), optional): A dictionary of a change in (site-type specific) coordination number, e.g. { 'A' : 1, 'B' : -1 }.
If this is not None, the coordination-number dependent energy is calculate... | def cn_occupation_energy( self, delta_occupation=None ):
nn_occupations = self.site_specific_nn_occupation()
if delta_occupation:
for site in delta_occupation:
assert( site in nn_occupations )
nn_occupations[ site ] += delta_occupation[ site ]
... | 950,546 |
Initialise a LookupTable object instance.
Args:
lattice (lattice_mc.Lattice): The lattice object, used to define the allowed jumps.
hamiltonian (Str): The model Hamiltonian used to define the jump energies.
Allowed values = `nearest-neigbour`
Returns:
... | def __init__( self, lattice, hamiltonian ):
expected_hamiltonian_values = [ 'nearest-neighbour' ]
if hamiltonian not in expected_hamiltonian_values:
raise ValueError( hamiltonian )
self.site_energies = lattice.site_energies
self.nn_energy = lattice.nn_energy
... | 950,561 |
The relative probability for a jump between two sites with specific site types and coordination numbers.
Args:
l1 (Str): Site label for the initial site.
l2 (Str): Site label for the final site.
c1 (Int): Coordination number for the initial site.
c2 (Int): Coordi... | def relative_probability( self, l1, l2, c1, c2 ):
if self.site_energies:
site_delta_E = self.site_energies[ l2 ] - self.site_energies[ l1 ]
else:
site_delta_E = 0.0
if self.nn_energy:
delta_nn = c2 - c1 - 1 # -1 because the hopping ion is not counted ... | 950,562 |
Construct a look-up table of relative jump probabilities for a nearest-neighbour interaction Hamiltonian.
Args:
None.
Returns:
None. | def generate_nearest_neighbour_lookup_table( self ):
self.jump_probability = {}
for site_label_1 in self.connected_site_pairs:
self.jump_probability[ site_label_1 ] = {}
for site_label_2 in self.connected_site_pairs[ site_label_1 ]:
self.jump_probability[... | 950,563 |
Initialise an Atom instance.
Args:
initial_site (Site): Lattice site initially occupied by this Atom.
Returns:
None | def __init__( self, initial_site ):
Atom.atom_number += 1
self.number = Atom.atom_number
self._site = initial_site
# check this site is not already occupied
if self._site.occupation == 0:
self._site.occupation = self.number
self._site.is_occupied ... | 950,564 |
Reinitialise the stored displacements, number of hops, and list of sites visited for this `Atom`.
Args:
None
Returns:
None | def reset( self ):
self.number_of_hops = 0
self.dr = np.array( [ 0.0, 0.0, 0.0 ] )
self.summed_dr2 = 0.0
self.sites_visited = [ self._site.number ] | 950,565 |
Initialise a Simulation object.
Args:
None
Returns:
None
Notes:
Simulation parameters need to be set using their corresponding setter methods. | def __init__( self ):
self.lattice = None
self.number_of_atoms = None
self.number_of_jumps = None
self.for_time = None
self.number_of_equilibration_jumps = 0
self.atoms = None
self.has_run = False | 950,631 |
Reset all counters for this simulation.
Args:
None
Returns:
None | def reset( self ):
self.lattice.reset()
for atom in self.atoms.atoms:
atom.reset() | 950,632 |
Set the number of atoms for the simulation, and populate the simulation lattice.
Args:
n (Int): Number of atoms for this simulation.
selected_sites (:obj:(List|Set|String), optional): Selects a subset of site types to be populated with atoms. Defaults to None.
Returns:
... | def set_number_of_atoms( self, n, selected_sites=None ):
self.number_of_atoms = n
self.atoms = species.Species( self.lattice.populate_sites( self.number_of_atoms, selected_sites=selected_sites ) ) | 950,633 |
Set up the simulation lattice from a file containing site data.
Uses `init_lattice.lattice_from_sites_file`, which defines the site file spec.
Args:
filename (Str): sites file filename.
cell_lengths (List(x,y,z)): cell lengths for the simulation cell.
Returns:
... | def define_lattice_from_file( self, filename, cell_lengths ):
self.lattice = init_lattice.lattice_from_sites_file( filename, cell_lengths = cell_lengths ) | 950,634 |
Check whether the simulation has been initialised.
Args:
None
Returns:
None | def is_initialised( self ):
if not self.lattice:
raise AttributeError('Running a simulation needs the lattice to be initialised')
if not self.atoms:
raise AttributeError('Running a simulation needs the atoms to be initialised')
if not self.number_of_jumps and not... | 950,635 |
Run the simulation.
Args:
for_time (:obj:Float, optional): If `for_time` is set, then run the simulation until a set amount of time has passed. Otherwise, run the simulation for a set number of jumps. Defaults to None.
Returns:
None | def run( self, for_time=None ):
self.for_time = for_time
try:
self.is_initialised()
except AttributeError:
raise
if self.number_of_equilibration_jumps > 0:
for step in range( self.number_of_equilibration_jumps ):
self.lattice.j... | 950,636 |
Deprecated tracer correlation factor for this simulation.
Args:
None
Returns:
(Float): The tracer correlation factor, f.
Notes:
This function assumes that the jump distance between sites has
been normalised to a=1. If the jump distance is... | def old_tracer_correlation( self ):
if self.has_run:
return self.atoms.sum_dr_squared() / float( self.number_of_jumps )
else:
return None | 950,637 |
Tracer diffusion coefficient, D*.
Args:
None
Returns:
(Float): The tracer diffusion coefficient, D*. | def tracer_diffusion_coefficient( self ):
if self.has_run:
return self.atoms.sum_dr_squared() / ( 6.0 * float( self.number_of_atoms ) * self.lattice.time )
else:
return None | 950,638 |
Returns the collective correlation factor, f_I
Args:
None
Returns:
(Float): The collective correlation factor, f_I.
Notes:
This function assumes that the jump distance between sites has
been normalised to a=1. If the jumps distance is not equal ... | def old_collective_correlation( self ):
if self.has_run:
return self.atoms.collective_dr_squared() / float( self.number_of_jumps )
else:
return None | 950,639 |
Returns the collective or "jump" diffusion coefficient, D_J.
Args:
None
Returns:
(Float): The collective diffusion coefficient, D_J. | def collective_diffusion_coefficient( self ):
if self.has_run:
return self.atoms.collective_dr_squared() / ( 6.0 * self.lattice.time )
else:
return None | 950,640 |
Create a jump-probability look-up table corresponding to the appropriate Hamiltonian.
Args:
hamiltonian (Str, optional): String specifying the simulation Hamiltonian.
valid values are 'nearest-neighbour' (default) and 'coordination_number'.
Returns:
None | def setup_lookup_table( self, hamiltonian='nearest-neighbour' ):
expected_hamiltonian_values = [ 'nearest-neighbour', 'coordination_number' ]
if hamiltonian not in expected_hamiltonian_values:
raise ValueError
self.lattice.jump_lookup_table = lookup_table.LookupTable( self.l... | 950,641 |
Initialise a Lattice instance.
Args:
sites (List(Site)): List of sites contained in the lattice.
cell_lengths (np.array(x,y,z)): Vector of cell lengths for the simulation cell.
Returns:
None | def __init__( self, sites, cell_lengths ):
self.cell_lengths = cell_lengths
self.sites = sites
self.number_of_sites = len( self.sites )
self.site_labels = set( [ site.label for site in self.sites ] )
self.site_populations = Counter( [ site.label for site in self.sites ] ... | 950,642 |
Ensure that all lattice sites are within the central periodic image of the simulation cell.
Sites that are outside the central simulation cell are mapped back into this cell.
Args:
None
Returns:
None | def enforce_periodic_boundary_conditions( self ):
for s in self.sites:
for i in range(3):
if s.r[i] < 0.0:
s.r[i] += self.cell_lengths[i]
if s.r[i] > self.cell_lengths[i]:
s.r[i] -= self.cell_lengths[i] | 950,643 |
Create a lookup table allowing sites in this lattice to be queried using `self.site_lookup[n]` where `n` is the identifying site numbe.
Args:
None
Returns:
None | def initialise_site_lookup_table( self ):
self.site_lookup = {}
for site in self.sites:
self.site_lookup[ site.number ] = site | 950,644 |
All nearest-neighbour jumps not blocked by volume exclusion
(i.e. from occupied to neighbouring unoccupied sites).
Args:
None
Returns:
(List(Jump)): List of possible jumps. | def potential_jumps( self ):
jumps = []
if self.number_of_occupied_sites <= self.number_of_sites / 2:
for occupied_site in self.occupied_sites():
unoccupied_neighbours = [ site for site in [ self.site_with_id( n ) for n in occupied_site.neighbours ] if not site.is_oc... | 950,645 |
Update the lattice state by accepting a specific jump
Args:
jump (Jump): The jump that has been accepted.
Returns:
None. | def update( self, jump ):
atom = jump.initial_site.atom
dr = jump.dr( self.cell_lengths )
#print( "atom {} jumped from site {} to site {}".format( atom.number, jump.initial_site.number, jump.final_site.number ) )
jump.final_site.occupation = atom.number
jump.final_site.a... | 950,646 |
Populate the lattice sites with a specific number of atoms.
Args:
number_of_atoms (Int): The number of atoms to populate the lattice sites with.
selected_sites (:obj:List, optional): List of site labels if only some sites are to be occupied. Defaults to None.
Returns:
... | def populate_sites( self, number_of_atoms, selected_sites=None ):
if number_of_atoms > self.number_of_sites:
raise ValueError
if selected_sites:
atoms = [ atom.Atom( initial_site = site ) for site in random.sample( [ s for s in self.sites if s.label in selected_sites ], ... | 950,647 |
Select a jump at random from all potential jumps, then update the lattice state.
Args:
None
Returns:
None | def jump( self ):
potential_jumps = self.potential_jumps()
if not potential_jumps:
raise BlockedLatticeError('No moves are possible in this lattice')
all_transitions = transitions.Transitions( self.potential_jumps() )
random_jump = all_transitions.random()
de... | 950,648 |
Average site occupation for each site type
Args:
None
Returns:
(Dict(Str:Float)): Dictionary of occupation statistics, e.g.::
{ 'A' : 2.5, 'B' : 25.3 } | def site_occupation_statistics( self ):
if self.time == 0.0:
return None
occupation_stats = { label : 0.0 for label in self.site_labels }
for site in self.sites:
occupation_stats[ site.label ] += site.time_occupied
for label in self.site_labels:
... | 950,649 |
Set the energies for every site in the lattice according to the site labels.
Args:
energies (Dict(Str:Float): Dictionary of energies for each site label, e.g.::
{ 'A' : 1.0, 'B', 0.0 }
Returns:
None | def set_site_energies( self, energies ):
self.site_energies = energies
for site_label in energies:
for site in self.sites:
if site.label == site_label:
site.energy = energies[ site_label ] | 950,650 |
Set the coordination number dependent energies for this lattice.
Args:
cn_energies (Dict(Str:Dict(Int:Float))): Dictionary of dictionaries specifying the coordination number dependent energies for each site type. e.g.::
{ 'A' : { 0 : 0.0, 1 : 1.0, 2 : 2.0 }, 'B' : { 0 : 0.0, 1 : 2.... | def set_cn_energies( self, cn_energies ):
for site in self.sites:
site.set_cn_occupation_energies( cn_energies[ site.label ] )
self.cn_energies = cn_energies | 950,651 |
Returns a dictionary of the coordination numbers for each site label. e.g.::
{ 'A' : { 4 }, 'B' : { 2, 4 } }
Args:
none
Returns:
coordination_numbers (Dict(Str:Set(Int))): dictionary of coordination
nu... | def site_coordination_numbers( self ):
coordination_numbers = {}
for l in self.site_labels:
coordination_numbers[ l ] = set( [ len( site.neighbours ) for site in self.sites if site.label is l ] )
return coordination_numbers | 950,652 |
Returns a dictionary of the maximum coordination number for each site label.
e.g.::
{ 'A' : 4, 'B' : 4 }
Args:
none
Returns:
max_coordination_numbers (Dict(Str:Int)): dictionary of maxmimum coordination
... | def max_site_coordination_numbers( self ):
return { l : max( c ) for l, c in self.site_coordination_numbers().items() } | 950,653 |
Returns a dictionary of coordination numbers for each site type.
Args:
None
Returns:
(Dict(Str:List(Int))) : Dictionary of coordination numbers for each site type, e.g.::
{ 'A' : [ 2, 4 ], 'B' : [ 2 ] } | def site_specific_coordination_numbers( self ):
specific_coordination_numbers = {}
for site in self.sites:
specific_coordination_numbers[ site.label ] = site.site_specific_neighbours()
return specific_coordination_numbers | 950,654 |
Returns a dictionary of all connections between pair of sites (by site label).
e.g. for a linear lattice A-B-C will return::
{ 'A' : [ 'B' ], 'B' : [ 'A', 'C' ], 'C' : [ 'B' ] }
Args:
None
Returns:
site_connections (Dict{Str List[Str]}): A dictionar... | def connected_site_pairs( self ):
site_connections = {}
for initial_site in self.sites:
if not initial_site.label in site_connections:
site_connections[ initial_site.label ] = []
for final_site in initial_site.p_neighbours:
if final_site.l... | 950,655 |
Selects a random subset of sites with a specific label and gives them a different label.
Args:
old_site_label (String or List(String)): Site label(s) of the sites to be modified..
new_site_label (String): Site label to be applied to the modified sites.
n_site... | def transmute_sites( self, old_site_label, new_site_label, n_sites_to_change ):
selected_sites = self.select_sites( old_site_label )
for site in random.sample( selected_sites, n_sites_to_change ):
site.label = new_site_label
self.site_labels = set( [ site.label for site in s... | 950,656 |
Selects sites in the lattice with specified labels.
Args:
site_labels (List(Str)|Set(Str)|Str): Labels of sites to select.
This can be a List [ 'A', 'B' ], a Set ( 'A', 'B' ), or a String 'A'.
Returns:
(List(Site)): List of sites with labels given by `site_label... | def select_sites( self, site_labels ):
if type( site_labels ) in ( list, set ):
selected_sites = [ s for s in self.sites if s.label in site_labels ]
elif type( site_labels ) is str:
selected_sites = [ s for s in self.sites if s.label is site_labels ]
else:
... | 950,658 |
Returns all sites in the lattice (optionally from the set of sites with specific labels)
that are not part of a percolating network.
This is determined from clusters of connected sites that do not wrap round to
themselves through a periodic boundary.
Args:
site_labels (Strin... | def detached_sites( self, site_labels=None ):
clusters = self.connected_sites( site_labels=site_labels )
island_clusters = [ c for c in clusters if not any( c.is_periodically_contiguous() ) ]
return list( itertools.chain.from_iterable( ( c.sites for c in island_clusters ) ) ) | 950,659 |
Initialise an Cluster instance.
Args:
sites (List(Site): The list of sites that make up the cluster.
Returns:
None | def __init__( self, sites ):
self.sites = set( sites )
self.neighbours = set()
for s in self.sites:
self.neighbours.update( s.p_neighbours )
self.neighbours = self.neighbours.difference( self.sites ) | 950,668 |
Combine two clusters into a single cluster.
Args:
other_cluster (Cluster): The second cluster to combine.
Returns:
(Cluster): The combination of both clusters. | def merge( self, other_cluster ):
new_cluster = Cluster( self.sites | other_cluster.sites )
new_cluster.neighbours = ( self.neighbours | other_cluster.neighbours ).difference( new_cluster.sites )
return new_cluster | 950,669 |
Finds the six sites with the maximum and minimum coordinates along x, y, and z.
Args:
None
Returns:
(List(List)): In the order [ +x, -x, +y, -y, +z, -z ] | def sites_at_edges( self ):
min_x = min( [ s.r[0] for s in self.sites ] )
max_x = max( [ s.r[0] for s in self.sites ] )
min_y = min( [ s.r[1] for s in self.sites ] )
max_y = max( [ s.r[1] for s in self.sites ] )
min_z = min( [ s.r[2] for s in self.sites ] )
max_z... | 950,670 |
logical check whether a cluster connects with itself across the
simulation periodic boundary conditions.
Args:
none
Returns
( Bool, Bool, Bool ): Contiguity along the x, y, and z coordinate axes | def is_periodically_contiguous( self ):
edges = self.sites_at_edges()
is_contiguous = [ False, False, False ]
along_x = any( [ s2 in s1.p_neighbours for s1 in edges[0] for s2 in edges[1] ] )
along_y = any( [ s2 in s1.p_neighbours for s1 in edges[2] for s2 in edges[3] ] )
... | 950,671 |
Removes sites from the set of neighbouring sites if these have labels in remove_labels.
Args:
Remove_labels (List) or (Str): List of Site labels to be removed from the cluster neighbour set.
Returns:
None | def remove_sites_from_neighbours( self, remove_labels ):
if type( remove_labels ) is str:
remove_labels = [ remove_labels ]
self.neighbours = set( n for n in self.neighbours if n.label not in remove_labels ) | 950,672 |
Initialise a Transitions object.
Args:
jumps (List(Jump)): List of jumps to be contained in this Transitions object.
Returns:
None | def __init__( self, jumps ):
self.jumps = jumps
self.p = np.array( [ jump.relative_probability for jump in self.jumps ] ) | 950,674 |
Cumulative sum of the relative probabilities for all possible jumps.
Args:
None
Returns:
(np.array): Cumulative sum of relative jump probabilities. | def cumulative_probabilities( self ):
partition_function = np.sum( self.p )
return np.cumsum( self.p ) / partition_function | 950,675 |
Select a jump at random with appropriate relative probabilities.
Args:
None
Returns:
(Jump): The randomly selected Jump. | def random( self ):
j = np.searchsorted( self.cumulative_probabilities(), random.random() )
return self.jumps[ j ] | 950,676 |
The timestep until the next jump.
Args:
None
Returns:
(Float): The timestep until the next jump. | def time_to_jump( self ):
k_tot = rate_prefactor * np.sum( self.p )
return -( 1.0 / k_tot ) * math.log( random.random() ) | 950,677 |
The change in system energy if this jump were accepted.
Args:
None
Returns:
(Float): delta E | def delta_E( self ):
site_delta_E = self.final_site.energy - self.initial_site.energy
if self.nearest_neighbour_energy:
site_delta_E += self.nearest_neighbour_delta_E()
if self.coordination_number_energy:
site_delta_E += self.coordination_number_delta_E()
... | 950,701 |
Nearest-neighbour interaction contribution to the change in system energy if this jump were accepted.
Args:
None
Returns:
(Float): delta E (nearest-neighbour) | def nearest_neighbour_delta_E( self ):
delta_nn = self.final_site.nn_occupation() - self.initial_site.nn_occupation() - 1 # -1 because the hopping ion is not counted in the final site occupation number
return ( delta_nn * self.nearest_neighbour_energy ) | 950,702 |
Coordination-number dependent energy conrtibution to the change in system energy if this jump were accepted.
Args:
None
Returns:
(Float): delta E (coordination-number) | def coordination_number_delta_E( self ):
initial_site_neighbours = [ s for s in self.initial_site.p_neighbours if s.is_occupied ] # excludes final site, since this is always unoccupied
final_site_neighbours = [ s for s in self.final_site.p_neighbours if s.is_occupied and s is not self.initial_s... | 950,703 |
Particle displacement vector for this jump
Args:
cell_lengths (np.array(x,y,z)): Cell lengths for the orthogonal simulation cell.
Returns
(np.array(x,y,z)): dr | def dr( self, cell_lengths ):
half_cell_lengths = cell_lengths / 2.0
this_dr = self.final_site.r - self.initial_site.r
for i in range( 3 ):
if this_dr[ i ] > half_cell_lengths[ i ]:
this_dr[ i ] -= cell_lengths[ i ]
if this_dr[ i ] < -half_cell_le... | 950,704 |
Relative probability of accepting this jump from a lookup-table.
Args:
jump_lookup_table (LookupTable): the lookup table to be used for this jump.
Returns:
(Float): relative probability of accepting this jump. | def relative_probability_from_lookup_table( self, jump_lookup_table ):
l1 = self.initial_site.label
l2 = self.final_site.label
c1 = self.initial_site.nn_occupation()
c2 = self.final_site.nn_occupation()
return jump_lookup_table.jump_probability[ l1 ][ l2 ][ c1 ][ c2 ] | 950,705 |
Create a switch.
Args:
type: (str): type of the switch [A,B,C,D]
settings (str): a comma separted list
pin (int): wiringPi pin
Returns:
switch | def create_switch(type, settings, pin):
switch = None
if type == "A":
group, device = settings.split(",")
switch = pi_switch.RCSwitchA(group, device)
elif type == "B":
addr, channel = settings.split(",")
addr = int(addr)
channel = int(channel)
switch = pi_switch.RCSwitchB(addr, channel)
elif type =... | 950,972 |
Deletes an individual issue.
If the issue has sub-tasks you must set the deleteSubtasks=true parameter to delete the issue. You cannot delete
an issue without deleting its sub-tasks.
Args:
issue_id:
params:
Returns: | def delete_issue(self, issue_id, params=None):
return self._delete(self.API_URL + 'issue/{}'.format(issue_id), params=params) | 951,169 |
Browser based upload
Creates the video entry and meta data to initiate a browser upload
Authentication is needed
Params:
title: string
description: string
keywords: comma seperated string
developer_tags: tuple
Return:
dict co... | def upload(self, title, description="", keywords="", developer_tags=None, access_control=AccessControl.Public):
# Raise ApiError if not authenticated
if not self.authenticated:
raise ApiError(_("Authentication is required"))
# create media group
my_media_group = gda... | 951,513 |
Updates the video
Authentication is required
Params:
entry: video entry fetch via 'fetch_video()'
title: string
description: string
keywords: string
Returns:
a video entry on success
None otherwise | def update_video(self, video_id, title="", description="", keywords="", access_control=AccessControl.Unlisted):
# Raise ApiError if not authenticated
if not self.authenticated:
raise ApiError(_("Authentication is required"))
entry = self.fetch_video(video_id)
# Se... | 951,515 |
Deletes the video
Authentication is required
Params:
entry: video entry fetch via 'fetch_video()'
Return:
True if successful
Raise:
OperationError: on unsuccessful deletion | def delete_video(self, video_id):
# Raise ApiError if not authenticated
if not self.authenticated:
raise ApiError(_("Authentication is required"))
entry = self.fetch_video(video_id)
response = Api.yt_service.DeleteVideoEntry(entry)
if not response:
... | 951,516 |
The upload result page
Youtube will redirect to this page after upload is finished
Saves the video data and redirects to the next page
Params:
status: status of the upload (200 for success)
id: id number of the video | def upload_return(request):
status = request.GET.get("status")
video_id = request.GET.get("id")
if status == "200" and video_id:
# upload is successful
# save the video entry
video = Video()
video.user = request.user
video.video_id = video_id
video.save... | 951,527 |
Send an event to the IFTTT maker channel
Parameters:
-----------
api_key : string
Your IFTTT API key
event : string
The name of the IFTTT event to trigger
value1 :
Optional: Extra data sent with the event (default: None)
value2 :
Optional: Extra data sent with th... | def send_event(api_key, event, value1=None, value2=None, value3=None):
url = 'https://maker.ifttt.com/trigger/{e}/with/key/{k}/'.format(e=event,
k=api_key)
payload = {'value1': value1, 'value2': value2, 'value3': value3}
return reque... | 951,822 |
Converts from the original ASCII format of the Chen+ (2014) 3D dust map to
the HDF5 format.
Args:
dat_fname (:obj:`str`): Filename of the original ASCII .dat file.
h5_fname (:obj:`str`): Output filename to write the resulting HDF5 file to. | def ascii2h5(dat_fname, h5_fname):
table = np.loadtxt(dat_fname, skiprows=1, dtype='f4')
filter_kwargs = dict(
chunks=True,
compression='gzip',
compression_opts=3)
# Filter out pixels with all zeros
idx = ~np.all(table[:,2:32] < 1.e-5, axis=1)
with h5py.File(h5_fname,... | 952,184 |
Downloads the Chen et al. (2014) dust map.
Args:
clobber (Optional[:obj:`bool`]): If ``True``, any existing file will be
overwritten, even if it appears to match. If ``False`` (the
default), :obj:`fetch()` will attempt to determine if the dataset
already exists. This det... | def fetch(clobber=False):
dest_dir = fname_pattern = os.path.join(data_dir(), 'chen2014')
url = 'http://lamost973.pku.edu.cn/site/Photometric-Extinctions-and-Distances/table2.dat'
dat_fname = os.path.join(dest_dir, 'chen2014.dat')
h5_fname = os.path.join(dest_dir, 'chen2014.h5')
md5 = 'f8a2bc4... | 952,185 |
Downloads the IPHAS 3D dust map of Sale et al. (2014).
Args:
clobber (Optional[bool]): If ``True``, any existing file will be
overwritten, even if it appears to match. If ``False`` (the
default), ``fetch()`` will attempt to determine if the dataset
already exists. This d... | def fetch(clobber=False):
dest_dir = fname_pattern = os.path.join(data_dir(), 'iphas')
url_pattern = 'http://www.iphas.org/data/extinction/A_samp_{:03d}.tar.gz'
fname_pattern = os.path.join(dest_dir, 'A_samp_') + '{:03d}.tar.gz'
# Check if file already exists
if not clobber:
h5_fname ... | 952,221 |
Checks if the dihedral defining atom is collinear.
Checks for each index starting from the third row of the
``construction_table``, if the reference atoms are colinear.
Args:
construction_table (pd.DataFrame):
Returns:
list: A list of problematic indices. | def check_dihedral(self, construction_table):
c_table = construction_table
angles = self.get_angle_degrees(c_table.iloc[3:, :].values)
problem_index = np.nonzero((175 < angles) | (angles < 5))[0]
rename = dict(enumerate(c_table.index[3:]))
problem_index = [rename[i] for ... | 952,230 |
Reindex the dihedral defining atom if a linear reference is used.
Uses :meth:`~Cartesian.check_dihedral` to obtain the problematic
indices.
Args:
construction_table (pd.DataFrame):
use_lookup (bool): Use a lookup variable for
:meth:`~chemcoord.Cartesian.g... | def correct_dihedral(self, construction_table,
use_lookup=None):
if use_lookup is None:
use_lookup = settings['defaults']['use_lookup']
problem_index = self.check_dihedral(construction_table)
bond_dict = self._give_val_sorted_bond_dict(use_lookup=us... | 952,231 |
Reindex construction_table if a linear reference is present in the first
three rows.
Uses :meth:`~Cartesian.check_absolute_refs` to obtain the problematic
indices.
Args:
construction_table (pd.DataFrame):
Returns:
pd.DataFrame: Appropriately renamed construction... | def correct_absolute_refs(self, construction_table):
c_table = construction_table.copy()
abs_refs = constants.absolute_refs
problem_index = self.check_absolute_refs(c_table)
for i in problem_index:
order_of_refs = iter(permutations(abs_refs.keys()))
finis... | 952,234 |
Create the Zmatrix from a construction table.
Args:
Construction table (pd.DataFrame):
Returns:
Zmat: A new instance of :class:`Zmat`. | def _build_zmat(self, construction_table):
c_table = construction_table
default_cols = ['atom', 'b', 'bond', 'a', 'angle', 'd', 'dihedral']
optional_cols = list(set(self.columns) - {'atom', 'x', 'y', 'z'})
zmat_frame = pd.DataFrame(columns=default_cols + optional_cols,
... | 952,236 |
Returns the total mass in g/mol.
Args:
None
Returns:
def get_total_mass(self):
    """Return the total mass in g/mol.

    Args:
        None

    Returns:
        float: sum of the masses of all atoms.
    """
    try:
        return self.loc[:, 'mass'].sum()
    except KeyError:
        # No 'mass' column yet: attach the mass data first, then sum.
        return self.add_data('mass').loc[:, 'mass'].sum()
Determines if ``other`` has the same sumformula
Args:
other (molecule):
Returns:
bool: | def has_same_sumformula(self, other):
same_atoms = True
for atom in set(self['atom']):
own_atom_number = len(self[self['atom'] == atom])
other_atom_number = len(other[other['atom'] == atom])
same_atoms = (own_atom_number == other_atom_number)
if n... | 952,241 |
Return the number of electrons.
Args:
charge (int): Charge of the molecule.
Returns:
def get_electron_number(self, charge=0):
    """Return the number of electrons.

    Args:
        charge (int): Charge of the molecule.

    Returns:
        int: sum of the atomic numbers of all atoms, minus ``charge``.
    """
    atomic_numbers = constants.elements['atomic_number'].to_dict()
    total = sum(atomic_numbers[atom] for atom in self['atom'])
    return total - charge
Returns E(B-V) at the specified location(s) on the sky.
Args:
coords (`astropy.coordinates.SkyCoord`): The coordinates to query.
Returns:
A float array of reddening, in units of E(B-V), at the given
coordinates. The shape of the output is the same as the sha... | def query(self, coords):
# gal = coords.transform_to('galactic')
gal = coords
l = gal.l.deg
b = gal.b.deg
# Detect scalar input
scalar_input = not hasattr(l, '__len__')
if scalar_input:
l = np.array([l])
b = np.array([b])
... | 952,391 |
Return a dictionary with id, user, user_id, bounds, date of creation
and all the tags of the changeset.
Args:
changeset: the XML string of the changeset. | def changeset_info(changeset):
keys = [tag.attrib.get('k') for tag in changeset.getchildren()]
keys += ['id', 'user', 'uid', 'bbox', 'created_at']
values = [tag.attrib.get('v') for tag in changeset.getchildren()]
values += [
changeset.get('id'), changeset.get('user'), changeset.get('uid'),
... | 952,423 |
Get the changeset using the OSM API and return the content as a XML
ElementTree.
Args:
def get_changeset(changeset):
    """Download a changeset from the OSM API and return its content as a XML
    ElementTree.

    Args:
        changeset: the id of the changeset.
    """
    url = 'https://www.openstreetmap.org/api/0.6/changeset/{}/download'.format(
        changeset
    )
    response = requests.get(url)
    return ET.fromstring(response.content)
Get the metadata of a changeset using the OSM API and return it as a XML
ElementTree.
Args:
def get_metadata(changeset):
    """Get the metadata of a changeset using the OSM API and return it as a XML
    ElementTree.

    Args:
        changeset: the id of the changeset.
    """
    url = 'https://www.openstreetmap.org/api/0.6/changeset/{}'.format(changeset)
    root = ET.fromstring(requests.get(url).content)
    # Element.getchildren() was deprecated and removed in Python 3.9;
    # index the element directly to get its first child.
    return root[0]
Get the bounds of the changeset and return it as a Polygon object. If
the changeset has no coordinates (the case for changesets that deal only
with relations), it returns an empty Polygon.
Args:
changeset: the XML string of the changeset. | def get_bounds(changeset):
try:
return Polygon([
(float(changeset.get('min_lon')), float(changeset.get('min_lat'))),
(float(changeset.get('max_lon')), float(changeset.get('min_lat'))),
(float(changeset.get('max_lon')), float(changeset.get('max_lat'))),
(f... | 952,426 |
Converts from sky coordinates to pixel indices.
Args:
coords (:obj:`astropy.coordinates.SkyCoord`): Sky coordinates.
Returns:
Pixel indices of the coordinates, with the same shape as the input
coordinates. Pixels which are outside the map are given an index
... | def _coords2idx(self, coords):
x = self._coords2vec(coords)
idx = self._kd.query(x, p=self._metric_p,
distance_upper_bound=self._max_pix_scale)
return idx[1] | 952,566 |
Downloads the Marshall et al. (2006) dust map, which is based on 2MASS
stellar photometry.
Args:
clobber (Optional[:obj:`bool`]): If ``True``, any existing file will be
overwritten, even if it appears to match. If ``False`` (the
default), :obj:`fetch()` will attempt to determine... | def fetch(clobber=False):
table_dir = os.path.join(data_dir(), 'marshall')
# Check if file already exists
if not clobber:
h5_fname = os.path.join(table_dir, 'marshall.h5')
h5_size = 5033290 # Guess, in Bytes
h5_dsets = {
'l': (801, 81),
'b': (801, 81),
... | 952,568 |
Converts from Galactic coordinates to pixel indices.
Args:
gal (:obj:`astropy.coordinates.SkyCoord`): Galactic coordinates. Must
store an array of coordinates (i.e., not be scalar).
Returns:
``j, k, mask`` - Pixel indices of the coordinates, as well as a mask
... | def _gal2idx(self, gal):
# Make sure that l is in domain [-180 deg, 180 deg)
l = coordinates.Longitude(gal.l, wrap_angle=180.*units.deg)
j = (self._inv_pix_scale * (l.deg - self._l_bounds[0])).astype('i4')
k = (self._inv_pix_scale * (gal.b.deg - self._b_bounds[0])).astype('i4'... | 952,570 |
Read a molden file.
Args:
inputfile (str):
start_index (int):
Returns:
list: A list containing :class:`~chemcoord.Cartesian` is returned. | def read_molden(inputfile, start_index=0, get_bonds=True):
from chemcoord.cartesian_coordinates.cartesian_class_main import Cartesian
with open(inputfile, 'r') as f:
found = False
while not found:
line = f.readline()
if '[N_GEO]' in line:
found = True... | 952,600 |
Matrix multiplication between A and B
This function is equivalent to ``A @ B``, which is unfortunately
not possible under python 2.x.
Args:
A (sequence):
B (sequence):
Returns:
def dot(A, B):
    """Matrix multiplication between A and B.

    This function is equivalent to ``A @ B``, which is unfortunately
    not possible under python 2.x.

    Args:
        A (sequence):
        B (sequence):

    Returns:
        sequence: the matrix product.
    """
    try:
        product = A.__matmul__(B)
        # ``__matmul__`` signals an unsupported operand with NotImplemented;
        # fall back to the reflected operation on B.
        if product is NotImplemented:
            product = B.__rmatmul__(A)
    except AttributeError:
        # A has no __matmul__ at all (e.g. a plain list): use B's reflected op.
        product = B.__rmatmul__(A)
    return product
Returns the rotation matrix.
This function returns a matrix for the counterclockwise rotation
around the given axis.
The Input angle is in radians.
Args:
axis (vector):
angle (float):
Returns:
def get_rotation_matrix(axis, angle):
    """Returns the rotation matrix.

    This function returns a matrix for the counterclockwise rotation
    around the given axis.
    The input angle is in radians.

    Args:
        axis (vector): rotation axis; must have shape ``(3,)``.
        angle (float): rotation angle in radians.

    Returns:
        Rotation matrix (np.array): 3x3 rotation matrix.

    Raises:
        ValueError: if ``axis`` is not a 3-vector.
    """
    axis = normalize(np.array(axis))
    # Bug fix: the original tested np.array([1, 1, 1]).shape, which is always
    # (3,), so the check never fired and malformed axes were never rejected.
    if axis.shape != (3,):
        raise ValueError('axis.shape has to be 3')
    angle = float(angle)
    return _jit_get_rotation_matrix(axis, angle)
Returns the rotation matrix.
This function returns a matrix for the counterclockwise rotation
around the given axis.
The Input angle is in radians.
Args:
axis (vector):
angle (float):
Returns:
Rotation matrix (np.array): | def _jit_get_rotation_matrix(axis, angle):
axis = _jit_normalize(axis)
a = m.cos(angle / 2)
b, c, d = axis * m.sin(angle / 2)
rot_matrix = np.empty((3, 3))
rot_matrix[0, 0] = a**2 + b**2 - c**2 - d**2
rot_matrix[0, 1] = 2. * (b * c - a * d)
rot_matrix[0, 2] = 2. * (b * d + a * c)
ro... | 952,609 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.