docstring (string): lengths 52 to 499
function (string): lengths 67 to 35.2k
__index_level_0__ (int64): values 52.6k to 1.16M
Generic decorator for coroutine helper functions, allowing multiple variadic initialization arguments. This function is intended to be used internally. Arguments: fn (function): target function to decorate. Raises: TypeError: if a function or coroutine function is not provided. Returns: function: decorated function.
def decorate(fn): if not isfunction(fn): raise TypeError('paco: fn must be a callable object') @functools.wraps(fn) def decorator(*args, **kw): # If coroutine object is passed for arg in args: if iscoro_or_corofunc(arg): return fn(*args, **kw) # Explicit argument must be at least a coroutine if len(args) and args[0] is None: raise TypeError('paco: first argument cannot be empty') def wrapper(coro, *_args, **_kw): # coro must be a valid type if not iscoro_or_corofunc(coro): raise TypeError('paco: first argument must be a ' 'coroutine or coroutine function') # Merge call arguments _args = ((coro,) + (args + _args)) kw.update(_kw) # Trigger original decorated function return fn(*_args, **kw) return wrapper return decorator
949,096
Returns a coroutine function that when called, always returns the provided value. This function has an alias: `paco.identity`. Arguments: value (mixed): value to constantly return when coroutine is called. delay (int/float): optional return value delay in seconds. Returns: coroutinefunction Usage:: coro = paco.constant('foo') await coro() # => 'foo' await coro() # => 'foo'
def constant(value, delay=None): @asyncio.coroutine def coro(): if delay: yield from asyncio.sleep(delay) return value return coro
949,327
Convenient shortcut alias to ``loop.run_until_complete``. Arguments: coro (coroutine): coroutine object to schedule. loop (asyncio.BaseEventLoop): optional event loop to use. Defaults to: ``asyncio.get_event_loop()``. Returns: mixed: returned value by coroutine. Usage:: async def mul_2(num): return num * 2 paco.run(mul_2(4)) # => 8
def run(coro, loop=None): loop = loop or asyncio.get_event_loop() return loop.run_until_complete(coro)
949,375
Adds a new coroutine function with optional variadic arguments. Arguments: coro (coroutine function): coroutine to execute. *args (mixed): optional variadic arguments. Raises: TypeError: if the coro object is not a valid coroutine. Returns: future: coroutine wrapped future
def add(self, coro, *args, **kw): # Create coroutine object if a function is provided if asyncio.iscoroutinefunction(coro): coro = coro(*args, **kw) # Verify coroutine if not asyncio.iscoroutine(coro): raise TypeError('paco: coro must be a coroutine object') # Store coroutine with arguments for deferred execution index = max(len(self.pool), 0) task = Task(index, coro) # Append the coroutine data to the pool self.pool.append(task) return coro
949,446
_add_sphere(ax) Add a wireframe unit sphere onto matplotlib 3D axes Args: ax - matplotlib 3D axes object Returns: updated matplotlib 3D axes
def _add_sphere(ax): (u, v) = np.mgrid[0:2 * np.pi:20j, 0:np.pi:10j] x = np.cos(u) * np.sin(v) y = np.sin(u) * np.sin(v) z = np.cos(v) ax.plot_wireframe(x, y, z, color='grey', linewidth=0.2) return ax
949,857
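A minimal usage sketch for a wireframe-sphere helper like the one above; the figure setup and the use of `_add_sphere` as an in-scope name are assumptions for illustration only.

import matplotlib.pyplot as plt
# assumes _add_sphere (defined in the row above) is available in scope
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax = _add_sphere(ax)
ax.set_box_aspect((1, 1, 1))  # keep the unit sphere visually round (matplotlib >= 3.3)
plt.show()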
Template for Tika app commands Args: switches (list): list of switches to Tika app Jar objectInput (object): file object/standard input to analyze Return: Standard output data (unicode Python 2, str Python 3)
def _command_template(self, switches, objectInput=None): command = ["java", "-jar", self.file_jar, "-eUTF-8"] if self.memory_allocation: command.append("-Xmx{}".format(self.memory_allocation)) command.extend(switches) if not objectInput: objectInput = subprocess.PIPE log.debug("Subprocess command: {}".format(", ".join(command))) if six.PY2: with open(os.devnull, "w") as devnull: out = subprocess.Popen( command, stdin=objectInput, stdout=subprocess.PIPE, stderr=devnull) elif six.PY3: out = subprocess.Popen( command, stdin=objectInput, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) stdoutdata, _ = out.communicate() return stdoutdata.decode("utf-8").strip()
949,942
Return the content type of passed file or payload. Args: path (string): Path of file to analyze payload (string): Payload base64 to analyze objectInput (object): file object/standard input to analyze Returns: content type of file (string)
def detect_content_type(self, path=None, payload=None, objectInput=None): # From Python detection content type from stdin doesn't work TO FIX if objectInput: message = "Detection content type with file object is not stable." log.exception(message) raise TikaAppError(message) f = file_path(path, payload, objectInput) switches = ["-d", f] result = self._command_template(switches).lower() return result, path, f
949,943
Return only the text content of the passed file. These parameters are mutually exclusive; only one of them can be analyzed. Args: path (string): Path of file to analyze payload (string): Payload base64 to analyze objectInput (object): file object/standard input to analyze Returns: text of the passed file (string)
def extract_only_content(self, path=None, payload=None, objectInput=None): if objectInput: switches = ["-t"] result = self._command_template(switches, objectInput) return result, True, None else: f = file_path(path, payload) switches = ["-t", f] result = self._command_template(switches) return result, path, f
949,944
This function returns a JSON of all contents and metadata of the passed file. Args: path (string): Path of file to analyze payload (string): Payload base64 to analyze objectInput (object): file object/standard input to analyze pretty_print (boolean): If True, adds newlines and whitespace for better readability convert_to_obj (boolean): If True, converts the JSON into a Python object
def extract_all_content( self, path=None, payload=None, objectInput=None, pretty_print=False, convert_to_obj=False, ): f = file_path(path, payload, objectInput) switches = ["-J", "-t", "-r", f] if not pretty_print: switches.remove("-r") result = self._command_template(switches) if result and convert_to_obj: result = json.loads(result, encoding="utf-8") return result, path, f
949,945
Given a file path, payload or file object, it writes the file to disk if needed and returns its path. Args: path (string): path of real file payload (string): payload in base64 of file objectInput (object): file object/standard input to analyze Returns: Path of file
def file_path(path=None, payload=None, objectInput=None): f = path if path else write_payload(payload, objectInput) if not os.path.exists(f): msg = "File {!r} does not exist".format(f) log.exception(msg) raise TikaAppFilePathError(msg) return f
949,948
This function writes a base64 payload or file object on disk. Args: payload (string): payload in base64 objectInput (object): file object/standard input to analyze Returns: Path of file
def write_payload(payload=None, objectInput=None): temp = tempfile.mkstemp()[1] log.debug("Write payload in temp file {!r}".format(temp)) with open(temp, 'wb') as f: if payload: payload = base64.b64decode(payload) elif objectInput: if six.PY3: payload = objectInput.buffer.read() elif six.PY2: payload = objectInput.read() f.write(payload) return temp
949,949
parse `global` section, and return the config.Global Args: global_node (TreeNode): `global` section treenode Returns: config.Global: an object
def build_global(self, global_node): config_block_lines = self.__build_config_block( global_node.config_block) return config.Global(config_block=config_block_lines)
950,183
parse `config_block` in each section Args: config_block_node (TreeNode): `config_block` section treenode Returns: [line_node1, line_node2, ...]
def __build_config_block(self, config_block_node): node_lists = [] for line_node in config_block_node: if isinstance(line_node, pegnode.ConfigLine): node_lists.append(self.__build_config(line_node)) elif isinstance(line_node, pegnode.OptionLine): node_lists.append(self.__build_option(line_node)) elif isinstance(line_node, pegnode.ServerLine): node_lists.append( self.__build_server(line_node)) elif isinstance(line_node, pegnode.BindLine): node_lists.append( self.__build_bind(line_node)) elif isinstance(line_node, pegnode.AclLine): node_lists.append( self.__build_acl(line_node)) elif isinstance(line_node, pegnode.BackendLine): node_lists.append( self.__build_usebackend(line_node)) elif isinstance(line_node, pegnode.UserLine): node_lists.append( self.__build_user(line_node)) elif isinstance(line_node, pegnode.GroupLine): node_lists.append( self.__build_group(line_node)) else: # may blank_line, comment_line pass return node_lists
950,184
parse `defaults` sections, and return a config.Defaults Args: defaults_node (TreeNode): `defaults` section treenode Returns: config.Defaults: an object
def build_defaults(self, defaults_node): proxy_name = defaults_node.defaults_header.proxy_name.text config_block_lines = self.__build_config_block( defaults_node.config_block) return config.Defaults( name=proxy_name, config_block=config_block_lines)
950,185
parse `listen` sections, and return a config.Listen Args: listen_node (TreeNode): `listen` section treenode Returns: config.Listen: an object
def build_listen(self, listen_node): proxy_name = listen_node.listen_header.proxy_name.text service_address_node = listen_node.listen_header.service_address # parse the config block config_block_lines = self.__build_config_block( listen_node.config_block) # parse host and port host, port = '', '' if isinstance(service_address_node, pegnode.ServiceAddress): host = service_address_node.host.text port = service_address_node.port.text else: # use `bind` in config lines to fill in host and port # just use the first for line in config_block_lines: if isinstance(line, config.Bind): host, port = line.host, line.port break else: raise Exception( 'Not specify host and port in `listen` definition') return config.Listen( name=proxy_name, host=host, port=port, config_block=config_block_lines)
950,187
parse `frontend` sections, and return a config.Frontend Args: frontend_node (TreeNode): `frontend` section treenode Raises: Exception: if host and port are not specified in the `frontend` definition Returns: config.Frontend: an object
def build_frontend(self, frontend_node): proxy_name = frontend_node.frontend_header.proxy_name.text service_address_node = frontend_node.frontend_header.service_address # parse the config block config_block_lines = self.__build_config_block( frontend_node.config_block) # parse host and port host, port = '', '' if isinstance(service_address_node, pegnode.ServiceAddress): host = service_address_node.host.text port = service_address_node.port.text else: # use `bind` in config lines to fill in host and port # just use the first for line in config_block_lines: if isinstance(line, config.Bind): host, port = line.host, line.port break else: raise Exception( 'Not specify host and port in `frontend` definition') return config.Frontend( name=proxy_name, host=host, port=port, config_block=config_block_lines)
950,188
parse `backend` sections Args: backend_node (TreeNode): `backend` section treenode Returns: config.Backend: an object
def build_backend(self, backend_node): proxy_name = backend_node.backend_header.proxy_name.text config_block_lines = self.__build_config_block( backend_node.config_block) return config.Backend(name=proxy_name, config_block=config_block_lines)
950,189
Render a config block into its string representation. Args: config_block ([config.Item, ...]): config lines Returns: str: config block str
def __render_config_block(self, config_block): config_block_str = '' for line in config_block: if isinstance(line, config.Option): line_str = self.__render_option(line) elif isinstance(line, config.Config): line_str = self.__render_config(line) elif isinstance(line, config.Server): line_str = self.__render_server(line) elif isinstance(line, config.Bind): line_str = self.__render_bind(line) elif isinstance(line, config.Acl): line_str = self.__render_acl(line) elif isinstance(line, config.UseBackend): line_str = self.__render_usebackend(line) elif isinstance(line, config.User): line_str = self.__render_user(line) elif isinstance(line, config.Group): line_str = self.__render_group(line) # append line str config_block_str = config_block_str + line_str return config_block_str
950,440
Squared sum of total displacements for these atoms. Args: None Returns: (Float): The square of the summed total displacements for these atoms.
def collective_dr_squared( self ): return sum( np.square( sum( [ atom.dr for atom in self.atoms ] ) ) )
950,510
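Note the order of operations in `collective_dr_squared` above: the per-atom displacement vectors are summed first, and the squared magnitude of that collective vector is returned; this differs from summing each atom's squared displacement. A tiny illustrative sketch with made-up vectors:

import numpy as np

dr = [np.array([1.0, 0.0, 0.0]), np.array([-1.0, 0.0, 0.0])]
collective = np.sum(np.square(sum(dr)))     # |dr1 + dr2|^2 -> 0.0
individual = sum(np.dot(d, d) for d in dr)  # |dr1|^2 + |dr2|^2 -> 2.0
print(collective, individual)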
Number of these atoms occupying a specific site type. Args: site_label (Str): Label for the site type being considered. Returns: (Int): Number of atoms occupying sites of type `site_label`.
def occupations( self, site_label ): return sum( atom.site.label == site_label for atom in self.atoms )
950,511
Generate a honeycomb lattice. Args: a (Int): Number of lattice repeat units along x. b (Int): Number of lattice repeat units along y. spacing (Float): Distance between lattice sites. alternating_sites (Bool, optional): Label alternating sites with 'A' and 'B'. Defaults to False. Returns: (Lattice): The new lattice Notes: The returned lattice is 3D periodic, but all sites and edges lie in the xy plane.
def honeycomb_lattice( a, b, spacing, alternating_sites=False ): if alternating_sites: site_labels = [ 'A', 'B', 'A', 'B' ] else: site_labels = [ 'L', 'L', 'L', 'L' ] unit_cell_lengths = np.array( [ sqrt(3), 3.0, 0.0 ] ) * spacing cell_lengths = unit_cell_lengths * np.array( [ a, b, 1.0 ] ) grid = np.array( list( range( 1, int( a * b * 4 + 1 ) ) ) ).reshape( a, b, 4, order='C' ) sites = [] for i in range( a ): for j in range( b ): # site 1 r = np.array( [ i * sqrt(3) * spacing, j * 3 * spacing, 0.0 ] ) neighbours = [ grid[ i, j, 1 ], np.roll( grid, +1, axis=0 )[ i, j, 1 ], np.roll( grid, +1, axis=1 )[ i, j, 3 ] ] sites.append( lattice_site.Site( grid[ i, j, 0 ], r, neighbours, 0.0, site_labels[0] ) ) # site 2 r = np.array( [ i * sqrt(3) * spacing + sqrt(3)/2 * spacing, ( j * 3 + 0.5 ) * spacing, 0.0 ] ) neighbours = [ grid[ i, j, 0 ], grid[ i, j, 2 ], np.roll( grid, -1, axis=0 )[ i, j, 0 ] ] sites.append( lattice_site.Site( grid[ i, j, 1 ], r, neighbours, 0.0, site_labels[1] ) ) # site 3 r = np.array( [ i * sqrt(3) * spacing + sqrt(3)/2 * spacing, ( j * 3 + 1.5 ) * spacing, 0.0 ] ) neighbours = [ grid[ i, j, 1 ], grid[ i, j, 3 ], np.roll( grid, -1, axis=0 )[ i, j, 3 ] ] sites.append( lattice_site.Site( grid[ i, j, 2 ], r, neighbours, 0.0, site_labels[2] ) ) # site 4 r = np.array( [ i * sqrt(3) * spacing, ( j * 3 + 2 ) * spacing, 0.0 ] ) neighbours = [ grid[ i, j, 2 ], np.roll( grid, +1, axis=0 )[ i, j, 2 ], np.roll( grid, -1, axis=1 )[ i, j, 0 ] ] sites.append( lattice_site.Site( grid[ i, j, 3 ], r, neighbours, 0.0, site_labels[3] ) ) return lattice.Lattice( sites, cell_lengths=cell_lengths )
950,539
Generate a cubic lattice. Args: a (Int): Number of lattice repeat units along x. b (Int): Number of lattice repeat units along y. c (Int): Number of lattice repeat units along z. spacing (Float): Distance between lattice sites. Returns: (Lattice): The new lattice
def cubic_lattice( a, b, c, spacing ): grid = np.array( list( range( 1, a * b * c + 1 ) ) ).reshape( a, b, c, order='F' ) it = np.nditer( grid, flags=[ 'multi_index' ] ) sites = [] while not it.finished: x, y, z = it.multi_index r = np.array( [ x, y, z ] ) * spacing neighbours = [ np.roll( grid, +1, axis=0 )[x,y,z], np.roll( grid, -1, axis=0 )[x,y,z], np.roll( grid, +1, axis=1 )[x,y,z], np.roll( grid, -1, axis=1 )[x,y,z], np.roll( grid, +1, axis=2 )[x,y,z], np.roll( grid, -1, axis=2 )[x,y,z] ] sites.append( lattice_site.Site( int( it[0] ), r, neighbours, 0.0, 'L' ) ) it.iternext() return lattice.Lattice( sites, cell_lengths = np.array( [ a, b, c ] ) * spacing )
950,540
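The neighbour lookups in `cubic_lattice` and `honeycomb_lattice` above rely on `np.roll` to wrap indices at the cell boundary, which is what makes the lattices periodic. A minimal sketch of that idea on a one-dimensional grid (the site ids are illustrative):

import numpy as np

grid = np.arange(1, 5)       # site ids [1, 2, 3, 4] along one axis
plus_x = np.roll(grid, -1)   # neighbour in +x: [2, 3, 4, 1]
minus_x = np.roll(grid, +1)  # neighbour in -x: [4, 1, 2, 3]
print(plus_x[3], minus_x[0]) # site 4 wraps to 1; site 1 wraps to 4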
Returns the number of occupied nearest neighbour sites, classified by site type. Args: None Returns: (Dict(Str:Int)): Dictionary of nearest-neighbour occupied site numbers, classified by site label, e.g. { 'A' : 2, 'B' : 1 }.
def site_specific_nn_occupation( self ): to_return = { l : 0 for l in set( ( site.label for site in self.p_neighbours ) ) } for site in self.p_neighbours: if site.is_occupied: to_return[ site.label ] += 1 return to_return
950,545
The coordination-number dependent energy for this site. Args: delta_occupation (:obj:Dict(Str:Int), optional): A dictionary of a change in (site-type specific) coordination number, e.g. { 'A' : 1, 'B' : -1 }. If this is not None, the coordination-number dependent energy is calculated including these changes in neighbour-site occupations. Defaults to None Returns: (Float): The coordination-number dependent energy for this site.
def cn_occupation_energy( self, delta_occupation=None ): nn_occupations = self.site_specific_nn_occupation() if delta_occupation: for site in delta_occupation: assert( site in nn_occupations ) nn_occupations[ site ] += delta_occupation[ site ] return sum( [ self.cn_occupation_energies[ s ][ n ] for s, n in nn_occupations.items() ] )
950,546
Initialise a LookupTable object instance. Args: lattice (lattice_mc.Lattice): The lattice object, used to define the allowed jumps. hamiltonian (Str): The model Hamiltonian used to define the jump energies. Allowed values = `nearest-neighbour` Returns: None
def __init__( self, lattice, hamiltonian ): expected_hamiltonian_values = [ 'nearest-neighbour' ] if hamiltonian not in expected_hamiltonian_values: raise ValueError( hamiltonian ) self.site_energies = lattice.site_energies self.nn_energy = lattice.nn_energy self.cn_energy = lattice.cn_energies self.connected_site_pairs = lattice.connected_site_pairs() self.max_coordination_per_site = lattice.max_site_coordination_numbers() self.site_specific_coordination_per_site = lattice.site_specific_coordination_numbers() if hamiltonian == 'nearest-neighbour': self.generate_nearest_neighbour_lookup_table()
950,561
The relative probability for a jump between two sites with specific site types and coordination numbers. Args: l1 (Str): Site label for the initial site. l2 (Str): Site label for the final site. c1 (Int): Coordination number for the initial site. c2 (Int): Coordination number for the final site. Returns: (Float): The relative probability of this jump occurring.
def relative_probability( self, l1, l2, c1, c2 ): if self.site_energies: site_delta_E = self.site_energies[ l2 ] - self.site_energies[ l1 ] else: site_delta_E = 0.0 if self.nn_energy: delta_nn = c2 - c1 - 1 # -1 because the hopping ion is not counted in the final site occupation number site_delta_E += delta_nn * self.nn_energy return metropolis( site_delta_E )
950,562
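The `metropolis` call in `relative_probability` above is not shown in this table; a common form of the acceptance function it presumably wraps is sketched below, with the assumption that energies are already expressed in units of kT.

import math

def metropolis(delta_E):
    # accept downhill moves outright; accept uphill moves with Boltzmann probability
    # delta_E is assumed to be in units of kT (assumption, not taken from the table)
    if delta_E <= 0.0:
        return 1.0
    return math.exp(-delta_E)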
Construct a look-up table of relative jump probabilities for a nearest-neighbour interaction Hamiltonian. Args: None. Returns: None.
def generate_nearest_neighbour_lookup_table( self ): self.jump_probability = {} for site_label_1 in self.connected_site_pairs: self.jump_probability[ site_label_1 ] = {} for site_label_2 in self.connected_site_pairs[ site_label_1 ]: self.jump_probability[ site_label_1 ][ site_label_2 ] = {} for coordination_1 in range( self.max_coordination_per_site[ site_label_1 ] ): self.jump_probability[ site_label_1 ][ site_label_2 ][ coordination_1 ] = {} for coordination_2 in range( 1, self.max_coordination_per_site[ site_label_2 ] + 1 ): self.jump_probability[ site_label_1 ][ site_label_2 ][ coordination_1 ][ coordination_2 ] = self.relative_probability( site_label_1, site_label_2, coordination_1, coordination_2 )
950,563
Initialise an Atom instance. Args: initial_site (Site): Lattice site initially occupied by this Atom. Returns: None
def __init__( self, initial_site ): Atom.atom_number += 1 self.number = Atom.atom_number self._site = initial_site # check this site is not already occupied if self._site.occupation == 0: self._site.occupation = self.number self._site.is_occupied = True self._site.atom = self else: raise ValueError( "This site is already occupied by atom {}".format( initial_site.occupation ) ) self.reset()
950,564
Reinitialise the stored displacements, number of hops, and list of sites visited for this `Atom`. Args: None Returns: None
def reset( self ): self.number_of_hops = 0 self.dr = np.array( [ 0.0, 0.0, 0.0 ] ) self.summed_dr2 = 0.0 self.sites_visited = [ self._site.number ]
950,565
Initialise a Simulation object. Args: None Returns: None Notes: Simulation parameters need to be set using their corresponding setter methods.
def __init__( self ): self.lattice = None self.number_of_atoms = None self.number_of_jumps = None self.for_time = None self.number_of_equilibration_jumps = 0 self.atoms = None self.has_run = False
950,631
Reset all counters for this simulation. Args: None Returns: None
def reset( self ): self.lattice.reset() for atom in self.atoms.atoms: atom.reset()
950,632
Set the number of atoms for the simulation, and populate the simulation lattice. Args: n (Int): Number of atoms for this simulation. selected_sites (:obj:(List|Set|String), optional): Selects a subset of site types to be populated with atoms. Defaults to None. Returns: None
def set_number_of_atoms( self, n, selected_sites=None ): self.number_of_atoms = n self.atoms = species.Species( self.lattice.populate_sites( self.number_of_atoms, selected_sites=selected_sites ) )
950,633
Set up the simulation lattice from a file containing site data. Uses `init_lattice.lattice_from_sites_file`, which defines the site file spec. Args: filename (Str): sites file filename. cell_lengths (List(x,y,z)): cell lengths for the simulation cell. Returns: None
def define_lattice_from_file( self, filename, cell_lengths ): self.lattice = init_lattice.lattice_from_sites_file( filename, cell_lengths = cell_lengths )
950,634
Check whether the simulation has been initialised. Args: None Returns: None
def is_initialised( self ): if not self.lattice: raise AttributeError('Running a simulation needs the lattice to be initialised') if not self.atoms: raise AttributeError('Running a simulation needs the atoms to be initialised') if not self.number_of_jumps and not self.for_time: raise AttributeError('Running a simulation needs number_of_jumps or for_time to be set')
950,635
Run the simulation. Args: for_time (:obj:Float, optional): If `for_time` is set, then run the simulation until a set amount of time has passed. Otherwise, run the simulation for a set number of jumps. Defaults to None. Returns: None
def run( self, for_time=None ): self.for_time = for_time try: self.is_initialised() except AttributeError: raise if self.number_of_equilibration_jumps > 0: for step in range( self.number_of_equilibration_jumps ): self.lattice.jump() self.reset() if self.for_time: self.number_of_jumps = 0 while self.lattice.time < self.for_time: self.lattice.jump() self.number_of_jumps += 1 else: for step in range( self.number_of_jumps ): self.lattice.jump() self.has_run = True
950,636
Deprecated tracer correlation factor for this simulation. Args: None Returns: (Float): The tracer correlation factor, f. Notes: This function assumes that the jump distance between sites has been normalised to a=1. If the jump distance is not equal to 1 then the value returned by this function should be divided by a^2. Even better, use `self.tracer_correlation`.
def old_tracer_correlation( self ): if self.has_run: return self.atoms.sum_dr_squared() / float( self.number_of_jumps ) else: return None
950,637
Tracer diffusion coefficient, D*. Args: None Returns: (Float): The tracer diffusion coefficient, D*.
def tracer_diffusion_coefficient( self ): if self.has_run: return self.atoms.sum_dr_squared() / ( 6.0 * float( self.number_of_atoms ) * self.lattice.time ) else: return None
950,638
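The expression in `tracer_diffusion_coefficient` above is the three-dimensional Einstein relation; in LaTeX form, with N atoms and total simulated time t,

D^{*} = \frac{1}{6 N t} \sum_{i=1}^{N} |\Delta \mathbf{r}_i|^{2}

which matches `atoms.sum_dr_squared() / ( 6.0 * number_of_atoms * lattice.time )` in the row above.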
Returns the collective correlation factor, f_I Args: None Returns: (Float): The collective correlation factor, f_I. Notes: This function assumes that the jump distance between sites has been normalised to a=1. If the jump distance is not equal to 1 then the value returned by this function should be divided by a^2. Even better, use self.collective_correlation
def old_collective_correlation( self ): if self.has_run: return self.atoms.collective_dr_squared() / float( self.number_of_jumps ) else: return None
950,639
Returns the collective or "jump" diffusion coefficient, D_J. Args: None Returns: (Float): The collective diffusion coefficient, D_J.
def collective_diffusion_coefficient( self ): if self.has_run: return self.atoms.collective_dr_squared() / ( 6.0 * self.lattice.time ) else: return None
950,640
Create a jump-probability look-up table corresponding to the appropriate Hamiltonian. Args: hamiltonian (Str, optional): String specifying the simulation Hamiltonian. Valid values are 'nearest-neighbour' (default) and 'coordination_number'. Returns: None
def setup_lookup_table( self, hamiltonian='nearest-neighbour' ): expected_hamiltonian_values = [ 'nearest-neighbour', 'coordination_number' ] if hamiltonian not in expected_hamiltonian_values: raise ValueError self.lattice.jump_lookup_table = lookup_table.LookupTable( self.lattice, hamiltonian )
950,641
Initialise a Lattice instance. Args: sites (List(Site)): List of sites contained in the lattice. cell_lengths (np.array(x,y,z)): Vector of cell lengths for the simulation cell. Returns: None
def __init__( self, sites, cell_lengths ): self.cell_lengths = cell_lengths self.sites = sites self.number_of_sites = len( self.sites ) self.site_labels = set( [ site.label for site in self.sites ] ) self.site_populations = Counter( [ site.label for site in self.sites ] ) self.enforce_periodic_boundary_conditions() self.initialise_site_lookup_table() self.nn_energy = False self.cn_energies = False self.site_energies = False self.jump_lookup_table = False for site in self.sites: site.p_neighbours = [ self.site_with_id( i ) for i in site.neighbours ] self.reset()
950,642
Ensure that all lattice sites are within the central periodic image of the simulation cell. Sites that are outside the central simulation cell are mapped back into this cell. Args: None Returns: None
def enforce_periodic_boundary_conditions( self ): for s in self.sites: for i in range(3): if s.r[i] < 0.0: s.r[i] += self.cell_lengths[i] if s.r[i] > self.cell_lengths[i]: s.r[i] -= self.cell_lengths[i]
950,643
Create a lookup table allowing sites in this lattice to be queried using `self.site_lookup[n]` where `n` is the identifying site number. Args: None Returns: None
def initialise_site_lookup_table( self ): self.site_lookup = {} for site in self.sites: self.site_lookup[ site.number ] = site
950,644
All nearest-neighbour jumps not blocked by volume exclusion (i.e. from occupied to neighbouring unoccupied sites). Args: None Returns: (List(Jump)): List of possible jumps.
def potential_jumps( self ): jumps = [] if self.number_of_occupied_sites <= self.number_of_sites / 2: for occupied_site in self.occupied_sites(): unoccupied_neighbours = [ site for site in [ self.site_with_id( n ) for n in occupied_site.neighbours ] if not site.is_occupied ] for vacant_site in unoccupied_neighbours: jumps.append( jump.Jump( occupied_site, vacant_site, self.nn_energy, self.cn_energies, self.jump_lookup_table ) ) else: for vacant_site in self.vacant_sites(): occupied_neighbours = [ site for site in [ self.site_with_id( n ) for n in vacant_site.neighbours ] if site.is_occupied ] for occupied_site in occupied_neighbours: jumps.append( jump.Jump( occupied_site, vacant_site, self.nn_energy, self.cn_energies, self.jump_lookup_table ) ) return jumps
950,645
Update the lattice state by accepting a specific jump Args: jump (Jump): The jump that has been accepted. Returns: None.
def update( self, jump ): atom = jump.initial_site.atom dr = jump.dr( self.cell_lengths ) #print( "atom {} jumped from site {} to site {}".format( atom.number, jump.initial_site.number, jump.final_site.number ) ) jump.final_site.occupation = atom.number jump.final_site.atom = atom jump.final_site.is_occupied = True jump.initial_site.occupation = 0 jump.initial_site.atom = None jump.initial_site.is_occupied = False # TODO: updating atom counters could be contained in an atom.move_to( site ) method atom.site = jump.final_site atom.number_of_hops += 1 atom.dr += dr atom.summed_dr2 += np.dot( dr, dr )
950,646
Populate the lattice sites with a specific number of atoms. Args: number_of_atoms (Int): The number of atoms to populate the lattice sites with. selected_sites (:obj:List, optional): List of site labels if only some sites are to be occupied. Defaults to None. Returns: None
def populate_sites( self, number_of_atoms, selected_sites=None ): if number_of_atoms > self.number_of_sites: raise ValueError if selected_sites: atoms = [ atom.Atom( initial_site = site ) for site in random.sample( [ s for s in self.sites if s.label in selected_sites ], number_of_atoms ) ] else: atoms = [ atom.Atom( initial_site = site ) for site in random.sample( self.sites, number_of_atoms ) ] self.number_of_occupied_sites = number_of_atoms return atoms
950,647
Select a jump at random from all potential jumps, then update the lattice state. Args: None Returns: None
def jump( self ): potential_jumps = self.potential_jumps() if not potential_jumps: raise BlockedLatticeError('No moves are possible in this lattice') all_transitions = transitions.Transitions( potential_jumps ) random_jump = all_transitions.random() delta_t = all_transitions.time_to_jump() self.time += delta_t self.update_site_occupation_times( delta_t ) self.update( random_jump ) return delta_t
950,648
Average site occupation for each site type Args: None Returns: (Dict(Str:Float)): Dictionary of occupation statistics, e.g.:: { 'A' : 2.5, 'B' : 25.3 }
def site_occupation_statistics( self ): if self.time == 0.0: return None occupation_stats = { label : 0.0 for label in self.site_labels } for site in self.sites: occupation_stats[ site.label ] += site.time_occupied for label in self.site_labels: occupation_stats[ label ] /= self.time return occupation_stats
950,649
Set the energies for every site in the lattice according to the site labels. Args: energies (Dict(Str:Float)): Dictionary of energies for each site label, e.g.:: { 'A' : 1.0, 'B' : 0.0 } Returns: None
def set_site_energies( self, energies ): self.site_energies = energies for site_label in energies: for site in self.sites: if site.label == site_label: site.energy = energies[ site_label ]
950,650
Set the coordination number dependent energies for this lattice. Args: cn_energies (Dict(Str:Dict(Int:Float))): Dictionary of dictionaries specifying the coordination number dependent energies for each site type. e.g.:: { 'A' : { 0 : 0.0, 1 : 1.0, 2 : 2.0 }, 'B' : { 0 : 0.0, 1 : 2.0 } } Returns: None
def set_cn_energies( self, cn_energies ): for site in self.sites: site.set_cn_occupation_energies( cn_energies[ site.label ] ) self.cn_energies = cn_energies
950,651
Returns a dictionary of the coordination numbers for each site label. e.g.:: { 'A' : { 4 }, 'B' : { 2, 4 } } Args: none Returns: coordination_numbers (Dict(Str:Set(Int))): dictionary of coordination numbers for each site label.
def site_coordination_numbers( self ): coordination_numbers = {} for l in self.site_labels: coordination_numbers[ l ] = set( [ len( site.neighbours ) for site in self.sites if site.label == l ] ) return coordination_numbers
950,652
Returns a dictionary of the maximum coordination number for each site label. e.g.:: { 'A' : 4, 'B' : 4 } Args: None Returns: max_coordination_numbers (Dict(Str:Int)): dictionary of maximum coordination number for each site label.
def max_site_coordination_numbers( self ): return { l : max( c ) for l, c in self.site_coordination_numbers().items() }
950,653
Returns a dictionary of coordination numbers for each site type. Args: None Returns: (Dict(Str:List(Int))) : Dictionary of coordination numbers for each site type, e.g.:: { 'A' : [ 2, 4 ], 'B' : [ 2 ] }
def site_specific_coordination_numbers( self ): specific_coordination_numbers = {} for site in self.sites: specific_coordination_numbers[ site.label ] = site.site_specific_neighbours() return specific_coordination_numbers
950,654
Returns a dictionary of all connections between pairs of sites (by site label). e.g. for a linear lattice A-B-C it will return:: { 'A' : [ 'B' ], 'B' : [ 'A', 'C' ], 'C' : [ 'B' ] } Args: None Returns: site_connections (Dict(Str:List(Str))): A dictionary of neighbouring site types in the lattice.
def connected_site_pairs( self ): site_connections = {} for initial_site in self.sites: if not initial_site.label in site_connections: site_connections[ initial_site.label ] = [] for final_site in initial_site.p_neighbours: if final_site.label not in site_connections[ initial_site.label ]: site_connections[ initial_site.label ].append( final_site.label ) return site_connections
950,655
Selects a random subset of sites with a specific label and gives them a different label. Args: old_site_label (String or List(String)): Site label(s) of the sites to be modified. new_site_label (String): Site label to be applied to the modified sites. n_sites_to_change (Int): Number of sites to modify. Returns: None
def transmute_sites( self, old_site_label, new_site_label, n_sites_to_change ): selected_sites = self.select_sites( old_site_label ) for site in random.sample( selected_sites, n_sites_to_change ): site.label = new_site_label self.site_labels = set( [ site.label for site in self.sites ] )
950,656
Selects sites in the lattice with specified labels. Args: site_labels (List(Str)|Set(Str)|Str): Labels of sites to select. This can be a List [ 'A', 'B' ], a Set ( 'A', 'B' ), or a String 'A'. Returns: (List(Site)): List of sites with labels given by `site_labels`.
def select_sites( self, site_labels ): if type( site_labels ) in ( list, set ): selected_sites = [ s for s in self.sites if s.label in site_labels ] elif type( site_labels ) is str: selected_sites = [ s for s in self.sites if s.label == site_labels ] else: raise ValueError( str( site_labels ) ) return selected_sites
950,658
Returns all sites in the lattice (optionally from the set of sites with specific labels) that are not part of a percolating network. This is determined from clusters of connected sites that do not wrap round to themselves through a periodic boundary. Args: site_labels (String or List(String)): Labels of sites to be considered. Returns: (List(Site)): List of sites not in a periodic percolating network.
def detached_sites( self, site_labels=None ): clusters = self.connected_sites( site_labels=site_labels ) island_clusters = [ c for c in clusters if not any( c.is_periodically_contiguous() ) ] return list( itertools.chain.from_iterable( ( c.sites for c in island_clusters ) ) )
950,659
Initialise a Cluster instance. Args: sites (List(Site)): The list of sites that make up the cluster. Returns: None
def __init__( self, sites ): self.sites = set( sites ) self.neighbours = set() for s in self.sites: self.neighbours.update( s.p_neighbours ) self.neighbours = self.neighbours.difference( self.sites )
950,668
Combine two clusters into a single cluster. Args: other_cluster (Cluster): The second cluster to combine. Returns: (Cluster): The combination of both clusters.
def merge( self, other_cluster ): new_cluster = Cluster( self.sites | other_cluster.sites ) new_cluster.neighbours = ( self.neighbours | other_cluster.neighbours ).difference( new_cluster.sites ) return new_cluster
950,669
Finds the six sites with the maximum and minimum coordinates along x, y, and z. Args: None Returns: (List(List)): In the order [ +x, -x, +y, -y, +z, -z ]
def sites_at_edges( self ): min_x = min( [ s.r[0] for s in self.sites ] ) max_x = max( [ s.r[0] for s in self.sites ] ) min_y = min( [ s.r[1] for s in self.sites ] ) max_y = max( [ s.r[1] for s in self.sites ] ) min_z = min( [ s.r[2] for s in self.sites ] ) max_z = max( [ s.r[2] for s in self.sites ] ) x_max = [ s for s in self.sites if s.r[0] == max_x ] x_min = [ s for s in self.sites if s.r[0] == min_x ] y_max = [ s for s in self.sites if s.r[1] == max_y ] y_min = [ s for s in self.sites if s.r[1] == min_y ] z_max = [ s for s in self.sites if s.r[2] == max_z ] z_min = [ s for s in self.sites if s.r[2] == min_z ] return ( x_max, x_min, y_max, y_min, z_max, z_min )
950,670
Logical check of whether a cluster connects with itself across the simulation periodic boundary conditions. Args: None Returns: (Bool, Bool, Bool): Contiguity along the x, y, and z coordinate axes.
def is_periodically_contiguous( self ): edges = self.sites_at_edges() is_contiguous = [ False, False, False ] along_x = any( [ s2 in s1.p_neighbours for s1 in edges[0] for s2 in edges[1] ] ) along_y = any( [ s2 in s1.p_neighbours for s1 in edges[2] for s2 in edges[3] ] ) along_z = any( [ s2 in s1.p_neighbours for s1 in edges[4] for s2 in edges[5] ] ) return ( along_x, along_y, along_z )
950,671
Removes sites from the set of neighbouring sites if these have labels in remove_labels. Args: remove_labels (List(Str) or Str): Site label(s) to be removed from the cluster neighbour set. Returns: None
def remove_sites_from_neighbours( self, remove_labels ): if type( remove_labels ) is str: remove_labels = [ remove_labels ] self.neighbours = set( n for n in self.neighbours if n.label not in remove_labels )
950,672
Initialise a Transitions object. Args: jumps (List(Jump)): List of jumps to be contained in this Transitions object. Returns: None
def __init__( self, jumps ): self.jumps = jumps self.p = np.array( [ jump.relative_probability for jump in self.jumps ] )
950,674
Cumulative sum of the relative probabilities for all possible jumps. Args: None Returns: (np.array): Cumulative sum of relative jump probabilities.
def cumulative_probabilities( self ): partition_function = np.sum( self.p ) return np.cumsum( self.p ) / partition_function
950,675
Select a jump at random with appropriate relative probabilities. Args: None Returns: (Jump): The randomly selected Jump.
def random( self ): j = np.searchsorted( self.cumulative_probabilities(), random.random() ) return self.jumps[ j ]
950,676
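`Transitions.random` above draws a jump by inverse-transform sampling on the cumulative relative probabilities. A self-contained sketch of the same idea with made-up weights:

import random
import numpy as np

weights = np.array([0.2, 0.5, 0.3])               # relative jump probabilities (illustrative)
cumulative = np.cumsum(weights) / weights.sum()   # normalised cumulative distribution
j = np.searchsorted(cumulative, random.random())  # index drawn in proportion to its weight
print('selected jump index:', j)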
The timestep until the next jump. Args: None Returns: (Float): The timestep until the next jump.
def time_to_jump( self ): k_tot = rate_prefactor * np.sum( self.p ) return -( 1.0 / k_tot ) * math.log( random.random() )
950,677
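The residence time drawn in `time_to_jump` above is the standard kinetic Monte Carlo result: for a total rate k_tot the waiting time is exponentially distributed, t = -ln(u)/k_tot with u uniform on (0, 1). A sketch with an assumed attempt frequency (the prefactor value is illustrative, not taken from the table):

import math
import random

rate_prefactor = 1.0e13  # assumed attempt frequency in Hz

def time_to_next_jump(relative_probabilities):
    k_tot = rate_prefactor * sum(relative_probabilities)
    u = 1.0 - random.random()  # in (0, 1]; avoids log(0)
    return -math.log(u) / k_tot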
The change in system energy if this jump were accepted. Args: None Returns: (Float): delta E
def delta_E( self ): site_delta_E = self.final_site.energy - self.initial_site.energy if self.nearest_neighbour_energy: site_delta_E += self.nearest_neighbour_delta_E() if self.coordination_number_energy: site_delta_E += self.coordination_number_delta_E() return site_delta_E
950,701
Nearest-neighbour interaction contribution to the change in system energy if this jump were accepted. Args: None Returns: (Float): delta E (nearest-neighbour)
def nearest_neighbour_delta_E( self ): delta_nn = self.final_site.nn_occupation() - self.initial_site.nn_occupation() - 1 # -1 because the hopping ion is not counted in the final site occupation number return ( delta_nn * self.nearest_neighbour_energy )
950,702
Coordination-number dependent energy contribution to the change in system energy if this jump were accepted. Args: None Returns: (Float): delta E (coordination-number)
def coordination_number_delta_E( self ): initial_site_neighbours = [ s for s in self.initial_site.p_neighbours if s.is_occupied ] # excludes final site, since this is always unoccupied final_site_neighbours = [ s for s in self.final_site.p_neighbours if s.is_occupied and s is not self.initial_site ] # excludes initial site initial_cn_occupation_energy = ( self.initial_site.cn_occupation_energy() + sum( [ site.cn_occupation_energy() for site in initial_site_neighbours ] ) + sum( [ site.cn_occupation_energy() for site in final_site_neighbours ] ) ) final_cn_occupation_energy = ( self.final_site.cn_occupation_energy( delta_occupation = { self.initial_site.label : -1 } ) + sum( [ site.cn_occupation_energy( delta_occupation = { self.initial_site.label : -1 } ) for site in initial_site_neighbours ] ) + sum( [ site.cn_occupation_energy( delta_occupation = { self.final_site.label : +1 } ) for site in final_site_neighbours ] ) ) return ( final_cn_occupation_energy - initial_cn_occupation_energy )
950,703
Particle displacement vector for this jump. Args: cell_lengths (np.array(x,y,z)): Cell lengths for the orthogonal simulation cell. Returns: (np.array(x,y,z)): dr
def dr( self, cell_lengths ): half_cell_lengths = cell_lengths / 2.0 this_dr = self.final_site.r - self.initial_site.r for i in range( 3 ): if this_dr[ i ] > half_cell_lengths[ i ]: this_dr[ i ] -= cell_lengths[ i ] if this_dr[ i ] < -half_cell_lengths[ i ]: this_dr[ i ] += cell_lengths[ i ] return this_dr
950,704
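`dr` above applies the minimum-image convention componentwise for an orthorhombic cell. An equivalent vectorised sketch (the example vectors are illustrative):

import numpy as np

def minimum_image(dr, cell_lengths):
    # wrap each component of dr so that |dr_i| <= cell_lengths[i] / 2
    return dr - cell_lengths * np.round(dr / cell_lengths)

cell = np.array([10.0, 10.0, 10.0])
print(minimum_image(np.array([6.0, -7.0, 1.0]), cell))  # -> [-4.  3.  1.]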
Relative probability of accepting this jump from a lookup-table. Args: jump_lookup_table (LookupTable): the lookup table to be used for this jump. Returns: (Float): relative probability of accepting this jump.
def relative_probability_from_lookup_table( self, jump_lookup_table ): l1 = self.initial_site.label l2 = self.final_site.label c1 = self.initial_site.nn_occupation() c2 = self.final_site.nn_occupation() return jump_lookup_table.jump_probability[ l1 ][ l2 ][ c1 ][ c2 ]
950,705
Create a switch. Args: type (str): type of the switch [A,B,C,D] settings (str): a comma separated list pin (int): wiringPi pin Returns: switch
def create_switch(type, settings, pin): switch = None if type == "A": group, device = settings.split(",") switch = pi_switch.RCSwitchA(group, device) elif type == "B": addr, channel = settings.split(",") addr = int(addr) channel = int(channel) switch = pi_switch.RCSwitchB(addr, channel) elif type == "C": family, group, device = settings.split(",") group = int(group) device = int(device) switch = pi_switch.RCSwitchC(family, group, device) elif type == "D": group, device = settings.split(",") device = int(device) switch = pi_switch.RCSwitchD(group, device) else: print "Type %s is not supported!" % type sys.exit() switch.enableTransmit(pin) return switch
950,972
Deletes an individual issue. If the issue has sub-tasks you must set the deleteSubtasks=true parameter to delete the issue. You cannot delete an issue without deleting its sub-tasks. Args: issue_id: id of the issue to delete. params: optional query parameters (e.g. deleteSubtasks=true). Returns: the response of the DELETE request.
def delete_issue(self, issue_id, params=None): return self._delete(self.API_URL + 'issue/{}'.format(issue_id), params=params)
951,169
Browser based upload. Creates the video entry and metadata to initiate a browser upload. Authentication is needed. Params: title: string description: string keywords: comma separated string developer_tags: tuple Return: dict containing post_url and youtube_token, i.e. { 'post_url': post_url, 'youtube_token': youtube_token } Raises: ApiError: if not authenticated
def upload(self, title, description="", keywords="", developer_tags=None, access_control=AccessControl.Public): # Raise ApiError if not authenticated if not self.authenticated: raise ApiError(_("Authentication is required")) # create media group my_media_group = gdata.media.Group( title=gdata.media.Title(text=title), description=gdata.media.Description(description_type='plain', text=description), keywords=gdata.media.Keywords(text=keywords), category=[gdata.media.Category( text='Autos', scheme='http://gdata.youtube.com/schemas/2007/categories.cat', label='Autos')], #player = None ) # Access Control extension = self._access_control(access_control, my_media_group) # create video entry video_entry = gdata.youtube.YouTubeVideoEntry( media=my_media_group, extension_elements=extension) # add developer tags if developer_tags: video_entry.AddDeveloperTags(developer_tags) # upload meta data only response = Api.yt_service.GetFormUploadToken(video_entry) # parse response tuple and use the variables to build a form post_url = response[0] youtube_token = response[1] return {'post_url': post_url, 'youtube_token': youtube_token}
951,513
Updates the video. Authentication is required. Params: video_id: id of the video to update (the entry is fetched via 'fetch_video()') title: string description: string keywords: string Returns: a video entry on success, None otherwise
def update_video(self, video_id, title="", description="", keywords="", access_control=AccessControl.Unlisted): # Raise ApiError if not authenticated if not self.authenticated: raise ApiError(_("Authentication is required")) entry = self.fetch_video(video_id) # Set Access Control extension = self._access_control(access_control) if extension: entry.extension_elements = extension if title: entry.media.title.text = title if description: entry.media.description.text = description #if keywords: # entry.media.keywords.text = keywords success = Api.yt_service.UpdateVideoEntry(entry) return success
951,515
Deletes the video. Authentication is required. Params: video_id: id of the video to delete (the entry is fetched via 'fetch_video()') Return: True if successful Raises: OperationError: on unsuccessful deletion
def delete_video(self, video_id): # Raise ApiError if not authenticated if not self.authenticated: raise ApiError(_("Authentication is required")) entry = self.fetch_video(video_id) response = Api.yt_service.DeleteVideoEntry(entry) if not response: raise OperationError(_("Cannot be deleted from Youtube")) return True
951,516
The upload result page Youtube will redirect to this page after upload is finished Saves the video data and redirects to the next page Params: status: status of the upload (200 for success) id: id number of the video
def upload_return(request): status = request.GET.get("status") video_id = request.GET.get("id") if status == "200" and video_id: # upload is successful # save the video entry video = Video() video.user = request.user video.video_id = video_id video.save() # send a signal video_created.send(sender=video, video=video) # Redirect to the video page or the specified page try: next_url = settings.YOUTUBE_UPLOAD_REDIRECT_URL except AttributeError: next_url = reverse( "django_youtube.views.video", kwargs={"video_id": video_id}) return HttpResponseRedirect(next_url) else: # upload failed, redirect to upload page from django.contrib import messages messages.add_message( request, messages.ERROR, _('Upload failed, Please try again.')) return HttpResponseRedirect(reverse("django_youtube.views.upload"))
951,527
Send an event to the IFTTT maker channel Parameters: ----------- api_key : string Your IFTTT API key event : string The name of the IFTTT event to trigger value1 : Optional: Extra data sent with the event (default: None) value2 : Optional: Extra data sent with the event (default: None) value3 : Optional: Extra data sent with the event (default: None)
def send_event(api_key, event, value1=None, value2=None, value3=None): url = 'https://maker.ifttt.com/trigger/{e}/with/key/{k}/'.format(e=event, k=api_key) payload = {'value1': value1, 'value2': value2, 'value3': value3} return requests.post(url, data=payload)
951,822
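A usage sketch for `send_event` above; the key and event name below are placeholders, not real credentials.

# assumes send_event (defined in the row above) is available in scope
response = send_event('YOUR_IFTTT_KEY', 'door_opened', value1='front door')
print(response.status_code)  # 200 if the Webhooks trigger fired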
Converts from the original ASCII format of the Chen+ (2014) 3D dust map to the HDF5 format. Args: dat_fname (:obj:`str`): Filename of the original ASCII .dat file. h5_fname (:obj:`str`): Output filename to write the resulting HDF5 file to.
def ascii2h5(dat_fname, h5_fname): table = np.loadtxt(dat_fname, skiprows=1, dtype='f4') filter_kwargs = dict( chunks=True, compression='gzip', compression_opts=3) # Filter out pixels with all zeros idx = ~np.all(table[:,2:32] < 1.e-5, axis=1) with h5py.File(h5_fname, 'w') as f: d = np.arange(0., 4.351, 0.15).astype('f4') dset = f.create_dataset('dists', data=d, **filter_kwargs) dset.attrs['description'] = 'Distances at which extinction is measured' dset.attrs['units'] = 'kpc' dset = f.create_dataset('pix_lb', data=table[idx,0:2], **filter_kwargs) dset.attrs['description'] = 'Galactic (l, b) of each pixel' dset.attrs['units'] = 'deg' dset = f.create_dataset('A_r', data=table[idx,2:32], **filter_kwargs) dset.attrs['description'] = 'Extinction' dset.attrs['shape'] = '(pixel, distance)' dset.attrs['band'] = 'r' dset.attrs['units'] = 'mag' dset = f.create_dataset('A_r_err', data=table[idx,32:], **filter_kwargs) dset.attrs['description'] = 'Gaussian uncertainty in extinction' dset.attrs['shape'] = '(pixel, distance)' dset.attrs['band'] = 'r' dset.attrs['units'] = 'mag'
952,184
Downloads the Chen et al. (2014) dust map. Args: clobber (Optional[:obj:`bool`]): If ``True``, any existing file will be overwritten, even if it appears to match. If ``False`` (the default), :obj:`fetch()` will attempt to determine if the dataset already exists. This determination is not 100\% robust against data corruption.
def fetch(clobber=False): dest_dir = fname_pattern = os.path.join(data_dir(), 'chen2014') url = 'http://lamost973.pku.edu.cn/site/Photometric-Extinctions-and-Distances/table2.dat' dat_fname = os.path.join(dest_dir, 'chen2014.dat') h5_fname = os.path.join(dest_dir, 'chen2014.h5') md5 = 'f8a2bc46d411c57ca4c76dc344e291f1' # Check if file already exists if not clobber: h5_size = 52768768 # Guess, in Bytes h5_dsets = { 'dists': (30,), 'pix_lb': (557398, 2), 'A_r': (557398, 30), 'A_r_err': (557398, 30) } if fetch_utils.h5_file_exists(h5_fname, h5_size, dsets=h5_dsets): print('File appears to exist already. Call `fetch(clobber=True)` ' 'to force overwriting of existing file.') return # Download the table print('Downloading {}'.format(url)) fetch_utils.download_and_verify(url, md5, fname=dat_fname) # Convert from ASCII to HDF5 format print('Repacking files...') ascii2h5(dat_fname, h5_fname) # Cleanup print('Removing original file...') os.remove(dat_fname)
952,185
Downloads the IPHAS 3D dust map of Sale et al. (2014). Args: clobber (Optional[bool]): If ``True``, any existing file will be overwritten, even if it appears to match. If ``False`` (the default), ``fetch()`` will attempt to determine if the dataset already exists. This determination is not 100\% robust against data corruption.
def fetch(clobber=False): dest_dir = fname_pattern = os.path.join(data_dir(), 'iphas') url_pattern = 'http://www.iphas.org/data/extinction/A_samp_{:03d}.tar.gz' fname_pattern = os.path.join(dest_dir, 'A_samp_') + '{:03d}.tar.gz' # Check if file already exists if not clobber: h5_fname = os.path.join(dest_dir, 'iphas.h5') h5_size = 227817543 # Guess, in Bytes h5_dsets = { 'samples': (61130,) } if fetch_utils.h5_file_exists(h5_fname, h5_size, dsets=h5_dsets): print('File appears to exist already. Call `fetch(clobber=True)` ' 'to force overwriting of existing file.') return # Expected MD5 sums of .samp files file_md5sum = { 30: 'dd531e397622bc97d4ff92b6c7863ade', 40: 'b0f925eb3e46b77876e4054a26ad5b52', 50: 'ea3b9500f0419d66dd92d9f9c127c2b5', 60: 'cccf136f4e2306a6038e8093499216fd', 70: 'a05fe2f815086686056c18087cc5410b', 80: '799bf618c8827b3d7250c884ec66ec49', 90: 'd2a302d917da768bacf6ea74cb9dcfad', 100: '2c75e31ad9320818556c4c9964b6af65', 110: '742ea8de6f5f8a7e549f6c56b0088789', 120: '9beabfa2c9634f953adadb5016eab072', 130: '7cd7313f466eb60e8318d0f1bd32e035', 140: 'fb6d09e4d939081b891e245c30b791f1', 150: '8e9b6dc1561183aeadc64f41c85a64a8', 160: '8a35828457b7b1d53d06998114553674', 170: '7ffb29ec23e2f625dcfaaa84c293821d', 180: 'c737da479d132b88483d6ddab5b25fc8', 190: '9bc5fc7f7ba55f36a167473bb3679601', 200: '7d8ffc4aa2f7c7026d8aa3ffb670d48e', 210: 'e31b04964b7970b81fc90c120b4ebc24' } # Download the .samp files for key in file_md5sum: url = url_pattern.format(key) print('Downloading {}'.format(url)) fetch_utils.download_and_verify( url, file_md5sum[key], fname_pattern.format(key)) # Convert from ASCII to HDF5 format print('Repacking files...') ascii2h5(dest_dir, os.path.join(dest_dir, 'iphas.h5')) # Cleanup print('Removing original files...') for key in file_md5sum: os.remove(fname_pattern.format(key))
952,221
Checks if the dihedral defining atom is collinear. For each index starting from the third row of the ``construction_table``, checks if the reference atoms are collinear. Args: construction_table (pd.DataFrame): Returns: list: A list of problematic indices.
def check_dihedral(self, construction_table): c_table = construction_table angles = self.get_angle_degrees(c_table.iloc[3:, :].values) problem_index = np.nonzero((175 < angles) | (angles < 5))[0] rename = dict(enumerate(c_table.index[3:])) problem_index = [rename[i] for i in problem_index] return problem_index
952,230
Reindex the dihedral defining atom if a linear reference is used. Uses :meth:`~Cartesian.check_dihedral` to obtain the problematic indices. Args: construction_table (pd.DataFrame): use_lookup (bool): Use a lookup variable for :meth:`~chemcoord.Cartesian.get_bonds`. The default is specified in ``settings['defaults']['use_lookup']`` Returns: pd.DataFrame: Appropriately renamed construction table.
def correct_dihedral(self, construction_table, use_lookup=None): if use_lookup is None: use_lookup = settings['defaults']['use_lookup'] problem_index = self.check_dihedral(construction_table) bond_dict = self._give_val_sorted_bond_dict(use_lookup=use_lookup) c_table = construction_table.copy() for i in problem_index: loc_i = c_table.index.get_loc(i) b, a, problem_d = c_table.loc[i, ['b', 'a', 'd']] try: c_table.loc[i, 'd'] = (bond_dict[a] - {b, a, problem_d} - set(c_table.index[loc_i:]))[0] except IndexError: visited = set(c_table.index[loc_i:]) | {b, a, problem_d} tmp_bond_dict = OrderedDict([(j, bond_dict[j] - visited) for j in bond_dict[problem_d]]) found = False while tmp_bond_dict and not found: new_tmp_bond_dict = OrderedDict() for new_d in tmp_bond_dict: if new_d in visited: continue angle = self.get_angle_degrees([b, a, new_d])[0] if 5 < angle < 175: found = True c_table.loc[i, 'd'] = new_d else: visited.add(new_d) for j in tmp_bond_dict[new_d]: new_tmp_bond_dict[j] = bond_dict[j] - visited tmp_bond_dict = new_tmp_bond_dict if not found: other_atoms = c_table.index[:loc_i].difference({b, a}) molecule = self.get_distance_to(origin=i, sort=True, other_atoms=other_atoms) k = 0 while not found and k < len(molecule): new_d = molecule.index[k] angle = self.get_angle_degrees([b, a, new_d])[0] if 5 < angle < 175: found = True c_table.loc[i, 'd'] = new_d k = k + 1 if not found: message = ('The atom with index {} has no possibility ' 'to get nonlinear reference atoms'.format) raise UndefinedCoordinateSystem(message(i)) return c_table
952,231
Reindex the construction_table if a linear reference is present in the first three rows. Uses :meth:`~Cartesian.check_absolute_refs` to obtain the problematic indices. Args: construction_table (pd.DataFrame): Returns: pd.DataFrame: Appropriately renamed construction table.
def correct_absolute_refs(self, construction_table): c_table = construction_table.copy() abs_refs = constants.absolute_refs problem_index = self.check_absolute_refs(c_table) for i in problem_index: order_of_refs = iter(permutations(abs_refs.keys())) finished = False while not finished: if self._has_valid_abs_ref(i, c_table): finished = True else: row = c_table.index.get_loc(i) c_table.iloc[row, row:] = next(order_of_refs)[row:3] return c_table
952,234
Create the Zmatrix from a construction table. Args: construction_table (pd.DataFrame): Returns: Zmat: A new instance of :class:`Zmat`.
def _build_zmat(self, construction_table): c_table = construction_table default_cols = ['atom', 'b', 'bond', 'a', 'angle', 'd', 'dihedral'] optional_cols = list(set(self.columns) - {'atom', 'x', 'y', 'z'}) zmat_frame = pd.DataFrame(columns=default_cols + optional_cols, dtype='float', index=c_table.index) zmat_frame.loc[:, optional_cols] = self.loc[c_table.index, optional_cols] zmat_frame.loc[:, 'atom'] = self.loc[c_table.index, 'atom'] zmat_frame.loc[:, ['b', 'a', 'd']] = c_table zmat_values = self._calculate_zmat_values(c_table) zmat_frame.loc[:, ['bond', 'angle', 'dihedral']] = zmat_values zmatrix = Zmat(zmat_frame, metadata=self.metadata, _metadata={'last_valid_cartesian': self.copy()}) return zmatrix
952,236
Returns the total mass in g/mol. Args: None Returns: float:
def get_total_mass(self): try: mass = self.loc[:, 'mass'].sum() except KeyError: mass_molecule = self.add_data('mass') mass = mass_molecule.loc[:, 'mass'].sum() return mass
952,240
Determines if ``other`` has the same sumformula Args: other (molecule): Returns: bool:
def has_same_sumformula(self, other): same_atoms = True for atom in set(self['atom']): own_atom_number = len(self[self['atom'] == atom]) other_atom_number = len(other[other['atom'] == atom]) same_atoms = (own_atom_number == other_atom_number) if not same_atoms: break return same_atoms
952,241
Return the number of electrons. Args: charge (int): Charge of the molecule. Returns: int:
def get_electron_number(self, charge=0): atomic_number = constants.elements['atomic_number'].to_dict() return sum([atomic_number[atom] for atom in self['atom']]) - charge
952,242
Returns E(B-V) at the specified location(s) on the sky. Args: coords (`astropy.coordinates.SkyCoord`): The coordinates to query. Returns: A float array of reddening, in units of E(B-V), at the given coordinates. The shape of the output is the same as the shape of the coordinates stored by `coords`.
def query(self, coords): # gal = coords.transform_to('galactic') gal = coords l = gal.l.deg b = gal.b.deg # Detect scalar input scalar_input = not hasattr(l, '__len__') if scalar_input: l = np.array([l]) b = np.array([b]) # Fill return array with NaNs ebv = np.empty(l.shape, dtype='f8') ebv[:] = np.nan # Fill northern cap idx = (b >= 65.) & (b <= 90.) ebv[idx] = self._lb2ebv_northcap(l[idx], b[idx]) # Fill southern cap idx = (b <= -65.) & (b >= -90.) ebv[idx] = self._lb2ebv_southcap(l[idx], b[idx]) # Fill northern midplane idx = (b < 65.) & (b >= 10.) ebv[idx] = self._lb2ebv_midnorth(l[idx], b[idx]) # Fill southern midplane idx = (b > -65.) & (b <= -10.) ebv[idx] = self._lb2ebv_midsouth(l[idx], b[idx]) if scalar_input: ebv = ebv[0] return ebv
952,391
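A hedged usage sketch; the coordinate handling follows the dustmaps query convention, but the map class below is a placeholder, not a confirmed name::

    import astropy.units as u
    from astropy.coordinates import SkyCoord

    coords = SkyCoord(l=[30., 120.] * u.deg, b=[75., -80.] * u.deg, frame='galactic')
    # ebv = SomeBHLikeQuery().query(coords)   # placeholder class exposing this method
    # ebv would have shape (2,); NaN is returned outside the covered latitude ranges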
Return a dictionary with the id, user, uid, bounds, date of creation and all the tags of the changeset. Args: changeset: the changeset metadata as an XML element.
def changeset_info(changeset): keys = [tag.attrib.get('k') for tag in changeset.getchildren()] keys += ['id', 'user', 'uid', 'bbox', 'created_at'] values = [tag.attrib.get('v') for tag in changeset.getchildren()] values += [ changeset.get('id'), changeset.get('user'), changeset.get('uid'), get_bounds(changeset), changeset.get('created_at') ] return dict(zip(keys, values))
952,423
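A minimal sketch chaining this with ``get_metadata`` (defined a little further below); it assumes these helpers live in one module and goes out to the live OSM API::

    changeset = get_metadata(12345678)        # arbitrary existing changeset id
    info = changeset_info(changeset)
    print(info['id'], info['user'], info['created_at'], info['bbox'])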
Get the changeset using the OSM API and return the content as an XML ElementTree. Args: changeset: the id of the changeset.
def get_changeset(changeset): url = 'https://www.openstreetmap.org/api/0.6/changeset/{}/download'.format( changeset ) return ET.fromstring(requests.get(url).content)
952,424
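A hedged example; the id is arbitrary and the call goes out to the live OSM API::

    root = get_changeset(12345678)
    # The download endpoint returns an osmChange document whose children are
    # the create/modify/delete actions of the changeset.
    actions = [child.tag for child in root]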
Get the metadata of a changeset using the OSM API and return it as an XML ElementTree. Args: changeset: the id of the changeset.
def get_metadata(changeset): url = 'https://www.openstreetmap.org/api/0.6/changeset/{}'.format(changeset) return ET.fromstring(requests.get(url).content).getchildren()[0]
952,425
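A short sketch contrasting this with ``get_changeset`` above: the metadata endpoint returns the changeset element itself, carrying its tags and bounding-box attributes::

    meta = get_metadata(12345678)             # arbitrary existing changeset id
    print(meta.get('user'), meta.get('min_lon'), meta.get('max_lat'))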
Get the bounds of the changeset and return it as a Polygon object. If the changeset has no coordinates (as is the case for changesets that deal only with relations), it returns an empty Polygon. Args: changeset: the changeset metadata as an XML element.
def get_bounds(changeset): try: return Polygon([ (float(changeset.get('min_lon')), float(changeset.get('min_lat'))), (float(changeset.get('max_lon')), float(changeset.get('min_lat'))), (float(changeset.get('max_lon')), float(changeset.get('max_lat'))), (float(changeset.get('min_lon')), float(changeset.get('max_lat'))), (float(changeset.get('min_lon')), float(changeset.get('min_lat'))), ]) except TypeError: return Polygon()
952,426
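A hedged example combining ``get_metadata`` and ``get_bounds``; a relation-only changeset yields an empty Polygon::

    meta = get_metadata(12345678)             # arbitrary existing changeset id
    bbox = get_bounds(meta)
    print('no geographic extent' if bbox.is_empty else bbox.bounds)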
Converts from sky coordinates to pixel indices. Args: coords (:obj:`astropy.coordinates.SkyCoord`): Sky coordinates. Returns: Pixel indices of the coordinates, with the same shape as the input coordinates. Pixels which are outside the map are given an index equal to the number of pixels in the map.
def _coords2idx(self, coords): x = self._coords2vec(coords) idx = self._kd.query(x, p=self._metric_p, distance_upper_bound=self._max_pix_scale) return idx[1]
952,566
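The method is internal, but the underlying idea is a nearest-neighbour lookup on unit vectors with a distance cutoff. A self-contained sketch of that idea (not the class itself)::

    import numpy as np
    from scipy.spatial import cKDTree

    pix_vec = np.random.randn(100, 3)          # stand-in for the map's pixel directions
    pix_vec /= np.linalg.norm(pix_vec, axis=1, keepdims=True)
    kd = cKDTree(pix_vec)
    dist, idx = kd.query(np.array([[0., 0., 1.]]), distance_upper_bound=0.5)
    # idx == len(pix_vec) flags queries with no pixel inside the cutoff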
Downloads the Marshall et al. (2006) dust map, which is based on 2MASS stellar photometry. Args: clobber (Optional[:obj:`bool`]): If ``True``, any existing file will be overwritten, even if it appears to match. If ``False`` (the default), :obj:`fetch()` will attempt to determine if the dataset already exists. This determination is not 100\% robust against data corruption.
def fetch(clobber=False): table_dir = os.path.join(data_dir(), 'marshall') # Check if file already exists if not clobber: h5_fname = os.path.join(table_dir, 'marshall.h5') h5_size = 5033290 # Guess, in Bytes h5_dsets = { 'l': (801, 81), 'b': (801, 81), 'chi2_all': (801, 81), 'chi2_giants': (801, 81), 'A': (801, 81, 33), 'sigma_A': (801, 81, 33), 'dist': (801, 81, 33), 'sigma_dist': (801, 81, 33) } if fetch_utils.h5_file_exists(h5_fname, h5_size, dsets=h5_dsets): print('File appears to exist already. Call ``fetch(clobber=True)`` ' 'to force overwriting of existing file.') return # Download the ASCII table url = 'ftp://cdsarc.u-strasbg.fr/pub/cats/J/A%2BA/453/635/table1.dat.gz' md5 = '637b95b025517a8b9757b6465b632285' table_fname = os.path.join(table_dir, 'table1.dat.gz') fetch_utils.download_and_verify(url, md5, fname=table_fname) # Download the README url = 'ftp://cdsarc.u-strasbg.fr/pub/cats/J/A%2BA/453/635/ReadMe' md5 = '3b7c1296b181b3d77106ab50193dc7ee' readme_fname = os.path.join(table_dir, 'ReadMe') fetch_utils.download_and_verify(url, md5, fname=readme_fname) # Convert from ASCII table to HDF5 dat2hdf5(table_dir) # Cleanup print('Cleaning up ...') os.remove(table_fname) os.remove(readme_fname)
952,568
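A hedged usage sketch, assuming this is the ``fetch`` of the dustmaps ``marshall`` module and that the data directory is configured first; the paths are hypothetical::

    from dustmaps.config import config
    config['data_dir'] = '/path/to/dustmaps_data'   # hypothetical directory

    import dustmaps.marshall
    dustmaps.marshall.fetch()   # downloads table1.dat.gz once and converts it to HDF5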
Converts from Galactic coordinates to pixel indices. Args: gal (:obj:`astropy.coordinates.SkyCoord`): Galactic coordinates. Must store an array of coordinates (i.e., not be scalar). Returns: ``j, k, mask`` - Pixel indices of the coordinates, as well as a mask of in-bounds coordinates. Outputs have the same shape as the input coordinates.
def _gal2idx(self, gal): # Make sure that l is in domain [-180 deg, 180 deg) l = coordinates.Longitude(gal.l, wrap_angle=180.*units.deg) j = (self._inv_pix_scale * (l.deg - self._l_bounds[0])).astype('i4') k = (self._inv_pix_scale * (gal.b.deg - self._b_bounds[0])).astype('i4') idx = (j < 0) | (j >= self._shape[0]) | (k < 0) | (k >= self._shape[1]) if np.any(idx): j[idx] = -1 k[idx] = -1 return j, k, ~idx
952,570
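A self-contained sketch of the same index arithmetic on a toy grid (bounds and pixel scale made up), not the class itself::

    import numpy as np

    l_bounds, b_bounds = (-100., 100.), (-10., 10.)
    inv_pix_scale, shape = 4.0, (800, 80)        # 4 pixels per degree

    l = np.array([-50., 250.])                   # 250 deg wraps to -110 deg: out of bounds
    b = np.array([5., 0.])
    l = ((l + 180.) % 360.) - 180.               # wrap into [-180 deg, 180 deg)
    j = (inv_pix_scale * (l - l_bounds[0])).astype('i4')
    k = (inv_pix_scale * (b - b_bounds[0])).astype('i4')
    mask = (j >= 0) & (j < shape[0]) & (k >= 0) & (k < shape[1])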
Read a molden file. Args: inputfile (str): start_index (int): Returns: list: A list containing :class:`~chemcoord.Cartesian` is returned.
def read_molden(inputfile, start_index=0, get_bonds=True): from chemcoord.cartesian_coordinates.cartesian_class_main import Cartesian with open(inputfile, 'r') as f: found = False while not found: line = f.readline() if '[N_GEO]' in line: found = True number_of_molecules = int(f.readline().strip()) energies = [] found = False while not found: line = f.readline() if 'energy' in line: found = True for _ in range(number_of_molecules): energies.append(float(f.readline().strip())) found = False while not found: line = f.readline() if '[GEOMETRIES] (XYZ)' in line: found = True current_line = f.tell() number_of_atoms = int(f.readline().strip()) f.seek(current_line) cartesians = [] for energy in energies: cartesian = Cartesian.read_xyz( f, start_index=start_index, get_bonds=get_bonds, nrows=number_of_atoms, engine='python') cartesian.metadata['energy'] = energy cartesians.append(cartesian) return cartesians
952,600
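A hedged usage sketch, assuming ``read_molden`` is importable from chemcoord's xyz_functions; the molden file name is hypothetical::

    import chemcoord as cc

    structures = cc.xyz_functions.read_molden('optimization.molden', get_bonds=False)
    for cartesian in structures:
        print(cartesian.metadata['energy'])   # one energy per stored geometry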
Matrix multiplication between A and B. This function is equivalent to ``A @ B``, which is unfortunately not possible under Python 2.x. Args: A (sequence): B (sequence): Returns: sequence:
def dot(A, B): try: result = A.__matmul__(B) if result is NotImplemented: result = B.__rmatmul__(A) except AttributeError: result = B.__rmatmul__(A) return result
952,604
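A short example of the dispatch; with NumPy arrays the result matches ordinary matrix multiplication::

    import numpy as np

    A = np.eye(3)
    B = np.array([[1.], [2.], [3.]])
    dot(A, B)   # identical to A @ B on Python 3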
Returns the rotation matrix. This function returns a matrix for the counterclockwise rotation around the given axis. The input angle is in radians. Args: axis (vector): angle (float): Returns: Rotation matrix (np.array):
def get_rotation_matrix(axis, angle):
    axis = normalize(np.array(axis))
    # Validate the shape of the supplied axis before delegating to the jitted kernel.
    if not axis.shape == (3, ):
        raise ValueError('axis.shape has to be 3')
    angle = float(angle)
    return _jit_get_rotation_matrix(axis, angle)
952,608
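A quick check of the convention: a counterclockwise rotation by 90 degrees about the z axis maps the x axis onto the y axis::

    import numpy as np

    R = get_rotation_matrix([0, 0, 1], np.pi / 2)
    np.allclose(dot(R, np.array([1., 0., 0.])), [0., 1., 0.])   # True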
Returns the rotation matrix. This function returns a matrix for the counterclockwise rotation around the given axis. The input angle is in radians. Args: axis (vector): angle (float): Returns: Rotation matrix (np.array):
def _jit_get_rotation_matrix(axis, angle): axis = _jit_normalize(axis) a = m.cos(angle / 2) b, c, d = axis * m.sin(angle / 2) rot_matrix = np.empty((3, 3)) rot_matrix[0, 0] = a**2 + b**2 - c**2 - d**2 rot_matrix[0, 1] = 2. * (b * c - a * d) rot_matrix[0, 2] = 2. * (b * d + a * c) rot_matrix[1, 0] = 2. * (b * c + a * d) rot_matrix[1, 1] = a**2 + c**2 - b**2 - d**2 rot_matrix[1, 2] = 2. * (c * d - a * b) rot_matrix[2, 0] = 2. * (b * d - a * c) rot_matrix[2, 1] = 2. * (c * d + a * b) rot_matrix[2, 2] = a**2 + d**2 - b**2 - c**2 return rot_matrix
952,609